From 45d0d310d2d1c0d7f336a4dc5856ed1289439c17 Mon Sep 17 00:00:00 2001
From: saxix
Date: Thu, 20 Jun 2024 20:08:58 +0200
Subject: [PATCH 01/31] updates CI

---
 .github/workflows/delete_image.yml | 72 +++++++++++++++++-------------
 .github/workflows/test.yml         | 16 ++++---
 2 files changed, 52 insertions(+), 36 deletions(-)

diff --git a/.github/workflows/delete_image.yml b/.github/workflows/delete_image.yml
index 80d5ab23..7d51ec6f 100644
--- a/.github/workflows/delete_image.yml
+++ b/.github/workflows/delete_image.yml
@@ -5,35 +5,45 @@ jobs:
     if: github.event.ref_type == 'branch'
     runs-on: ubuntu-latest
     steps:
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-
-      - name: Delete Test Docker Image
-        shell: bash
+      - name: Install regctl
+        uses: regclient/actions/regctl-installer@main
+      - name: regctl login
+        uses: regclient/actions/regctl-login@main
+        with:
+          registry: docker.io
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - shell: bash
         run: |
-          name="${{vars.DOCKER_IMAGE}}:test-${{steps.meta.outputs.version}}"
-          registry="https://registry-1.docker.io"
-          curl -v -sSL -X DELETE "http://${registry}/v2/${name}/manifests/$(
-            curl -sSL -I \
-              -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
-              "http://${registry}/v2/${name}/manifests/$(
-                curl -sSL "http://${registry}/v2/${name}/tags/list" | jq -r '.tags[0]'
-              )" \
-            | awk '$1 == "Docker-Content-Digest:" { print $2 }' \
-            | tr -d $'\r' \
-          )"
-      - name: Delete linked Docker Image
-        shell: bash
-        run: |
-          name="${{vars.DOCKER_IMAGE}}:${{steps.meta.outputs.version}}"
-          registry="https://registry-1.docker.io"
-          curl -v -sSL -X DELETE "http://${registry}/v2/${name}/manifests/$(
-            curl -sSL -I \
-              -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
-              "http://${registry}/v2/${name}/manifests/$(
-                curl -sSL "http://${registry}/v2/${name}/tags/list" | jq -r '.tags[0]'
-              )" \
-            | awk '$1 == "Docker-Content-Digest:" { print $2 }' \
-            | tr -d $'\r' \
-          )"
+          ref="${{github.event.ref}}"
+          tag=$(echo $ref | sed -e "s#refs/heads/##g" | sed -e "s#/#-#g")
+          name="${{vars.DOCKER_IMAGE}}:test-${tag}"
+          echo "Delete $name"
+#      - name: Delete Test Docker Image
+#        shell: bash
+#        run: |
+#          name="${{vars.DOCKER_IMAGE}}:test-${{github.event.ref}}"
+#          registry="https://registry-1.docker.io"
+#          curl -v -sSL -X DELETE "http://${registry}/v2/${name}/manifests/$(
+#            curl -sSL -I \
+#              -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+#              "http://${registry}/v2/${name}/manifests/$(
+#                curl -sSL "http://${registry}/v2/${name}/tags/list" | jq -r '.tags[0]'
+#              )" \
+#            | awk '$1 == "Docker-Content-Digest:" { print $2 }' \
+#            | tr -d $'\r' \
+#          )"
+#      - name: Delete linked Docker Image
+#        shell: bash
+#        run: |
+#          name="${{vars.DOCKER_IMAGE}}:${{github.event.ref}}"
+#          registry="https://registry-1.docker.io"
+#          curl -v -sSL -X DELETE "http://${registry}/v2/${name}/manifests/$(
+#            curl -sSL -I \
+#              -H "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+#              "http://${registry}/v2/${name}/manifests/$(
+#                curl -sSL "http://${registry}/v2/${name}/tags/list" | jq -r '.tags[0]'
+#              )" \
+#            | awk '$1 == "Docker-Content-Digest:" { print $2 }' \
+#            | tr -d $'\r' \
+#          )"

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index d4fed1a9..277b6cb2 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,6 +1,9 @@
 name: Test

 on:
+  create:
+    branches:
+      - releases/*
   push:
     branches:
       - develop
@@ -29,8 +32,9 @@ permissions:

 jobs:
   changes:
-    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
-
+    if: (github.event_name != 'pull_request'
+      || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name)
+      || github.event_name == 'create'
     runs-on: ubuntu-latest
     timeout-minutes: 1
     defaults:
       run:
         shell: bash
@@ -97,7 +101,9 @@ jobs:
   test:
     name: Run Test Suite
     needs: [ changes,build ]
-    if: needs.changes.outputs.run_tests == 'true' || contains(github.event.head_commit.message, 'ci:test')
+    if: (needs.changes.outputs.run_tests == 'true'
+      || contains(github.event.head_commit.message, 'ci:test')
+      || github.event_name == 'create')
     runs-on: ubuntu-latest
     services:
       redis:
@@ -150,7 +156,7 @@
       contents: read  # for actions/checkout to fetch code
       security-events: write  # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read  # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
-    if: needs.build.outputs.created == 'true' || contains(github.event.head_commit.message, 'ci:scan')
+    if: needs.build.outputs.created == 'true' || contains(github.event.head_commit.message, 'ci:scan') || github.event_name == 'create'
     steps:
       - name: Checkout code
        uses: actions/checkout@v4
@@ -173,7 +179,7 @@
       refs/heads/develop
       refs/heads/staging
       refs/heads/master
-      ', github.ref) || contains(github.event.head_commit.message, 'ci:release')
+      ', github.ref) || contains(github.event.head_commit.message, 'ci:release') || github.event_name == 'create'
     name: "Release Docker"
     needs: [ test ]
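The new delete step above only echoes the tag it would remove; the eventual deletion will presumably go through regctl as well. Below is a minimal sketch of what that step could look like, not the committed code: it assumes regctl is already installed and logged in by the preceding steps, and that branch images use the test-<slug> / <slug> tag layout seen elsewhere in this series. Note that `regctl tag delete` unlinks only the tag, while `regctl manifest delete` would also remove the manifest and every tag pointing at it.

```yaml
      # Hypothetical follow-up step for delete_image.yml (not part of this patch).
      - name: Delete branch images
        shell: bash
        run: |
          ref="${{ github.event.ref }}"
          tag="${ref#refs/heads/}"   # strip the refs/heads/ prefix if present
          tag="${tag//\//-}"         # releases/foo -> releases-foo
          for name in "${{ vars.DOCKER_IMAGE }}:test-${tag}" "${{ vars.DOCKER_IMAGE }}:${tag}"; do
            # "tag delete" removes just this tag; "manifest delete" would also
            # drop the manifest and any other tags that reference it.
            regctl tag delete "$name" || echo "::warning:: could not delete $name"
          done
```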
From 36e90348bb0bf53ecbaea4328d112f7ea74995e5 Mon Sep 17 00:00:00 2001
From: saxix
Date: Thu, 20 Jun 2024 20:59:19 +0200
Subject: [PATCH 02/31] updates CI ci:release ci:test

---
 .github/workflows/test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 277b6cb2..e4716fa4 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -73,7 +73,7 @@ jobs:
   build:
     needs: [ changes ]
     runs-on: ubuntu-latest
-    timeout-minutes: 10
+    timeout-minutes: 30
     defaults:
       run:
         shell: bash
@@ -184,7 +184,7 @@ jobs:
     name: "Release Docker"
     needs: [ test ]
     runs-on: ubuntu-latest
-    timeout-minutes: 10
+    timeout-minutes: 30
     defaults:
       run:
         shell: bash

From b34eb332faa50b507fa73e20ed24bce38283b8f7 Mon Sep 17 00:00:00 2001
From: saxix
Date: Thu, 20 Jun 2024 21:22:20 +0200
Subject: [PATCH 03/31] updates CI ci:release ci:test ci:scan

---
 .github/workflows/test.yml | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e4716fa4..8f40ccf6 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -208,9 +208,12 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
           code_checksum: ${{ contains(github.event.head_commit.message, 'ci:build') && steps.checksum.outputs.checksum || '' }}
-      - name: Generate artifact attestation
-        uses: actions/attest-build-provenance@v1
-        with:
-          subject-name: ${{ steps.build.outputs.image }}
-          subject-digest: ${{ steps.build.outputs.digest }}
-          push-to-registry: true
+      - shell: bash
+        run: |
+          echo "${{ toJSON(steps.build.outputs) }}"
+#      - name: Generate artifact attestation
+#        uses: actions/attest-build-provenance@v1
+#        with:
+#          subject-name: ${{ steps.build.outputs.image }}
+#          subject-digest: ${{ steps.build.outputs.digest }}
+#          push-to-registry: true
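A note on the step that patch 03 comments out: actions/attest-build-provenance is documented to require extra job permissions, which is a common reason it fails in otherwise working release jobs. The sketch below shows the minimal wiring under that assumption; the subject name and digest mirror the build outputs used above, everything else is illustrative rather than this repository's final configuration.

```yaml
# Sketch: permissions the disabled attestation step would need to succeed.
release:
  runs-on: ubuntu-latest
  permissions:
    id-token: write      # OIDC token used to sign the provenance
    attestations: write  # lets the run store the attestation
  steps:
    - name: Generate artifact attestation
      uses: actions/attest-build-provenance@v1
      with:
        subject-name: ${{ steps.build.outputs.image }}
        subject-digest: ${{ steps.build.outputs.digest }}
        # push-to-registry additionally requires being logged in to that registry
        push-to-registry: true
```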
From fd096136ef15066c2ee1183362daa70269982e73 Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 07:48:23 +0200
Subject: [PATCH 04/31] updates Dockerfile

---
 .github/actions/docker_build/action.yml |  7 +++++--
 .github/workflows/test.yml              |  2 +-
 docker/Dockerfile                       | 20 +++++++++++++++++---
 3 files changed, 23 insertions(+), 6 deletions(-)

diff --git a/.github/actions/docker_build/action.yml b/.github/actions/docker_build/action.yml
index ac97b6bc..d67c619c 100644
--- a/.github/actions/docker_build/action.yml
+++ b/.github/actions/docker_build/action.yml
@@ -164,8 +164,8 @@ runs:
       with:
         context: .
         tags: ${{ steps.meta.outputs.tags }}
-        labels: "${{ steps.meta.outputs.labels }}\na=1\nb=2"
-        annotations: "${{ steps.meta.outputs.annotations }}\nchecksum=${{ inputs.checksum }}"
+        labels: "${{ steps.meta.outputs.labels }}\nchecksum=${{ inputs.code_checksum }}\ndistro=${{ inputs.target }}"
+        annotations: "${{ steps.meta.outputs.annotations }}\nchecksum=${{ inputs.code_checksum }}\ndistro=${{ inputs.target }}"
         target: ${{ inputs.target }}
         file: ./docker/Dockerfile
        platforms: linux/amd64
@@ -178,6 +178,7 @@ runs:
           GITHUB_SERVER_URL=${{ github.server_url }}
           GITHUB_REPOSITORY=${{ github.repository }}
           BUILD_DATE=${{ env.BUILD_DATE }}
+          DISTRO=${{ inputs.target }}
           CHECKSUM=${{ inputs.code_checksum }}
           VERSION=${{ steps.meta.outputs.version }}
           SOURCE_COMMIT=${{ steps.last_commit.outputs.last_commit_short_sha }}
@@ -185,4 +186,6 @@
         if: (steps.image_status.outputs.updated != 'true' || inputs.rebuild == 'true') && inputs.dryrun != 'true'
         shell: bash
         run: |
+          echo "${{ toJSON(steps.build_push.outputs) }}"
+          regctl image inspect -p linux/amd64 ${{ steps.image_name.outputs.name }}
           echo "::notice:: Image ${{ steps.meta.outputs.tags }} successfully built and pushed"

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8f40ccf6..569ce984 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -211,7 +211,7 @@ jobs:
       - shell: bash
         run: |
           echo "${{ toJSON(steps.build.outputs) }}"
-#      - name: Generate artifact attestation
+#      - name: Generate artifact attestations
 #        uses: actions/attest-build-provenance@v1
 #        with:
 #          subject-name: ${{ steps.build.outputs.image }}
 #          subject-digest: ${{ steps.build.outputs.digest }}
 #          push-to-registry: true

diff --git a/docker/Dockerfile b/docker/Dockerfile
index cfce73b8..c88d57dd 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -121,6 +121,8 @@ ARG VERSION
 ENV VERSION=$VERSION
 ARG BUILD_DATE
 ENV BUILD_DATE=$BUILD_DATE
+ARG DISTRO
+ENV DISTRO=$DISTRO
 ARG SOURCE_COMMIT
 ENV SOURCE_COMMIT=$SOURCE_COMMIT
 ARG GITHUB_SERVER_URL
@@ -132,7 +134,7 @@ ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY
 LABEL date=$BUILD_DATE
 LABEL version=$VERSION
 LABEL checksum=$CHECKSUM
-LABEL distro="builder-test"
+LABEL distro="test"

 #COPY pyproject.toml pdm.lock ./
 #COPY docker/conf/config.toml /etc/xdg/pdm/config.toml
@@ -145,7 +147,13 @@
 RUN set -x \
     && pdm sync --no-editable -v --no-self

 RUN <<EOF cat > /RELEASE
-{"version": "$VERSION", "commit": "$SOURCE_COMMIT", "date": "$BUILD_DATE", "checksum": "$CHECKSUM", "source": "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"}
+{"version": "$VERSION",
+ "commit": "$SOURCE_COMMIT",
+ "date": "$BUILD_DATE",
+ "distro": "test",
+ "checksum": "$CHECKSUM",
+ "source": "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
+}
 EOF

 FROM build_deps AS python_prod_deps
@@ -199,11 +207,17 @@ ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY

 WORKDIR /code

 COPY --chown=user:app --from=python_prod_deps /code/__pypackages__ /code/__pypackages__
+ENV PATH=${APATH}:${PATH} \
+    PYTHONPATH=${APYTHONPATH} \
+    PYTHONUNBUFFERED=1
+    PYTHONDONTWRITEBYTECODE=1
+
 RUN <<EOF cat > /RELEASE
 {"version": "$VERSION",
  "commit": "$SOURCE_COMMIT",
  "date": "$BUILD_DATE",
+ "distro": "dist",
  "checksum": "$CHECKSUM",
  "source": "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/tree/${SOURCE_COMMIT:-master}/"
 }
 EOF

From 891f57aec54841bf9e2525ece0557ad0949c14a2 Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 07:49:54 +0200
Subject: [PATCH 05/31] fixes Dockerfile

---
 docker/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index c88d57dd..13df5e25 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -210,7 +210,7 @@ WORKDIR /code
 COPY --chown=user:app --from=python_prod_deps /code/__pypackages__ /code/__pypackages__
 ENV PATH=${APATH}:${PATH} \
     PYTHONPATH=${APYTHONPATH} \
-    PYTHONUNBUFFERED=1
+    PYTHONUNBUFFERED=1 \
     PYTHONDONTWRITEBYTECODE=1

 RUN <<EOF cat > /RELEASE

From 72e5d42b7ec3845457abb68b66e6184a58accf52 Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 09:41:59 +0200
Subject: [PATCH 06/31] updates CI

---
 .github/actions/docker_build/action.yml | 8 +++++---
 .github/workflows/test.yml              | 3 ++-
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/.github/actions/docker_build/action.yml b/.github/actions/docker_build/action.yml
index d67c619c..a37dc4af 100644
--- a/.github/actions/docker_build/action.yml
+++ b/.github/actions/docker_build/action.yml
@@ -43,13 +43,13 @@ outputs:
     value: ${{ steps.meta.outputs.version }}
   created:
     description: 'True if new image has been created'
-    value: ${{ !steps.image_status.outputs.updated }}
+    value: ${{ steps.status.outputs.created }}
   digest:
     description: 'Built image digest'
-    value: ${{ !steps.build_push.outputs.digest }}
+    value: ${{ steps.build_push.outputs.digest }}
   imageId:
     description: 'Built image ID'
-    value: ${{ !steps.build_push.outputs.imageId }}
+    value: ${{ steps.build_push.outputs.imageId }}


 runs:
@@ -183,9 +183,11 @@ runs:
           VERSION=${{ steps.meta.outputs.version }}
           SOURCE_COMMIT=${{ steps.last_commit.outputs.last_commit_short_sha }}
       - name: Status
+        id: status
         if: (steps.image_status.outputs.updated != 'true' || inputs.rebuild == 'true') && inputs.dryrun != 'true'
         shell: bash
         run: |
           echo "${{ toJSON(steps.build_push.outputs) }}"
           regctl image inspect -p linux/amd64 ${{ steps.image_name.outputs.name }}
           echo "::notice:: Image ${{ steps.meta.outputs.tags }} successfully built and pushed"
+          echo "created=true" >> $GITHUB_OUTPUT

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 569ce984..e818e36d 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -86,7 +86,7 @@ jobs:
         uses: actions/checkout@v4.1.7
       - id: checksum
         uses: ./.github/actions/checksum
-      - name: Build and Test
+      - name: Build Image
         id: build
         uses: ./.github/actions/docker_build
         with:
@@ -179,6 +179,7 @@ jobs:
       refs/heads/develop
       refs/heads/staging
       refs/heads/master
+      refs/heads/release/
       ', github.ref) || contains(github.event.head_commit.message, 'ci:release') || github.event_name == 'create'
     name: "Release Docker"
     needs: [ test ]
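Patch 06's output rewrite deserves a line of explanation: step outputs are strings, and in GitHub expressions `!` only distinguishes empty from non-empty, so the old `!steps.image_status.outputs.updated` evaluated to false whether the step wrote 'true' or 'false'. Hence the explicit `status` step plus a string comparison. A small illustration, with assumed step names, for demonstration only:

```yaml
# Illustration: string coercion in GitHub Actions expressions.
- id: image_status
  shell: bash
  run: echo "updated=false" >> $GITHUB_OUTPUT
- shell: bash
  run: |
    # Any non-empty string is truthy, so the negation prints "false"
    # even though the output's value is the string 'false':
    echo "${{ !steps.image_status.outputs.updated }}"
    # Comparing against the literal string gives the intended boolean:
    echo "${{ steps.image_status.outputs.updated != 'true' }}"
```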
From ddd60ad46af467d577b55882a6206fe43e2fc0a2 Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 10:13:58 +0200
Subject: [PATCH 07/31] updates CI

---
 .github/file-filters.yml | 3 ++-
 docker/Dockerfile        | 6 ++++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/file-filters.yml b/.github/file-filters.yml
index 2579804c..c9602022 100644
--- a/.github/file-filters.yml
+++ b/.github/file-filters.yml
@@ -1,6 +1,7 @@
 # This is used by the action https://github.com/dorny/paths-filter
 docker: &docker
-  - added|modified: './docker/**'
+  - added|modified: './docker/**/*'
+  - added|modified: './docker/*'

 dependencies: &dependencies
   - 'pdm.lock'

diff --git a/docker/Dockerfile b/docker/Dockerfile
index 13df5e25..12de2cc2 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -208,6 +208,8 @@ ENV GITHUB_REPOSITORY=$GITHUB_REPOSITORY

 WORKDIR /code

 COPY --chown=user:app --from=python_prod_deps /code/__pypackages__ /code/__pypackages__
+COPY --chown=user:app --from=python_prod_deps /code/README.md /code/LICENSE /
+
 ENV PATH=${APATH}:${PATH} \
     PYTHONPATH=${APYTHONPATH} \
     PYTHONUNBUFFERED=1 \
     PYTHONDONTWRITEBYTECODE=1
@@ -229,8 +231,8 @@ ENTRYPOINT exec docker-entrypoint.sh "$0" "$@"
 CMD ["run"]

 LABEL distro="final"
-LABEL maintainer="hope@app.io"
-LABEL org.opencontainers.image.authors="author@app.io"
+LABEL maintainer="hope@unicef.org"
+LABEL org.opencontainers.image.authors="hope@unicef.org"
 LABEL org.opencontainers.image.created="$BUILD_DATE"
 LABEL org.opencontainers.image.description="App runtime image"
 LABEL org.opencontainers.image.documentation="https://github.com/saxix/trash"
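The glob split above suggests the single `'./docker/**'` pattern was not matching everything intended. For context, these filters feed dorny/paths-filter roughly as sketched below; the job and step names are illustrative, not this repository's exact wiring.

```yaml
# Hypothetical minimal consumer of .github/file-filters.yml.
changes:
  runs-on: ubuntu-latest
  outputs:
    docker: ${{ steps.filter.outputs.docker }}
  steps:
    - uses: actions/checkout@v4
    - id: filter
      uses: dorny/paths-filter@v3
      with:
        filters: .github/file-filters.yml
    - if: steps.filter.outputs.docker == 'true'
      run: echo "docker files changed, an image rebuild is required"
```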
From 9c956a633b08129726b7254b5b127076befdc770 Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 10:46:12 +0200
Subject: [PATCH 08/31] add SECURITY.md

---
 SECURITY.md | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 SECURITY.md

diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..e6279e61
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,20 @@
+Security
+Reporting Security Issues
+If you've found a security issue in HDE, you can submit your report to hope-security[@]unicef.org via email.
+
+Please include as much information as possible in your report to better help us understand and resolve the issue:
+
+Where the security issue exists (ie. HDE Core, API subsystem, etc.)
+The type of issue (ex. SQL injection, cross-site scripting, missing authorization, etc.)
+Full paths or links to the source files where the security issue exists, if possible
+Any special configuration required to reproduce the issue
+Step-by-step instructions to reproduce the issue
+Proof of concept or exploit code, if available
+
+If you need to encrypt sensitive information sent to us, please use our [PGP key](https://keys.openpgp.org/vks/v1/by-fingerprint/F72BF087F3A94FE4A305CE449061F6AC06E40F32):
+
+
+F72B F087 F3A9 4FE4 A305 CE44 9061 F6AC 06E4 0F32
+
+
+hkps://keys.openpgp.org

From f7b9b7654d38308ba93f3e24b826afe532b236d5 Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 10:49:12 +0200
Subject: [PATCH 09/31] updates SECURITY.md

---
 SECURITY.md | 22 ++++++++++------------
 1 file changed, 10 insertions(+), 12 deletions(-)

diff --git a/SECURITY.md b/SECURITY.md
index e6279e61..c238822c 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -1,20 +1,18 @@
-Security
-Reporting Security Issues
+# Security
+
+## Reporting Security Issues
+
 If you've found a security issue in HDE, you can submit your report to hope-security[@]unicef.org via email.

 Please include as much information as possible in your report to better help us understand and resolve the issue:

-Where the security issue exists (ie. HDE Core, API subsystem, etc.)
-The type of issue (ex. SQL injection, cross-site scripting, missing authorization, etc.)
-Full paths or links to the source files where the security issue exists, if possible
-Any special configuration required to reproduce the issue
-Step-by-step instructions to reproduce the issue
-Proof of concept or exploit code, if available
+- Where the security issue exists (i.e. HDE Core, API subsystem, etc.)
+- The type of issue (e.g. SQL injection, cross-site scripting, missing authorization, etc.)
+- Full paths or links to the source files where the security issue exists, if possible
+- Any special configuration required to reproduce the issue
+- Step-by-step instructions to reproduce the issue
+- Proof of concept or exploit code, if available

 If you need to encrypt sensitive information sent to us, please use our [PGP key](https://keys.openpgp.org/vks/v1/by-fingerprint/F72BF087F3A94FE4A305CE449061F6AC06E40F32):

-
 F72B F087 F3A9 4FE4 A305 CE44 9061 F6AC 06E4 0F32
-
-
-hkps://keys.openpgp.org

From 2e43e8fb3323f7f8052c3450493cfda06b5bf63f Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 11:58:52 +0200
Subject: [PATCH 10/31] updates deps

---
 pdm.lock | 280 ++++++++++++++++++++++++++++---------------------------
 1 file changed, 142 insertions(+), 138 deletions(-)

diff --git a/pdm.lock b/pdm.lock
index f9eba18d..14fed7b3 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -59,8 +59,8 @@ files = [

 [[package]]
 name = "azure-core"
-version = "1.30.1"
-requires_python = ">=3.7"
+version = "1.30.2"
+requires_python = ">=3.8"
 summary = "Microsoft Azure Core Library for Python"
 groups = ["default"]
 dependencies = [
     "requests>=2.21.0",
     "six>=1.11.0",
     "typing-extensions>=4.6.0",
 ]
 files = [
-    {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"},
-    {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"},
+    {file = "azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472"},
+    {file = "azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a"},
 ]

 [[package]]
 name = "certifi"
-version = "2024.2.2"
+version = "2024.6.2"
 requires_python = ">=3.6"
 summary = "Python package for providing Mozilla's CA Bundle."
 groups = ["default", "dev"]
 files = [
-    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
-    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+    {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
+    {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
 ]

 [[package]]
 name = "cryptography"
-version = "42.0.7"
+version = "42.0.8"
 requires_python = ">=3.7"
 summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
groups = ["default", "dev"] @@ -381,38 +381,38 @@ dependencies = [ "cffi>=1.12; platform_python_implementation != \"PyPy\"", ] files = [ - {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, - {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, - {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, - {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, - {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, - {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, - {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, - {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, - {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, - {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, - {file = 
"cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, - {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, - {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, - {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = 
"cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [[package]] @@ -742,16 +742,16 @@ files = [ [[package]] name = "djangorestframework" -version = "3.15.1" -requires_python = ">=3.6" +version = "3.15.2" +requires_python = ">=3.8" summary = "Web APIs for Django, made easy." 
groups = ["default"] dependencies = [ - "django>=3.0", + "django>=4.2", ] files = [ - {file = "djangorestframework-3.15.1-py3-none-any.whl", hash = "sha256:3ccc0475bce968608cf30d07fb17d8e52d1d7fc8bfe779c905463200750cbca6"}, - {file = "djangorestframework-3.15.1.tar.gz", hash = "sha256:f88fad74183dfc7144b2756d0d2ac716ea5b4c7c9840995ac3bfd8ec034333c1"}, + {file = "djangorestframework-3.15.2-py3-none-any.whl", hash = "sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20"}, + {file = "djangorestframework-3.15.2.tar.gz", hash = "sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad"}, ] [[package]] @@ -815,7 +815,7 @@ files = [ [[package]] name = "drf-spectacular-sidecar" -version = "2024.5.1" +version = "2024.6.1" requires_python = ">=3.6" summary = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" groups = ["default"] @@ -823,8 +823,8 @@ dependencies = [ "Django>=2.2", ] files = [ - {file = "drf_spectacular_sidecar-2024.5.1-py3-none-any.whl", hash = "sha256:089fdef46b520b7b1c8a497a398cde9336c3f20b115835baeb158dc4138d743d"}, - {file = "drf_spectacular_sidecar-2024.5.1.tar.gz", hash = "sha256:1ecfbe86174461e3cf78a9cd49f69aa8d9e0710cb5e8b35107d3f8cc0f380c21"}, + {file = "drf_spectacular_sidecar-2024.6.1-py3-none-any.whl", hash = "sha256:5ad678c788dcb36697a668884c6fdac2c511a4094cb010978bd01a6345197bbb"}, + {file = "drf_spectacular_sidecar-2024.6.1.tar.gz", hash = "sha256:eed744c26d2caff815fd67d89eca685f645479f07fb86c124d8ee26a13b1d960"}, ] [[package]] @@ -919,7 +919,7 @@ files = [ [[package]] name = "faker" -version = "25.3.0" +version = "25.9.1" requires_python = ">=3.8" summary = "Faker is a Python package that generates fake data for you." groups = ["dev"] @@ -927,35 +927,35 @@ dependencies = [ "python-dateutil>=2.4", ] files = [ - {file = "Faker-25.3.0-py3-none-any.whl", hash = "sha256:0158d47e955b6ec22134c0a74ebb7ed34fe600896208bafbf1008db831b17f04"}, - {file = "Faker-25.3.0.tar.gz", hash = "sha256:bcbe31eee5ef4bbf87ce36c4eba53c01e2a1d912fde2a4d3528b430d2beb784f"}, + {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"}, + {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"}, ] [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.3" requires_python = ">=3.8" summary = "A platform independent file lock." 
groups = ["dev"] files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, + {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, ] [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" requires_python = ">=3.8.1" summary = "the modular source code checker: pep8 pyflakes and co" groups = ["dev"] dependencies = [ "mccabe<0.8.0,>=0.7.0", - "pycodestyle<2.12.0,>=2.11.0", + "pycodestyle<2.13.0,>=2.12.0", "pyflakes<3.3.0,>=3.2.0", ] files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [[package]] @@ -1091,7 +1091,7 @@ files = [ [[package]] name = "ipython" -version = "8.24.0" +version = "8.25.0" requires_python = ">=3.10" summary = "IPython: Productive Interactive Computing" groups = ["dev"] @@ -1108,8 +1108,8 @@ dependencies = [ "traitlets>=5.13.0", ] files = [ - {file = "ipython-8.24.0-py3-none-any.whl", hash = "sha256:d7bf2f6c4314984e3e02393213bab8703cf163ede39672ce5918c51fe253a2a3"}, - {file = "ipython-8.24.0.tar.gz", hash = "sha256:010db3f8a728a578bb641fdd06c063b9fb8e96a9464c63aec6310fbcb5e80501"}, + {file = "ipython-8.25.0-py3-none-any.whl", hash = "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab"}, + {file = "ipython-8.25.0.tar.gz", hash = "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716"}, ] [[package]] @@ -1358,34 +1358,37 @@ files = [ [[package]] name = "nodeenv" -version = "1.9.0" +version = "1.9.1" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" summary = "Node.js virtual environment builder" groups = ["dev"] files = [ - {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, - {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.0" requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" groups = ["default"] files = [ - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = 
"numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, + {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, + {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, + {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, ] [[package]] @@ -1401,7 +1404,7 @@ files = [ [[package]] name = 
"opencv-python" -version = "4.9.0.80" +version = "4.10.0.84" requires_python = ">=3.6" summary = "Wrapper package for OpenCV python bindings." groups = ["default"] @@ -1416,27 +1419,27 @@ dependencies = [ "numpy>=1.26.0; python_version >= \"3.12\"", ] files = [ - {file = "opencv-python-4.9.0.80.tar.gz", hash = "sha256:1a9f0e6267de3a1a1db0c54213d022c7c8b5b9ca4b580e80bdc58516c922c9e1"}, - {file = "opencv_python-4.9.0.80-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:7e5f7aa4486651a6ebfa8ed4b594b65bd2d2f41beeb4241a3e4b1b85acbbbadb"}, - {file = "opencv_python-4.9.0.80-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71dfb9555ccccdd77305fc3dcca5897fbf0cf28b297c51ee55e079c065d812a3"}, - {file = "opencv_python-4.9.0.80-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b34a52e9da36dda8c151c6394aed602e4b17fa041df0b9f5b93ae10b0fcca2a"}, - {file = "opencv_python-4.9.0.80-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4088cab82b66a3b37ffc452976b14a3c599269c247895ae9ceb4066d8188a57"}, - {file = "opencv_python-4.9.0.80-cp37-abi3-win32.whl", hash = "sha256:dcf000c36dd1651118a2462257e3a9e76db789a78432e1f303c7bac54f63ef6c"}, - {file = "opencv_python-4.9.0.80-cp37-abi3-win_amd64.whl", hash = "sha256:3f16f08e02b2a2da44259c7cc712e779eff1dd8b55fdb0323e8cab09548086c0"}, + {file = "opencv-python-4.10.0.84.tar.gz", hash = "sha256:72d234e4582e9658ffea8e9cae5b63d488ad06994ef12d81dc303b17472f3526"}, + {file = "opencv_python-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc182f8f4cda51b45f01c64e4cbedfc2f00aff799debebc305d8d0210c43f251"}, + {file = "opencv_python-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:71e575744f1d23f79741450254660442785f45a0797212852ee5199ef12eed98"}, + {file = "opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09a332b50488e2dda866a6c5573ee192fe3583239fb26ff2f7f9ceb0bc119ea6"}, + {file = "opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ace140fc6d647fbe1c692bcb2abce768973491222c067c131d80957c595b71f"}, + {file = "opencv_python-4.10.0.84-cp37-abi3-win32.whl", hash = "sha256:2db02bb7e50b703f0a2d50c50ced72e95c574e1e5a0bb35a8a86d0b35c98c236"}, + {file = "opencv_python-4.10.0.84-cp37-abi3-win_amd64.whl", hash = "sha256:32dbbd94c26f611dc5cc6979e6b7aa1f55a64d6b463cc1dcd3c95505a63e48fe"}, ] [[package]] name = "openpyxl" -version = "3.1.2" -requires_python = ">=3.6" +version = "3.1.4" +requires_python = ">=3.8" summary = "A Python library to read/write Excel 2010 xlsx/xlsm files" groups = ["dev"] dependencies = [ "et-xmlfile", ] files = [ - {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, - {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, + {file = "openpyxl-3.1.4-py2.py3-none-any.whl", hash = "sha256:ec17f6483f2b8f7c88c57e5e5d3b0de0e3fb9ac70edc084d28e864f5b33bbefd"}, + {file = "openpyxl-3.1.4.tar.gz", hash = "sha256:8d2c8adf5d20d6ce8f9bca381df86b534835e974ed0156dacefa76f68c1d69fb"}, ] [[package]] @@ -1456,13 +1459,13 @@ files = [ [[package]] name = "packaging" -version = "24.0" -requires_python = ">=3.7" +version = "24.1" +requires_python = ">=3.8" summary = "Core utilities for Python packages" groups = ["dev"] files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = 
"packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -1579,7 +1582,7 @@ files = [ [[package]] name = "prompt-toolkit" -version = "3.0.45" +version = "3.0.47" requires_python = ">=3.7.0" summary = "Library for building powerful interactive command lines in Python" groups = ["default", "dev"] @@ -1587,24 +1590,25 @@ dependencies = [ "wcwidth", ] files = [ - {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"}, - {file = "prompt_toolkit-3.0.45.tar.gz", hash = "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"}, + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] [[package]] name = "psutil" -version = "5.9.8" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +version = "6.0.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" summary = "Cross-platform lib for process and system monitoring in Python." groups = ["dev"] files = [ - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, ] [[package]] @@ -1664,13 +1668,13 @@ files = [ [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" requires_python = ">=3.8" summary = "Python style guide checker" groups = ["dev"] files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -1720,7 +1724,7 @@ files = [ [[package]] name = "pytest" -version = "8.2.1" +version = "8.2.2" requires_python = ">=3.8" summary = "pytest: simple powerful testing with Python" groups = ["dev"] @@ -1731,8 +1735,8 @@ dependencies = [ "pluggy<2.0,>=1.5", ] files = [ - {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, - {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, ] [[package]] @@ -1940,13 +1944,13 @@ files = [ [[package]] name = "redis" -version = "5.0.4" +version = "5.0.6" requires_python = ">=3.7" summary = "Python client for Redis database and key-value store" groups = ["default"] files = [ - {file = "redis-5.0.4-py3-none-any.whl", hash = "sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91"}, - {file = "redis-5.0.4.tar.gz", hash = "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61"}, + {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"}, + {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"}, ] [[package]] @@ -2012,7 +2016,7 @@ files = [ [[package]] name = "responses" -version = "0.25.0" +version = "0.25.3" requires_python = ">=3.8" summary = "A utility library for mocking out the `requests` Python library." 
groups = ["dev"] @@ -2022,8 +2026,8 @@ dependencies = [ "urllib3<3.0,>=1.25.10", ] files = [ - {file = "responses-0.25.0-py3-none-any.whl", hash = "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a"}, - {file = "responses-0.25.0.tar.gz", hash = "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66"}, + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, ] [[package]] @@ -2098,7 +2102,7 @@ files = [ [[package]] name = "sentry-sdk" -version = "2.3.1" +version = "2.6.0" requires_python = ">=3.6" summary = "Python client for Sentry (https://sentry.io)" groups = ["default"] @@ -2107,13 +2111,13 @@ dependencies = [ "urllib3>=1.26.11", ] files = [ - {file = "sentry_sdk-2.3.1-py2.py3-none-any.whl", hash = "sha256:c5aeb095ba226391d337dd42a6f9470d86c9fc236ecc71cfc7cd1942b45010c6"}, - {file = "sentry_sdk-2.3.1.tar.gz", hash = "sha256:139a71a19f5e9eb5d3623942491ce03cf8ebc14ea2e39ba3e6fe79560d8a5b1f"}, + {file = "sentry_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:422b91cb49378b97e7e8d0e8d5a1069df23689d45262b86f54988a7db264e874"}, + {file = "sentry_sdk-2.6.0.tar.gz", hash = "sha256:65cc07e9c6995c5e316109f138570b32da3bd7ff8d0d0ee4aaf2628c3dd8127d"}, ] [[package]] name = "sentry-sdk" -version = "2.3.1" +version = "2.6.0" extras = ["celery", "django"] requires_python = ">=3.6" summary = "Python client for Sentry (https://sentry.io)" @@ -2121,22 +2125,22 @@ groups = ["default"] dependencies = [ "celery>=3", "django>=1.8", - "sentry-sdk==2.3.1", + "sentry-sdk==2.6.0", ] files = [ - {file = "sentry_sdk-2.3.1-py2.py3-none-any.whl", hash = "sha256:c5aeb095ba226391d337dd42a6f9470d86c9fc236ecc71cfc7cd1942b45010c6"}, - {file = "sentry_sdk-2.3.1.tar.gz", hash = "sha256:139a71a19f5e9eb5d3623942491ce03cf8ebc14ea2e39ba3e6fe79560d8a5b1f"}, + {file = "sentry_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:422b91cb49378b97e7e8d0e8d5a1069df23689d45262b86f54988a7db264e874"}, + {file = "sentry_sdk-2.6.0.tar.gz", hash = "sha256:65cc07e9c6995c5e316109f138570b32da3bd7ff8d0d0ee4aaf2628c3dd8127d"}, ] [[package]] name = "setuptools" -version = "70.0.0" +version = "70.1.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["dev"] files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, + {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, ] [[package]] @@ -2313,7 +2317,7 @@ files = [ [[package]] name = "types-requests" -version = "2.32.0.20240523" +version = "2.32.0.20240602" requires_python = ">=3.8" summary = "Typing stubs for requests" groups = ["dev"] @@ -2321,8 +2325,8 @@ dependencies = [ "urllib3>=2", ] files = [ - {file = "types-requests-2.32.0.20240523.tar.gz", hash = "sha256:26b8a6de32d9f561192b9942b41c0ab2d8010df5677ca8aa146289d11d505f57"}, - {file = "types_requests-2.32.0.20240523-py3-none-any.whl", hash = "sha256:f19ed0e2daa74302069bbbbf9e82902854ffa780bc790742a810a9aaa52f65ec"}, + {file = 
"types-requests-2.32.0.20240602.tar.gz", hash = "sha256:3f98d7bbd0dd94ebd10ff43a7fbe20c3b8528acace6d8efafef0b6a184793f06"}, + {file = "types_requests-2.32.0.20240602-py3-none-any.whl", hash = "sha256:ed3946063ea9fbc6b5fc0c44fa279188bae42d582cb63760be6cb4b9d06c3de8"}, ] [[package]] @@ -2338,13 +2342,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.0" +version = "4.12.2" requires_python = ">=3.8" summary = "Backported and Experimental Type Hints for Python 3.8+" groups = ["default", "dev"] files = [ - {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, - {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -2407,22 +2411,22 @@ files = [ [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" requires_python = ">=3.8" summary = "HTTP library with thread-safe connection pooling, file post, and more." groups = ["default", "dev"] files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [[package]] name = "uwsgi" -version = "2.0.25.1" +version = "2.0.26" summary = "The uWSGI server" groups = ["default"] files = [ - {file = "uwsgi-2.0.25.1.tar.gz", hash = "sha256:d653d2d804c194c8cbe2585fa56efa2650313ae75c686a9d7931374d4dfbfc6e"}, + {file = "uwsgi-2.0.26.tar.gz", hash = "sha256:86e6bfcd4dc20529665f5b7777193cdc48622fb2c59f0a7f1e3dc32b3882e7f9"}, ] [[package]] From fb71ff3fd4ef5c03274b425d9a072dad497d84ff Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:26:27 +0200 Subject: [PATCH 11/31] updates CI ci:debug --- .github/workflows/dump.yml | 92 ++++++++++++++++++++++++++++++++++++++ .github/workflows/test.yml | 2 +- 2 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/dump.yml diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml new file mode 100644 index 00000000..ca4c20ca --- /dev/null +++ b/.github/workflows/dump.yml @@ -0,0 +1,92 @@ +name: "[DEBUG] Dump" + +on: + check_run: + create: + delete: + discussion: + discussion_comment: + fork: + issues: + issue_comment: + milestone: + pull_request: + pull_request_review_comment: + pull_request_review: + push: + release: + workflow_dispatch: + + +jobs: + dump: + name: "[DEBUG] Echo Full Context" + if: ${{ contains(github.event.head_commit.message, 'ci:debug') }} + runs-on: [ubuntu-latest, self-hosted] + steps: + - name: Inspect + run: | + echo "${{ contains('refs/heads/develop\nrefs/heads/staging\nrefs/heads/master\nrefs/heads/release', github.ref) }} + echo "${{ contains('refs/heads/develop\nrefs/heads/staging\nrefs/heads/master\nrefs/heads/release', github.ref) }} + + - name: Dump Env vars + run: | + echo "====== ENVIRONMENT =================" + env | sort + echo 
"====================================" + - name: Dump GitHub context + env: + GITHUB_CONTEXT: ${{ toJSON(github) }} + run: | + echo "====== GITHUB_CONTEXT ==============" + echo "$GITHUB_CONTEXT" + echo "====================================" + - name: Dump job context + env: + JOB_CONTEXT: ${{ toJSON(job) }} + run: | + echo "====== JOB_CONTEXT ==============" + echo "$JOB_CONTEXT" + echo "====================================" + - name: Dump steps context + env: + STEPS_CONTEXT: ${{ toJSON(steps) }} + run: | + echo "====== STEPS_CONTEXT ==============" + echo "$STEPS_CONTEXT" + echo "====================================" + - name: Dump runner context + env: + RUNNER_CONTEXT: ${{ toJSON(runner) }} + run: | + echo "====== RUNNER_CONTEXT ==============" + echo "$RUNNER_CONTEXT" + echo "====================================" + - name: Dump strategy context + env: + STRATEGY_CONTEXT: ${{ toJSON(strategy) }} + run: | + echo "====== STRATEGY_CONTEXT ==============" + echo "$STRATEGY_CONTEXT" + echo "====================================" + - name: Dump matrix context + env: + MATRIX_CONTEXT: ${{ toJSON(matrix) }} + run: | + echo "====== MATRIX_CONTEXT ==============" + echo "$MATRIX_CONTEXT" + echo "====================================" + - name: Dump vars context + env: + VARS_CONTEXT: ${{ toJSON(vars) }} + run: | + echo "====== VARS ==============" + echo "$VARS_CONTEXT" + echo "====================================" + - name: Dump env context + env: + ENV_CONTEXT: ${{ toJSON(env) }} + run: | + echo "====== ENV ==============" + echo "$ENV_CONTEXT" + echo "====================================" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e818e36d..5068efc1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -179,7 +179,7 @@ jobs: refs/heads/develop refs/heads/staging refs/heads/master - refs/heads/release/ + refs/heads/release ', github.ref) || contains(github.event.head_commit.message, 'ci:release') || github.event_name == 'create' name: "Release Docker" From c4610fb5030c46d5626caf7942ebc760e650b0a5 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:28:03 +0200 Subject: [PATCH 12/31] updates CI ci:debug --- .github/workflows/dump.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml index ca4c20ca..96212ea9 100644 --- a/.github/workflows/dump.yml +++ b/.github/workflows/dump.yml @@ -22,12 +22,12 @@ jobs: dump: name: "[DEBUG] Echo Full Context" if: ${{ contains(github.event.head_commit.message, 'ci:debug') }} - runs-on: [ubuntu-latest, self-hosted] + runs-on: ubuntu-latest steps: - name: Inspect run: | echo "${{ contains('refs/heads/develop\nrefs/heads/staging\nrefs/heads/master\nrefs/heads/release', github.ref) }} - echo "${{ contains('refs/heads/develop\nrefs/heads/staging\nrefs/heads/master\nrefs/heads/release', github.ref) }} + echo "${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release', github.ref) }} - name: Dump Env vars run: | From 45f518344050c92b0452f9faafd99e75f8aa7a06 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:36:02 +0200 Subject: [PATCH 13/31] updates CI ci:debug --- .github/workflows/dump.yml | 5 +++-- .github/workflows/test.yml | 10 ++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml index 96212ea9..ffa7159e 100644 --- a/.github/workflows/dump.yml +++ b/.github/workflows/dump.yml @@ -26,8 +26,9 @@ jobs: steps: - 
name: Inspect run: | - echo "${{ contains('refs/heads/develop\nrefs/heads/staging\nrefs/heads/master\nrefs/heads/release', github.ref) }} - echo "${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release', github.ref) }} + echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master"]'), github.ref) }} + echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) }} + echo ${{ startsWith('refs/heads/release', github.ref) }} - name: Dump Env vars run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5068efc1..61966bed 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -175,12 +175,10 @@ jobs: release: if: - contains(' - refs/heads/develop - refs/heads/staging - refs/heads/master - refs/heads/release - ', github.ref) || contains(github.event.head_commit.message, 'ci:release') || github.event_name == 'create' + (contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) + || startsWith('refs/heads/release', github.ref + || contains(github.event.head_commit.message, 'ci:release') + || github.event_name == 'create') name: "Release Docker" needs: [ test ] From d4ff13574ed294335a167c6c8afe9a5ffc89491b Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:38:06 +0200 Subject: [PATCH 14/31] updates CI ci:debug --- .github/workflows/test.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 61966bed..bc1fed5c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -175,10 +175,10 @@ jobs: release: if: - (contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) - || startsWith('refs/heads/release', github.ref + contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) + || startsWith('refs/heads/release', github.ref) || contains(github.event.head_commit.message, 'ci:release') - || github.event_name == 'create') + || github.event_name == 'create' name: "Release Docker" needs: [ test ] From a7242d49400d4c3476820116f94206c7a89af4b9 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:39:58 +0200 Subject: [PATCH 15/31] 2024-06-21 13:39 - updates CI ci:debug --- .github/workflows/dump.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml index ffa7159e..5dee41dd 100644 --- a/.github/workflows/dump.yml +++ b/.github/workflows/dump.yml @@ -26,8 +26,10 @@ jobs: steps: - name: Inspect run: | + echo ${{ github.ref }} echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master"]'), github.ref) }} echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) }} + echo ${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release'), github.ref) }} echo ${{ startsWith('refs/heads/release', github.ref) }} - name: Dump Env vars From b6cb8184b7c05403d98fd9628364b108fa6e0e8f Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:40:33 +0200 Subject: [PATCH 16/31] 2024-06-21 13:40 - updates CI ci:debug --- .github/workflows/dump.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dump.yml 
b/.github/workflows/dump.yml index 5dee41dd..c8e065ee 100644 --- a/.github/workflows/dump.yml +++ b/.github/workflows/dump.yml @@ -29,7 +29,7 @@ jobs: echo ${{ github.ref }} echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master"]'), github.ref) }} echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) }} - echo ${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release'), github.ref) }} + echo ${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release', github.ref) }} echo ${{ startsWith('refs/heads/release', github.ref) }} - name: Dump Env vars From a4ef12ad7900a0d849320ccb5d6f234de024660c Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:43:27 +0200 Subject: [PATCH 17/31] 2024-06-21 13:43 - updates CI ci:debug --- .github/workflows/dump.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml index c8e065ee..62c066da 100644 --- a/.github/workflows/dump.yml +++ b/.github/workflows/dump.yml @@ -30,7 +30,7 @@ jobs: echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master"]'), github.ref) }} echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) }} echo ${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release', github.ref) }} - echo ${{ startsWith('refs/heads/release', github.ref) }} + echo ${{ startsWith(github.ref, 'refs/heads/release') }} - name: Dump Env vars run: | From c5f7ae1ab370a221077970a37026173cd907fb16 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:45:07 +0200 Subject: [PATCH 18/31] 2024-06-21 13:45 - updates CI ci:debug --- .github/workflows/dump.yml | 8 -------- .github/workflows/test.yml | 2 +- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/.github/workflows/dump.yml b/.github/workflows/dump.yml index 62c066da..461d80c9 100644 --- a/.github/workflows/dump.yml +++ b/.github/workflows/dump.yml @@ -24,14 +24,6 @@ jobs: if: ${{ contains(github.event.head_commit.message, 'ci:debug') }} runs-on: ubuntu-latest steps: - - name: Inspect - run: | - echo ${{ github.ref }} - echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master"]'), github.ref) }} - echo ${{ contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) }} - echo ${{ contains('refs/heads/develop refs/heads/staging refs/heads/master refs/heads/release', github.ref) }} - echo ${{ startsWith(github.ref, 'refs/heads/release') }} - - name: Dump Env vars run: | echo "====== ENVIRONMENT =================" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index bc1fed5c..5f1b4d78 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -176,7 +176,7 @@ jobs: release: if: contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) - || startsWith('refs/heads/release', github.ref) + || startsWith(github.ref, 'refs/heads/release') || contains(github.event.head_commit.message, 'ci:release') || github.event_name == 'create' From 9667ea8cd6468250b3e3f11548bcabe078dca22c Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 13:59:30 +0200 Subject: [PATCH 19/31] 2024-06-21 13:59 - updates CI ci:all --- 
.github/workflows/test.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5f1b4d78..8dad0107 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -91,7 +91,7 @@ jobs: uses: ./.github/actions/docker_build with: dryrun: ${{ env.ACT || 'false' }} - rebuild: ${{ contains(github.event.head_commit.message, 'ci:build') }} + rebuild: ${{ contains(fromJSON('["ci:test", "ci:all", "ci:build"]'), github.event.head_commit.message) }} image: ${{ vars.DOCKER_IMAGE }} target: 'python_dev_deps' username: ${{ secrets.DOCKERHUB_USERNAME }} @@ -102,7 +102,7 @@ jobs: name: Run Test Suite needs: [ changes,build ] if: (needs.changes.outputs.run_tests == 'true' - || contains(github.event.head_commit.message, 'ci:test') + || contains(fromJSON('["ci:test", "ci:all"]'), github.event.head_commit.message) || github.event_name == 'create') runs-on: ubuntu-latest services: @@ -156,7 +156,9 @@ jobs: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status - if: needs.build.outputs.created == 'true' || contains(github.event.head_commit.message, 'ci:scan') || github.event_name == 'create' + if: needs.build.outputs.created == 'true' + || contains(fromJSON('["ci:scan", "ci:all", "ci:build"]'), github.event.head_commit.message) + || github.event_name == 'create' steps: - name: Checkout code uses: actions/checkout@v4 @@ -177,7 +179,7 @@ jobs: if: contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) || startsWith(github.ref, 'refs/heads/release') - || contains(github.event.head_commit.message, 'ci:release') + || contains(fromJSON('["ci:release", "ci:all"]'), github.event.head_commit.message) || github.event_name == 'create' name: "Release Docker" From 421e17f3e4112147de3e953acbce4ad7bbeca822 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 15:05:45 +0200 Subject: [PATCH 20/31] 2024-06-21 15:05 - updates CI ci:all --- .github/workflows/test.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8dad0107..60c90837 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -91,7 +91,7 @@ jobs: uses: ./.github/actions/docker_build with: dryrun: ${{ env.ACT || 'false' }} - rebuild: ${{ contains(fromJSON('["ci:test", "ci:all", "ci:build"]'), github.event.head_commit.message) }} + rebuild: ${{ env.BUILD == 'true'}} image: ${{ vars.DOCKER_IMAGE }} target: 'python_dev_deps' username: ${{ secrets.DOCKERHUB_USERNAME }} @@ -102,7 +102,8 @@ jobs: name: Run Test Suite needs: [ changes,build ] if: (needs.changes.outputs.run_tests == 'true' - || contains(fromJSON('["ci:test", "ci:all"]'), github.event.head_commit.message) + || contains(github.event.head_commit.message, 'ci:test') + || contains(github.event.head_commit.message, 'ci:all') || github.event_name == 'create') runs-on: ubuntu-latest services: @@ -157,7 +158,8 @@ jobs: security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status if: needs.build.outputs.created == 'true' - || contains(fromJSON('["ci:scan", "ci:all", "ci:build"]'), 
github.event.head_commit.message) + || contains(github.event.head_commit.message, 'ci:scan') + || contains(github.event.head_commit.message, 'ci:all') || github.event_name == 'create' steps: - name: Checkout code @@ -179,7 +181,8 @@ jobs: if: contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) || startsWith(github.ref, 'refs/heads/release') - || contains(fromJSON('["ci:release", "ci:all"]'), github.event.head_commit.message) + || contains(github.event.head_commit.message, 'ci:release') + || contains(github.event.head_commit.message, 'ci:all') || github.event_name == 'create' name: "Release Docker" From c14f10160da7f7e0bada180e5ca2079aef28e5e5 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 15:12:07 +0200 Subject: [PATCH 21/31] 2024-06-21 15:12 - updates CI ci:all --- .github/workflows/security.yml | 2 +- .github/workflows/test.yml | 66 ++++++++++++++++------------------ 2 files changed, 31 insertions(+), 37 deletions(-) diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml index b5ed8fb1..5771ff76 100644 --- a/.github/workflows/security.yml +++ b/.github/workflows/security.yml @@ -61,7 +61,7 @@ jobs: # Github token of the repository (automatically created by Github) GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information. # File or directory to run bandit on - # path: # optional, default is . + path: src # optional, default is . # Report only issues of a given severity level or higher. Can be LOW, MEDIUM or HIGH. Default is UNDEFINED (everything) # level: # optional, default is UNDEFINED # Report only issues of a given confidence level or higher. Can be LOW, MEDIUM or HIGH. Default is UNDEFINED (everything) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 60c90837..1969422d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -149,35 +149,7 @@ jobs: verbose: false name: codecov-${{env.GITHUB_REF_NAME}} - trivy: - name: Check Image with Trivy - runs-on: ubuntu-latest - needs: [ build ] - permissions: - contents: read # for actions/checkout to fetch code - security-events: write # for github/codeql-action/upload-sarif to upload SARIF results - actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status - if: needs.build.outputs.created == 'true' - || contains(github.event.head_commit.message, 'ci:scan') - || contains(github.event.head_commit.message, 'ci:all') - || github.event_name == 'create' - steps: - - name: Checkout code - uses: actions/checkout@v4 - - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@master - with: - image-ref: ${{needs.build.outputs.image}} - format: 'sarif' - output: 'trivy-results.sarif' - severity: 'CRITICAL,HIGH' - - - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 - with: - sarif_file: 'trivy-results.sarif' - - release: + deployable: if: contains(fromJSON('["refs/heads/develop", "refs/heads/staging", "refs/heads/master", "refs/heads/release"]'), github.ref) || startsWith(github.ref, 'refs/heads/release') @@ -185,7 +157,7 @@ jobs: || contains(github.event.head_commit.message, 'ci:all') || github.event_name == 'create' - name: "Release Docker" + name: "Build deployable Docker" needs: [ test ] runs-on: ubuntu-latest timeout-minutes: 30 @@ -215,9 +187,31 @@ jobs: - shell: bash run: | echo "${{ toJSON(steps.build.outputs) }}" -# - name: Generate artifact 
attestations
-#   uses: actions/attest-build-provenance@v1
-#   with:
-#     subject-name: ${{ steps.build.outputs.image }}
-#     subject-digest: ${{ steps.build.outputs.digest }}
-#     push-to-registry: true
+
+  trivy:
+    name: Check Image with Trivy
+    runs-on: ubuntu-latest
+    needs: [ deployable ]
+    permissions:
+      contents: read  # for actions/checkout to fetch code
+      security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
+      actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
+    if: needs.deployable.outputs.created == 'true'
+      || contains(github.event.head_commit.message, 'ci:scan')
+      || contains(github.event.head_commit.message, 'ci:all')
+      || github.event_name == 'create'
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Run Trivy vulnerability scanner
+        uses: aquasecurity/trivy-action@master
+        with:
+          image-ref: ${{needs.deployable.outputs.image}}
+          format: 'sarif'
+          output: 'trivy-results.sarif'
+          severity: 'CRITICAL,HIGH'
+
+      - name: Upload Trivy scan results to GitHub Security tab
+        uses: github/codeql-action/upload-sarif@v2
+        with:
+          sarif_file: 'trivy-results.sarif'

From abe4ad654033ebc1209c4c26a70094e9a0d4d64b Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 15:27:42 +0200
Subject: [PATCH 22/31] 2024-06-21 15:27 - updates CI

ci:all
---
 .github/workflows/test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1969422d..8265ca3b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -55,7 +55,7 @@ jobs:
       - name: info
        shell: bash
        run: |
-          force_build="${{ contains(github.event.head_commit.message, 'ci:build') }}"
+          force_build="${{ contains(github.event.head_commit.message, 'ci:build') || contains(github.event.head_commit.message, 'ci:release')}}"
           force_scan="${{ contains(github.event.head_commit.message, 'ci:scan') }}"
           force_test="${{ contains(github.event.head_commit.message, 'ci:test') }}"
@@ -178,7 +178,7 @@
         uses: ./.github/actions/docker_build
         with:
           dryrun: ${{ env.ACT || 'false' }}
-          rebuild: ${{ contains(github.event.head_commit.message, 'ci:build') }}
+          rebuild: ${{ env.BUILD == 'true'}}
           image: ${{ vars.DOCKER_IMAGE }}
           target: 'dist'
           username: ${{ secrets.DOCKERHUB_USERNAME }}

From 00588f846a6442d6491c87fd970e57ee76d888ad Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 17:32:19 +0200
Subject: [PATCH 23/31] fixes Dockerfile and settings

---
 docker/bin/docker-entrypoint.sh               |  3 +++
 docker/conf/uwsgi.ini                         |  2 +-
 src/hope_dedup_engine/apps/core/apps.py       |  1 +
 src/hope_dedup_engine/apps/core/checks.py     | 19 +++++++++++++++++++
 .../apps/core/management/commands/upgrade.py  |  1 +
 src/hope_dedup_engine/config/settings.py      |  3 +--
 6 files changed, 26 insertions(+), 3 deletions(-)
 create mode 100644 src/hope_dedup_engine/apps/core/checks.py

diff --git a/docker/bin/docker-entrypoint.sh b/docker/bin/docker-entrypoint.sh
index 7c67a60f..455425be 100755
--- a/docker/bin/docker-entrypoint.sh
+++ b/docker/bin/docker-entrypoint.sh
@@ -4,10 +4,13 @@
 export MEDIA_ROOT="${MEDIA_ROOT:-/var/run/app/media}"
 export STATIC_ROOT="${STATIC_ROOT:-/var/run/app/static}"
 export UWSGI_PROCESSES="${UWSGI_PROCESSES:-"4"}"
+export DJANGO_SETTINGS_MODULE="${DJANGO_SETTINGS_MODULE:-"hope_dedup_engine.config.settings"}"

 mkdir -p "${MEDIA_ROOT}" "${STATIC_ROOT}" || echo "Cannot create dirs ${MEDIA_ROOT} ${STATIC_ROOT}"

 case "$1" in
     run)
+        django-admin check --deploy
+        django-admin 
upgrade set -- tini -- "$@" set -- gosu user:app uwsgi --ini /conf/uwsgi.ini ;; diff --git a/docker/conf/uwsgi.ini b/docker/conf/uwsgi.ini index 919740de..2492291d 100644 --- a/docker/conf/uwsgi.ini +++ b/docker/conf/uwsgi.ini @@ -3,7 +3,7 @@ http=0.0.0.0:8000 enable-threads=0 honour-range=1 master=1 -module=trash.wsgi +module=hope_dedup_engine.config.wsgi processes=$(UWSGI_PROCESSES) ;virtualenv=/code/.venv/ ;virtualenv=%(_) diff --git a/src/hope_dedup_engine/apps/core/apps.py b/src/hope_dedup_engine/apps/core/apps.py index 8b725e06..e52d1f67 100644 --- a/src/hope_dedup_engine/apps/core/apps.py +++ b/src/hope_dedup_engine/apps/core/apps.py @@ -8,3 +8,4 @@ class Config(AppConfig): def ready(self) -> None: super().ready() from hope_dedup_engine.utils import flags # noqa + from . import checks # noqa diff --git a/src/hope_dedup_engine/apps/core/checks.py b/src/hope_dedup_engine/apps/core/checks.py new file mode 100644 index 00000000..52673245 --- /dev/null +++ b/src/hope_dedup_engine/apps/core/checks.py @@ -0,0 +1,19 @@ +from django.core.checks import Error, register +from django.conf import settings +from pathlib import Path + +@register() +def example_check(app_configs, **kwargs): + errors = [] + for t in settings.TEMPLATES: + for d in t["DIRS"]: + if not Path(d).is_dir(): + errors.append( + Error( + f"'{d}' is not a directory", + hint="Remove this directory from settings.TEMPLATES.", + obj=settings, + id="hde.E001", + ) + ) + return errors diff --git a/src/hope_dedup_engine/apps/core/management/commands/upgrade.py b/src/hope_dedup_engine/apps/core/management/commands/upgrade.py index 513e2f01..c8a0168b 100644 --- a/src/hope_dedup_engine/apps/core/management/commands/upgrade.py +++ b/src/hope_dedup_engine/apps/core/management/commands/upgrade.py @@ -119,6 +119,7 @@ def handle(self, *args: Any, **options: Any) -> None: # noqa: C901 "stdout": self.stdout, } echo("Running upgrade", style_func=self.style.WARNING) + call_command("env", check=True) if self.run_check: diff --git a/src/hope_dedup_engine/config/settings.py b/src/hope_dedup_engine/config/settings.py index f9e4b333..a5442fe2 100644 --- a/src/hope_dedup_engine/config/settings.py +++ b/src/hope_dedup_engine/config/settings.py @@ -6,7 +6,6 @@ # BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SETTINGS_DIR = Path(__file__).parent PACKAGE_DIR = SETTINGS_DIR.parent -DEVELOPMENT_DIR = PACKAGE_DIR.parent.parent DEBUG = env.bool("DEBUG") @@ -129,7 +128,7 @@ TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [str(PACKAGE_DIR / "templates")], + "DIRS": [str(PACKAGE_DIR / "web/templates")], "APP_DIRS": False, "OPTIONS": { "loaders": [ From ed7504b75bafe362b4211ca2bb0c4945547ff12d Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 17:55:02 +0200 Subject: [PATCH 24/31] updates CI ci:debug --- .github/workflows/test.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8265ca3b..051fbf90 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,9 +13,9 @@ on: - feature/* - bugfix/* - hotfix/* - pull_request: - branches: [ develop, master ] - types: [ synchronize, opened, reopened, ready_for_review ] +# pull_request: +# branches: [ develop, master ] +# types: [ synchronize, opened, reopened, ready_for_review ] concurrency: group: "${{ github.workflow }}-${{ github.ref }}" From 82dbb212c81b91d8427341ceb656f8a8cafb3311 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 
2024 17:56:50 +0200 Subject: [PATCH 25/31] updates CI ci:debug --- .github/workflows/lint.yml | 15 +++++++++------ .github/workflows/security.yml | 15 +++++++++------ 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 44b35a9f..85f27b81 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -3,12 +3,15 @@ on: push: branches: - develop -# - master -# - staging -# - releases/* - pull_request: - branches: [develop, master] - types: [synchronize, opened, reopened, ready_for_review] + - master + - staging + - release/* + - feature/* + - bugfix/* + - hotfix/* +# pull_request: +# branches: [develop, master] +# types: [synchronize, opened, reopened, ready_for_review] defaults: run: diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml index 5771ff76..1efe142a 100644 --- a/.github/workflows/security.yml +++ b/.github/workflows/security.yml @@ -3,12 +3,15 @@ on: push: branches: - develop -# - master -# - staging -# - releases/* - pull_request: - branches: [develop, master] - types: [synchronize, opened, reopened, ready_for_review] + - master + - staging + - release/* + - feature/* + - bugfix/* + - hotfix/* +# pull_request: +# branches: [develop, master] +# types: [synchronize, opened, reopened, ready_for_review] defaults: run: From df24735ba048699d3fafb9a34b65edfc41771e4f Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 17:57:34 +0200 Subject: [PATCH 26/31] updates CI ci:debug --- .github/workflows/label-pullrequest.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/label-pullrequest.yml b/.github/workflows/label-pullrequest.yml index 5f9c071b..df5a7115 100644 --- a/.github/workflows/label-pullrequest.yml +++ b/.github/workflows/label-pullrequest.yml @@ -2,8 +2,18 @@ name: Adds labels on: - pull_request: - types: [opened, synchronize, edited, ready_for_review] + push: + branches: + - develop + - master + - staging + - release/* + - feature/* + - bugfix/* + - hotfix/* +# pull_request: +# types: [opened, synchronize, edited, ready_for_review] + jobs: label-pullrequest: From 264a9045ae8c14ce7cc4eb6c54ee4411ef138613 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 18:53:51 +0200 Subject: [PATCH 27/31] updates CI ci:debug --- .github/workflows/label-pullrequest.yml | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/.github/workflows/label-pullrequest.yml b/.github/workflows/label-pullrequest.yml index df5a7115..5f9c071b 100644 --- a/.github/workflows/label-pullrequest.yml +++ b/.github/workflows/label-pullrequest.yml @@ -2,18 +2,8 @@ name: Adds labels on: - push: - branches: - - develop - - master - - staging - - release/* - - feature/* - - bugfix/* - - hotfix/* -# pull_request: -# types: [opened, synchronize, edited, ready_for_review] - + pull_request: + types: [opened, synchronize, edited, ready_for_review] jobs: label-pullrequest: From ccdba486ef01be3a4647d2fe09bea4f6ec900cf3 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 19:00:58 +0200 Subject: [PATCH 28/31] lint --- src/hope_dedup_engine/apps/core/checks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/hope_dedup_engine/apps/core/checks.py b/src/hope_dedup_engine/apps/core/checks.py index 52673245..19b42332 100644 --- a/src/hope_dedup_engine/apps/core/checks.py +++ b/src/hope_dedup_engine/apps/core/checks.py @@ -2,6 +2,7 @@ from django.conf import settings from pathlib import Path + @register() def 
example_check(app_configs, **kwargs): errors = [] From 6c1d8bc617d975626049a47acd9b126784bcacf8 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 19:01:48 +0200 Subject: [PATCH 29/31] lint --- src/hope_dedup_engine/apps/core/checks.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/hope_dedup_engine/apps/core/checks.py b/src/hope_dedup_engine/apps/core/checks.py index 19b42332..d5e80e1a 100644 --- a/src/hope_dedup_engine/apps/core/checks.py +++ b/src/hope_dedup_engine/apps/core/checks.py @@ -1,7 +1,8 @@ -from django.core.checks import Error, register -from django.conf import settings from pathlib import Path +from django.conf import settings +from django.core.checks import Error, register + @register() def example_check(app_configs, **kwargs): From 61139f79197c0eb53f7b0742c8587498ae8440f4 Mon Sep 17 00:00:00 2001 From: saxix Date: Fri, 21 Jun 2024 22:53:18 +0200 Subject: [PATCH 30/31] 2024-06-21 22:53 - updates CI ci:all --- .pre-commit-config.yaml | 8 +-- CONTRIBUTING.md | 37 +++++++++++ README.md | 43 ++----------- src/hope_dedup_engine/apps/core/apps.py | 1 + tests/admin/test_admin_smoke.py | 8 ++- tests/api/test_auth.py | 9 ++- tests/api/test_business_logic.py | 51 +++++++++++---- tests/api/test_deduplication_set_create.py | 16 +++-- tests/api/test_deduplication_set_delete.py | 16 +++-- tests/api/test_deduplication_set_list.py | 4 +- tests/api/test_duplicate_list.py | 12 +++- tests/api/test_ignored_keys_create.py | 51 +++++++++++---- tests/api/test_ignored_keys_list.py | 13 +++- tests/api/test_image_bulk_create.py | 16 +++-- tests/api/test_image_bulk_delete.py | 29 ++++++--- tests/api/test_image_create.py | 42 ++++++++++--- tests/api/test_image_delete.py | 29 ++++++--- tests/api/test_image_list.py | 12 +++- tests/api/test_utils.py | 4 +- tests/conftest.py | 16 +++-- tests/extras/testutils/decorators.py | 4 +- tests/extras/testutils/factories/__init__.py | 26 ++++++-- tests/extras/testutils/factories/api.py | 6 +- tests/extras/testutils/factories/base.py | 8 ++- .../testutils/factories/django_celery_beat.py | 8 ++- tests/extras/testutils/factories/user.py | 2 +- tests/extras/testutils/perms.py | 8 ++- tests/faces/conftest.py | 47 +++++++++++--- tests/faces/test_celery_tasks.py | 43 ++++++++++--- tests/faces/test_duplicate_groups_builder.py | 25 ++++++-- tests/faces/test_forms.py | 15 ++++- tests/faces/test_image_processor.py | 62 ++++++++++++++----- tests/faces/test_net_manager.py | 4 +- tests/faces/test_storage_manager.py | 6 +- tests/test_commands.py | 24 +++++-- tests/test_state.py | 4 +- tests/utils/test_utils_http.py | 7 ++- 37 files changed, 530 insertions(+), 186 deletions(-) create mode 100644 CONTRIBUTING.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5f2aef2b..ce6e48b2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,14 +5,14 @@ repos: - id: isort stages: [commit] - repo: https://github.com/ambv/black - rev: 24.1.1 + rev: 24.4.2 hooks: - id: black args: [--config=pyproject.toml] exclude: "migrations|snapshots" stages: [commit] - repo: https://github.com/PyCQA/flake8 - rev: 5.0.4 + rev: 7.1.0 hooks: - id: flake8 args: [--config=.flake8] @@ -23,7 +23,3 @@ repos: hooks: - id: bandit args: ["-c", "bandit.yaml"] - - repo: https://github.com/twisted/towncrier - rev: 22.13.0 - hooks: - - id: towncrier-check \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..a23fe5c9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,37 @@ +# Contributing + +## 
System Requirements
+
+- python 3.12
+- [direnv](https://direnv.net/) - not mandatory but strongly recommended
+- [pdm](https://pdm.fming.dev/2.9/)
+
+
+
+
+**WARNING**
+> Hope Deduplication Engine implements a **security first** policy. It means that configuration default values are "almost" production compliant.
+>
+> E.g. `DEBUG=False` or `SECURE_SSL_REDIRECT=True`.
+>
+> Be sure to run `./manage.py env --check` and `./manage.py env -g all` to check and display your configuration
+
+
+
+### 1. Clone repo and install requirements
+    git clone https://github.com/unicef/hope-dedup-engine
+    pdm venv create 3.12
+    pdm install
+    pdm venv activate in-project
+    pre-commit install
+
+### 2. Configure your environment
+
+Use `./manage.py env` to check required (and optional) variables to set
+
+    ./manage.py env --check
+
+
+### 3. Run upgrade to run migrations and initial setup
+
+    ./manage.py upgrade
diff --git a/README.md b/README.md
index a0f22857..fca4725f 100644
--- a/README.md
+++ b/README.md
@@ -4,44 +4,11 @@ ABOUT HOPE Deduplication Engine
 [![Test](https://github.com/unicef/hope-dedup-engine/actions/workflows/test.yml/badge.svg)](https://github.com/unicef/hope-dedup-engine/actions/workflows/test.yml)
 [![Lint](https://github.com/unicef/hope-dedup-engine/actions/workflows/lint.yml/badge.svg)](https://github.com/unicef/hope-dedup-engine/actions/workflows/lint.yml)
 [![codecov](https://codecov.io/gh/unicef/hope-dedup-engine/graph/badge.svg?token=kAuZEX5k5o)](https://codecov.io/gh/unicef/hope-dedup-engine)
-![Version](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Fsaxix%2Ftrash%2Fdevelop%2Fpyproject.toml&query=%24.project.version&label=version)
-![License](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Fsaxix%2Ftrash%2Fdevelop%2Fpyproject.toml&query=%24.project.license.text&label=license)
+![Version](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Funicef%2Fhope-dedup-engine%2Fdevelop%2Fpyproject.toml&query=%24.project.version&label=version)
+![License](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Funicef%2Fhope-dedup-engine%2Fdevelop%2Fpyproject.toml&query=%24.project.license.text&label=license)
 
-## Contributing
+## Help
+**Got a question?** We got answers.
 
-### System Requirements
-
-- python 3.12
-- [direnv](https://direnv.net/) - not mandatory but strongly recommended
-- [pdm](https://pdm.fming.dev/2.9/)
-
-
-
-
-**WARNING**
-> Hope Deduplication Engine implements **security first** policy. It means that configuration default values are "almost" production compliant.
->
-> Es. `DEBUG=False` or `SECURE_SSL_REDIRECT=True`.
->
-> Be sure to run `./manage.py env --check` and `./manage.py env -g all` to check and display your configuration
-
-
-
-### 1. Clone repo and install requirements
-    git clone https://github.com/unicef/hope-dedup-engine
-    pdm venv create 3.11
-    pdm install
-    pdm venv activate in-project
-    pre-commit install
-
-### 2. configure your environment
-
-Uses `./manage.py env` to check required (and optional) variables to put
-
-    ./manage.py env --check
-
-
-### 3. 
Run upgrade to run migrations and initial setup - - ./manage.py upgrade +File a GitHub [issue](https://github.com/unicef/hope-dedup-engine/issues) diff --git a/src/hope_dedup_engine/apps/core/apps.py b/src/hope_dedup_engine/apps/core/apps.py index e52d1f67..c7a275c4 100644 --- a/src/hope_dedup_engine/apps/core/apps.py +++ b/src/hope_dedup_engine/apps/core/apps.py @@ -8,4 +8,5 @@ class Config(AppConfig): def ready(self) -> None: super().ready() from hope_dedup_engine.utils import flags # noqa + from . import checks # noqa diff --git a/tests/admin/test_admin_smoke.py b/tests/admin/test_admin_smoke.py index 2a6278df..01b67b63 100644 --- a/tests/admin/test_admin_smoke.py +++ b/tests/admin/test_admin_smoke.py @@ -95,12 +95,16 @@ def record(db, request): modeladmin = request.getfixturevalue("modeladmin") instance = modeladmin.model.objects.first() if not instance: - full_name = f"{modeladmin.model._meta.app_label}.{modeladmin.model._meta.object_name}" + full_name = ( + f"{modeladmin.model._meta.app_label}.{modeladmin.model._meta.object_name}" + ) factory = get_factory_for_model(modeladmin.model) try: instance = factory(**KWARGS.get(full_name, {})) except Exception as e: - raise Exception(f"Error creating fixture for {factory} using {KWARGS}") from e + raise Exception( + f"Error creating fixture for {factory} using {KWARGS}" + ) from e return instance diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 75636dad..1ba3ed40 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -40,7 +40,10 @@ @mark.parametrize(("view_name", "method", "args"), REQUESTS) def test_anonymous_cannot_access( - anonymous_api_client: APIClient, view_name: str, method: HTTPMethod, args: tuple[Any, ...] + anonymous_api_client: APIClient, + view_name: str, + method: HTTPMethod, + args: tuple[Any, ...], ) -> None: response = getattr(anonymous_api_client, method.lower())(reverse(view_name, args)) assert response.status_code == status.HTTP_401_UNAUTHORIZED @@ -50,7 +53,9 @@ def test_anonymous_cannot_access( def test_authenticated_can_access( api_client: APIClient, view_name: str, method: HTTPMethod, args: tuple[Any, ...] 
) -> None: - response = getattr(api_client, method.lower())(reverse(view_name, args), format=JSON) + response = getattr(api_client, method.lower())( + reverse(view_name, args), format=JSON + ) assert response.status_code != status.HTTP_401_UNAUTHORIZED diff --git a/tests/api/test_business_logic.py b/tests/api/test_business_logic.py index 7f83ee37..3378bcef 100644 --- a/tests/api/test_business_logic.py +++ b/tests/api/test_business_logic.py @@ -16,13 +16,18 @@ from hope_dedup_engine.apps.api.models import DeduplicationSet from hope_dedup_engine.apps.api.models.deduplication import Duplicate, Image -from hope_dedup_engine.apps.api.serializers import DeduplicationSetSerializer, ImageSerializer +from hope_dedup_engine.apps.api.serializers import ( + DeduplicationSetSerializer, + ImageSerializer, +) def test_new_deduplication_set_status_is_clean(api_client: APIClient) -> None: data = DeduplicationSetSerializer(DeduplicationSetFactory.build()).data - response = api_client.post(reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON) + response = api_client.post( + reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON + ) assert response.status_code == status.HTTP_201_CREATED deduplication_set = response.json() assert deduplication_set["state"] == DeduplicationSet.State.CLEAN.label @@ -30,12 +35,20 @@ def test_new_deduplication_set_status_is_clean(api_client: APIClient) -> None: @mark.parametrize( "deduplication_set__state", - (DeduplicationSet.State.CLEAN, DeduplicationSet.State.DIRTY, DeduplicationSet.State.ERROR), + ( + DeduplicationSet.State.CLEAN, + DeduplicationSet.State.DIRTY, + DeduplicationSet.State.ERROR, + ), ) def test_deduplication_set_processing_trigger( - api_client: APIClient, start_processing: MagicMock, deduplication_set: DeduplicationSet + api_client: APIClient, + start_processing: MagicMock, + deduplication_set: DeduplicationSet, ) -> None: - response = api_client.post(reverse(DEDUPLICATION_SET_PROCESS_VIEW, (deduplication_set.pk,))) + response = api_client.post( + reverse(DEDUPLICATION_SET_PROCESS_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_200_OK start_processing.assert_called_once_with(deduplication_set) @@ -44,7 +57,9 @@ def test_duplicates_are_removed_before_processing( api_client: APIClient, deduplication_set: DeduplicationSet, duplicate: Duplicate ) -> None: assert Duplicate.objects.count() - response = api_client.post(reverse(DEDUPLICATION_SET_PROCESS_VIEW, (deduplication_set.pk,))) + response = api_client.post( + reverse(DEDUPLICATION_SET_PROCESS_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_200_OK assert not Duplicate.objects.count() @@ -54,7 +69,9 @@ def test_new_image_makes_deduplication_set_state_dirty( ) -> None: assert deduplication_set.state == DeduplicationSet.State.CLEAN response = api_client.post( - reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=ImageSerializer(ImageFactory.build()).data, format=JSON + reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), + data=ImageSerializer(ImageFactory.build()).data, + format=JSON, ) assert response.status_code == status.HTTP_201_CREATED deduplication_set.refresh_from_db() @@ -64,23 +81,33 @@ def test_new_image_makes_deduplication_set_state_dirty( def test_image_deletion_makes_deduplication_state_dirty( api_client: APIClient, deduplication_set: DeduplicationSet, image: Image ) -> None: - response = api_client.delete(reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk))) + response = api_client.delete( + reverse(IMAGE_DETAIL_VIEW, 
(deduplication_set.pk, image.pk)) + ) assert response.status_code == status.HTTP_204_NO_CONTENT deduplication_set.refresh_from_db() assert deduplication_set.state == DeduplicationSet.State.DIRTY def test_deletion_triggers_model_data_deletion( - api_client: APIClient, deduplication_set: DeduplicationSet, delete_model_data: MagicMock + api_client: APIClient, + deduplication_set: DeduplicationSet, + delete_model_data: MagicMock, ) -> None: - response = api_client.delete(reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,))) + response = api_client.delete( + reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_204_NO_CONTENT delete_model_data.assert_called_once_with(deduplication_set) def test_unauthorized_deletion_does_not_trigger_model_data_deletion( - another_system_api_client: APIClient, deduplication_set: DeduplicationSet, delete_model_data: MagicMock + another_system_api_client: APIClient, + deduplication_set: DeduplicationSet, + delete_model_data: MagicMock, ) -> None: - response = another_system_api_client.delete(reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,))) + response = another_system_api_client.delete( + reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_403_FORBIDDEN delete_model_data.assert_not_called() diff --git a/tests/api/test_deduplication_set_create.py b/tests/api/test_deduplication_set_create.py index 62973d03..341f35b0 100644 --- a/tests/api/test_deduplication_set_create.py +++ b/tests/api/test_deduplication_set_create.py @@ -15,7 +15,9 @@ def test_can_create_deduplication_set(api_client: APIClient) -> None: previous_amount = DeduplicationSet.objects.count() data = DeduplicationSetSerializer(DeduplicationSetFactory.build()).data - response = api_client.post(reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON) + response = api_client.post( + reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON + ) assert response.status_code == status.HTTP_201_CREATED assert DeduplicationSet.objects.count() == previous_amount + 1 @@ -28,13 +30,17 @@ def test_can_create_deduplication_set(api_client: APIClient) -> None: ("name", "reference_pk"), ), ) -def test_missing_fields_handling(api_client: APIClient, omit: str | tuple[str, ...]) -> None: +def test_missing_fields_handling( + api_client: APIClient, omit: str | tuple[str, ...] +) -> None: data = DeduplicationSetSerializer(DeduplicationSetFactory.build()).data missing_fields = (omit,) if isinstance(omit, str) else omit for field in missing_fields: del data[field] - response = api_client.post(reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON) + response = api_client.post( + reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON + ) assert response.status_code == status.HTTP_400_BAD_REQUEST errors = response.json() assert len(errors) == len(missing_fields) @@ -53,7 +59,9 @@ def test_missing_fields_handling(api_client: APIClient, omit: str | tuple[str, . 
def test_invalid_values_handling(api_client: APIClient, field: str, value: Any) -> None: data = DeduplicationSetSerializer(DeduplicationSetFactory.build()).data data[field] = value - response = api_client.post(reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON) + response = api_client.post( + reverse(DEDUPLICATION_SET_LIST_VIEW), data=data, format=JSON + ) assert response.status_code == status.HTTP_400_BAD_REQUEST errors = response.json() assert len(errors) == 1 diff --git a/tests/api/test_deduplication_set_delete.py b/tests/api/test_deduplication_set_delete.py index 1c81fbab..d12781d4 100644 --- a/tests/api/test_deduplication_set_delete.py +++ b/tests/api/test_deduplication_set_delete.py @@ -9,12 +9,16 @@ from hope_dedup_engine.apps.security.models import User -def test_can_delete_deduplication_set(api_client: APIClient, user: User, deduplication_set: DeduplicationSet) -> None: +def test_can_delete_deduplication_set( + api_client: APIClient, user: User, deduplication_set: DeduplicationSet +) -> None: assert not deduplication_set.deleted assert deduplication_set.updated_by is None previous_amount = DeduplicationSet.objects.count() - response = api_client.delete(reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,))) + response = api_client.delete( + reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_204_NO_CONTENT # object is only marked as deleted @@ -25,9 +29,13 @@ def test_can_delete_deduplication_set(api_client: APIClient, user: User, dedupli def test_cannot_delete_deduplication_set_between_systems( - another_system_api_client: APIClient, deduplication_set: DeduplicationSet, delete_model_data: MagicMock + another_system_api_client: APIClient, + deduplication_set: DeduplicationSet, + delete_model_data: MagicMock, ) -> None: set_count = DeduplicationSet.objects.filter(deleted=False).count() - response = another_system_api_client.delete(reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,))) + response = another_system_api_client.delete( + reverse(DEDUPLICATION_SET_DETAIL_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_403_FORBIDDEN assert DeduplicationSet.objects.filter(deleted=False).count() == set_count diff --git a/tests/api/test_deduplication_set_list.py b/tests/api/test_deduplication_set_list.py index 09481bf6..6eb77860 100644 --- a/tests/api/test_deduplication_set_list.py +++ b/tests/api/test_deduplication_set_list.py @@ -6,7 +6,9 @@ from hope_dedup_engine.apps.api.models import DeduplicationSet -def test_can_list_deduplication_sets(api_client: APIClient, deduplication_set: DeduplicationSet) -> None: +def test_can_list_deduplication_sets( + api_client: APIClient, deduplication_set: DeduplicationSet +) -> None: response = api_client.get(reverse(DEDUPLICATION_SET_LIST_VIEW)) assert response.status_code == status.HTTP_200_OK data = response.json() diff --git a/tests/api/test_duplicate_list.py b/tests/api/test_duplicate_list.py index eae8401a..3aff0457 100644 --- a/tests/api/test_duplicate_list.py +++ b/tests/api/test_duplicate_list.py @@ -7,7 +7,9 @@ from hope_dedup_engine.apps.api.models.deduplication import Duplicate -def test_can_list_duplicates(api_client: APIClient, deduplication_set: DeduplicationSet, duplicate: Duplicate) -> None: +def test_can_list_duplicates( + api_client: APIClient, deduplication_set: DeduplicationSet, duplicate: Duplicate +) -> None: response = api_client.get(reverse(DUPLICATE_LIST_VIEW, (deduplication_set.pk,))) assert response.status_code 
== status.HTTP_200_OK data = response.json() @@ -15,8 +17,12 @@ def test_can_list_duplicates(api_client: APIClient, deduplication_set: Deduplica def test_cannot_list_duplicates_between_systems( - another_system_api_client: APIClient, deduplication_set: DeduplicationSet, duplicate: Duplicate + another_system_api_client: APIClient, + deduplication_set: DeduplicationSet, + duplicate: Duplicate, ) -> None: assert DeduplicationSet.objects.count() - response = another_system_api_client.get(reverse(DUPLICATE_LIST_VIEW, (deduplication_set.pk,))) + response = another_system_api_client.get( + reverse(DUPLICATE_LIST_VIEW, (deduplication_set.pk,)) + ) assert response.status_code == status.HTTP_403_FORBIDDEN diff --git a/tests/api/test_ignored_keys_create.py b/tests/api/test_ignored_keys_create.py index 317e39c0..8eaa594d 100644 --- a/tests/api/test_ignored_keys_create.py +++ b/tests/api/test_ignored_keys_create.py @@ -11,26 +11,40 @@ from hope_dedup_engine.apps.security.models import User -def test_can_create_ignored_key_pair(api_client: APIClient, deduplication_set: DeduplicationSet) -> None: - previous_amount = IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count() +def test_can_create_ignored_key_pair( + api_client: APIClient, deduplication_set: DeduplicationSet +) -> None: + previous_amount = IgnoredKeyPair.objects.filter( + deduplication_set=deduplication_set + ).count() data = IgnoredKeyPairSerializer(IgnoredKeyPairFactory.build()).data - response = api_client.post(reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON) + response = api_client.post( + reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON + ) assert response.status_code == status.HTTP_201_CREATED - assert IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count() == previous_amount + 1 + assert ( + IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count() + == previous_amount + 1 + ) def test_cannot_create_ignored_key_pair_between_systems( another_system_api_client: APIClient, deduplication_set: DeduplicationSet ) -> None: - previous_amount = IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count() + previous_amount = IgnoredKeyPair.objects.filter( + deduplication_set=deduplication_set + ).count() data = IgnoredKeyPairSerializer(IgnoredKeyPairFactory.build()).data response = another_system_api_client.post( reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON ) assert response.status_code == status.HTTP_403_FORBIDDEN - assert IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count() == previous_amount + assert ( + IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count() + == previous_amount + ) INVALID_PK_VALUES = "", None @@ -39,12 +53,17 @@ def test_cannot_create_ignored_key_pair_between_systems( @mark.parametrize("first_pk", INVALID_PK_VALUES) @mark.parametrize("second_pk", INVALID_PK_VALUES) def test_invalid_values_handling( - api_client: APIClient, deduplication_set: DeduplicationSet, first_pk: str | None, second_pk: str | None + api_client: APIClient, + deduplication_set: DeduplicationSet, + first_pk: str | None, + second_pk: str | None, ) -> None: data = IgnoredKeyPairSerializer(IgnoredKeyPairFactory.build()).data data["first_reference_pk"] = first_pk data["second_reference_pk"] = second_pk - response = api_client.post(reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON) + response = api_client.post( + 
reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     errors = response.json()
     assert len(errors) == 2
@@ -52,22 +71,30 @@ def test_invalid_values_handling(
     assert "second_reference_pk" in errors
 
 
-def test_missing_pk_handling(api_client: APIClient, deduplication_set: DeduplicationSet) -> None:
+def test_missing_pk_handling(
+    api_client: APIClient, deduplication_set: DeduplicationSet
+) -> None:
     data = IgnoredKeyPairSerializer(IgnoredKeyPairFactory.build()).data
     del data["first_reference_pk"], data["second_reference_pk"]
-    response = api_client.post(reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     errors = response.json()
     assert "first_reference_pk" in errors
     assert "second_reference_pk" in errors
 
 
-def test_deduplication_set_is_updated(api_client: APIClient, user: User, deduplication_set: DeduplicationSet) -> None:
+def test_deduplication_set_is_updated(
+    api_client: APIClient, user: User, deduplication_set: DeduplicationSet
+) -> None:
     assert deduplication_set.updated_by is None
     data = IgnoredKeyPairSerializer(IgnoredKeyPairFactory.build()).data
-    response = api_client.post(reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_201_CREATED
     deduplication_set.refresh_from_db()
diff --git a/tests/api/test_ignored_keys_list.py b/tests/api/test_ignored_keys_list.py
index 8affedf4..30c73537 100644
--- a/tests/api/test_ignored_keys_list.py
+++ b/tests/api/test_ignored_keys_list.py
@@ -8,17 +8,24 @@
 
 
 def test_can_list_ignored_key_pairs(
-    api_client: APIClient, deduplication_set: DeduplicationSet, ignored_key_pair: IgnoredKeyPair
+    api_client: APIClient,
+    deduplication_set: DeduplicationSet,
+    ignored_key_pair: IgnoredKeyPair,
 ) -> None:
     response = api_client.get(reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)))
     assert response.status_code == status.HTTP_200_OK
     ignored_key_pairs = response.json()
     assert len(ignored_key_pairs)
-    assert len(ignored_key_pairs) == IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count()
+    assert (
+        len(ignored_key_pairs)
+        == IgnoredKeyPair.objects.filter(deduplication_set=deduplication_set).count()
+    )
 
 
 def test_cannot_list_ignored_key_pairs_between_systems(
     another_system_api_client: APIClient, deduplication_set: DeduplicationSet
 ) -> None:
-    response = another_system_api_client.get(reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,)))
+    response = another_system_api_client.get(
+        reverse(IGNORED_KEYS_LIST_VIEW, (deduplication_set.pk,))
+    )
     assert response.status_code == status.HTTP_403_FORBIDDEN
diff --git a/tests/api/test_image_bulk_create.py b/tests/api/test_image_bulk_create.py
index 6b91dd8a..a001ab2f 100644
--- a/tests/api/test_image_bulk_create.py
+++ b/tests/api/test_image_bulk_create.py
@@ -9,9 +9,13 @@
 from hope_dedup_engine.apps.security.models import User
 
 
-def test_can_bulk_create_images(api_client: APIClient, deduplication_set: DeduplicationSet) -> None:
+def test_can_bulk_create_images(
+    api_client: APIClient, deduplication_set: DeduplicationSet
+) -> None:
     data = ImageSerializer(ImageFactory.build_batch(10), many=True).data
-    response = api_client.post(reverse(BULK_IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(BULK_IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_201_CREATED
@@ -25,11 +29,15 @@ def test_cannot_bulk_create_images_between_systems(
     assert response.status_code == status.HTTP_403_FORBIDDEN
 
 
-def test_deduplication_set_is_updated(api_client: APIClient, user: User, deduplication_set: DeduplicationSet) -> None:
+def test_deduplication_set_is_updated(
+    api_client: APIClient, user: User, deduplication_set: DeduplicationSet
+) -> None:
     assert deduplication_set.updated_by is None
     data = ImageSerializer(ImageFactory.build_batch(10), many=True).data
-    response = api_client.post(reverse(BULK_IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(BULK_IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_201_CREATED
     deduplication_set.refresh_from_db()
diff --git a/tests/api/test_image_bulk_delete.py b/tests/api/test_image_bulk_delete.py
index a1898483..7b4ae469 100644
--- a/tests/api/test_image_bulk_delete.py
+++ b/tests/api/test_image_bulk_delete.py
@@ -8,27 +8,42 @@
 from hope_dedup_engine.apps.security.models import User
 
 
-def test_can_delete_all_images(api_client: APIClient, deduplication_set: DeduplicationSet, image: Image) -> None:
+def test_can_delete_all_images(
+    api_client: APIClient, deduplication_set: DeduplicationSet, image: Image
+) -> None:
     image_count = Image.objects.filter(deduplication_set=deduplication_set).count()
-    response = api_client.delete(reverse(BULK_IMAGE_CLEAR_VIEW, (deduplication_set.pk,)))
+    response = api_client.delete(
+        reverse(BULK_IMAGE_CLEAR_VIEW, (deduplication_set.pk,))
+    )
     assert response.status_code == status.HTTP_204_NO_CONTENT
-    assert Image.objects.filter(deduplication_set=deduplication_set).count() == image_count - 1
+    assert (
+        Image.objects.filter(deduplication_set=deduplication_set).count()
+        == image_count - 1
+    )
 
 
 def test_cannot_delete_images_between_systems(
-    another_system_api_client: APIClient, deduplication_set: DeduplicationSet, image: Image
+    another_system_api_client: APIClient,
+    deduplication_set: DeduplicationSet,
+    image: Image,
 ) -> None:
     image_count = Image.objects.filter(deduplication_set=deduplication_set).count()
-    response = another_system_api_client.delete(reverse(BULK_IMAGE_CLEAR_VIEW, (deduplication_set.pk,)))
+    response = another_system_api_client.delete(
+        reverse(BULK_IMAGE_CLEAR_VIEW, (deduplication_set.pk,))
+    )
     assert response.status_code == status.HTTP_403_FORBIDDEN
-    assert Image.objects.filter(deduplication_set=deduplication_set).count() == image_count
+    assert (
+        Image.objects.filter(deduplication_set=deduplication_set).count() == image_count
+    )
 
 
 def test_deduplication_set_is_updated(
     api_client: APIClient, user: User, deduplication_set: DeduplicationSet, image: Image
 ) -> None:
     assert deduplication_set.updated_by is None
-    response = api_client.delete(reverse(BULK_IMAGE_CLEAR_VIEW, (deduplication_set.pk,)))
+    response = api_client.delete(
+        reverse(BULK_IMAGE_CLEAR_VIEW, (deduplication_set.pk,))
+    )
     assert response.status_code == status.HTTP_204_NO_CONTENT
     deduplication_set.refresh_from_db()
     assert deduplication_set.updated_by == user
diff --git a/tests/api/test_image_create.py b/tests/api/test_image_create.py
index fe87ca16..dd76189f 100644
--- a/tests/api/test_image_create.py
+++ b/tests/api/test_image_create.py
@@ -11,13 +11,20 @@
 from hope_dedup_engine.apps.security.models import User
 
 
-def test_can_create_image(api_client: APIClient, deduplication_set: DeduplicationSet) -> None:
+def test_can_create_image(
+    api_client: APIClient, deduplication_set: DeduplicationSet
+) -> None:
     previous_amount = Image.objects.filter(deduplication_set=deduplication_set).count()
     data = ImageSerializer(ImageFactory.build()).data
-    response = api_client.post(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_201_CREATED
-    assert Image.objects.filter(deduplication_set=deduplication_set).count() == previous_amount + 1
+    assert (
+        Image.objects.filter(deduplication_set=deduplication_set).count()
+        == previous_amount + 1
+    )
 
 
 def test_cannot_create_image_between_systems(
@@ -26,9 +33,14 @@
     previous_amount = Image.objects.filter(deduplication_set=deduplication_set).count()
     data = ImageSerializer(ImageFactory.build()).data
-    response = another_system_api_client.post(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = another_system_api_client.post(
+        reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
     assert response.status_code == status.HTTP_403_FORBIDDEN
-    assert Image.objects.filter(deduplication_set=deduplication_set).count() == previous_amount
+    assert (
+        Image.objects.filter(deduplication_set=deduplication_set).count()
+        == previous_amount
+    )
 
 
 @mark.parametrize(
@@ -43,28 +55,38 @@ def test_invalid_values_handling(
 ) -> None:
     data = ImageSerializer(ImageFactory.build()).data
     data["filename"] = filename
-    response = api_client.post(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     errors = response.json()
     assert len(errors) == 1
     assert "filename" in errors
 
 
-def test_missing_filename_handling(api_client: APIClient, deduplication_set: DeduplicationSet) -> None:
+def test_missing_filename_handling(
+    api_client: APIClient, deduplication_set: DeduplicationSet
+) -> None:
     data = ImageSerializer(ImageFactory.build()).data
     del data["filename"]
-    response = api_client.post(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_400_BAD_REQUEST
     errors = response.json()
     assert "filename" in errors
 
 
-def test_deduplication_set_is_updated(api_client: APIClient, user: User, deduplication_set: DeduplicationSet) -> None:
+def test_deduplication_set_is_updated(
+    api_client: APIClient, user: User, deduplication_set: DeduplicationSet
+) -> None:
     assert deduplication_set.updated_by is None
     data = ImageSerializer(ImageFactory.build()).data
-    response = api_client.post(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON)
+    response = api_client.post(
+        reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)), data=data, format=JSON
+    )
 
     assert response.status_code == status.HTTP_201_CREATED
     deduplication_set.refresh_from_db()
diff --git a/tests/api/test_image_delete.py b/tests/api/test_image_delete.py
index e0745847..13eaba5c 100644
--- a/tests/api/test_image_delete.py
+++ b/tests/api/test_image_delete.py
@@ -7,27 +7,42 @@
 from hope_dedup_engine.apps.security.models import User
 
 
-def test_can_delete_image(api_client: APIClient, deduplication_set: DeduplicationSet, image: Image) -> None:
+def test_can_delete_image(
+    api_client: APIClient, deduplication_set: DeduplicationSet, image: Image
+) -> None:
     image_count = Image.objects.filter(deduplication_set=deduplication_set).count()
-    response = api_client.delete(reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk)))
+    response = api_client.delete(
+        reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk))
+    )
     assert response.status_code == status.HTTP_204_NO_CONTENT
-    assert Image.objects.filter(deduplication_set=deduplication_set).count() == image_count - 1
+    assert (
+        Image.objects.filter(deduplication_set=deduplication_set).count()
+        == image_count - 1
+    )
 
 
 def test_cannot_delete_image_between_systems(
-    another_system_api_client: APIClient, deduplication_set: DeduplicationSet, image: Image
+    another_system_api_client: APIClient,
+    deduplication_set: DeduplicationSet,
+    image: Image,
 ) -> None:
     image_count = Image.objects.filter(deduplication_set=deduplication_set).count()
-    response = another_system_api_client.delete(reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk)))
+    response = another_system_api_client.delete(
+        reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk))
+    )
     assert response.status_code == status.HTTP_403_FORBIDDEN
-    assert Image.objects.filter(deduplication_set=deduplication_set).count() == image_count
+    assert (
+        Image.objects.filter(deduplication_set=deduplication_set).count() == image_count
+    )
 
 
 def test_deduplication_set_is_updated(
     api_client: APIClient, user: User, deduplication_set: DeduplicationSet, image: Image
 ) -> None:
     assert deduplication_set.updated_by is None
-    response = api_client.delete(reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk)))
+    response = api_client.delete(
+        reverse(IMAGE_DETAIL_VIEW, (deduplication_set.pk, image.pk))
+    )
     assert response.status_code == status.HTTP_204_NO_CONTENT
     deduplication_set.refresh_from_db()
     assert deduplication_set.updated_by == user
diff --git a/tests/api/test_image_list.py b/tests/api/test_image_list.py
index e02e5f54..a2cb09d3 100644
--- a/tests/api/test_image_list.py
+++ b/tests/api/test_image_list.py
@@ -7,16 +7,22 @@
 from hope_dedup_engine.apps.api.models.deduplication import Image
 
 
-def test_can_list_images(api_client: APIClient, deduplication_set: DeduplicationSet, image: Image) -> None:
+def test_can_list_images(
+    api_client: APIClient, deduplication_set: DeduplicationSet, image: Image
+) -> None:
     response = api_client.get(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)))
     assert response.status_code == status.HTTP_200_OK
     images = response.json()
     assert len(images)
-    assert len(images) == Image.objects.filter(deduplication_set=deduplication_set).count()
+    assert (
+        len(images) == Image.objects.filter(deduplication_set=deduplication_set).count()
+    )
 
 
 def test_cannot_list_images_between_systems(
     another_system_api_client: APIClient, deduplication_set: DeduplicationSet
 ) -> None:
-    response = another_system_api_client.get(reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,)))
+    response = another_system_api_client.get(
+        reverse(IMAGE_LIST_VIEW, (deduplication_set.pk,))
+    )
     assert response.status_code == status.HTTP_403_FORBIDDEN
diff --git a/tests/api/test_utils.py b/tests/api/test_utils.py
index 64385448..886f5e3d 100644
--- a/tests/api/test_utils.py
+++ b/tests/api/test_utils.py
@@ -17,7 +17,9 @@ def test_notification_is_sent_when_url_is_set(
     requests_get_mock: MagicMock, deduplication_set: DeduplicationSet
 ) -> None:
     send_notification(deduplication_set)
-    requests_get_mock.assert_called_once_with(deduplication_set.notification_url, timeout=REQUEST_TIMEOUT)
+    requests_get_mock.assert_called_once_with(
+        deduplication_set.notification_url, timeout=REQUEST_TIMEOUT
+    )
 
 
 @mark.parametrize("deduplication_set__notification_url", (None,))
diff --git a/tests/conftest.py b/tests/conftest.py
index df46bfbc..0cf26130 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -22,10 +22,18 @@ def pytest_configure(config):
     os.environ["MAILJET_API_KEY"] = "11"
     os.environ["MAILJET_SECRET_KEY"] = "11"
 
-    os.environ["FILE_STORAGE_DEFAULT"] = "django.core.files.storage.FileSystemStorage?location=/tmp/hde/storage/"
-    os.environ["FILE_STORAGE_STATIC"] = "django.core.files.storage.FileSystemStorage?location=/tmp/hde/static/"
-    os.environ["FILE_STORAGE_MEDIA"] = "django.core.files.storage.FileSystemStorage?location=/tmp/hde/storage/"
-    os.environ["FILE_STORAGE_HOPE"] = "django.core.files.storage.FileSystemStorage?location=/tmp/hde/hope/"
+    os.environ["FILE_STORAGE_DEFAULT"] = (
+        "django.core.files.storage.FileSystemStorage?location=/tmp/hde/storage/"
+    )
+    os.environ["FILE_STORAGE_STATIC"] = (
+        "django.core.files.storage.FileSystemStorage?location=/tmp/hde/static/"
+    )
+    os.environ["FILE_STORAGE_MEDIA"] = (
+        "django.core.files.storage.FileSystemStorage?location=/tmp/hde/storage/"
+    )
+    os.environ["FILE_STORAGE_HOPE"] = (
+        "django.core.files.storage.FileSystemStorage?location=/tmp/hde/hope/"
+    )
     os.environ["SOCIAL_AUTH_REDIRECT_IS_HTTPS"] = "0"
     os.environ["CELERY_TASK_ALWAYS_EAGER"] = "0"
     os.environ["SECURE_HSTS_PRELOAD"] = "0"
diff --git a/tests/extras/testutils/decorators.py b/tests/extras/testutils/decorators.py
index a9d6216c..2755f1b9 100644
--- a/tests/extras/testutils/decorators.py
+++ b/tests/extras/testutils/decorators.py
@@ -10,4 +10,6 @@ def requires_env(*envs):
         if os.environ.get(env, None) is None:
             missing.append(env)
 
-    return pytest.mark.skipif(len(missing) > 0, reason=f"Not suitable environment {missing} for current test")
+    return pytest.mark.skipif(
+        len(missing) > 0, reason=f"Not suitable environment {missing} for current test"
+    )
diff --git a/tests/extras/testutils/factories/__init__.py b/tests/extras/testutils/factories/__init__.py
index 2a687069..93130e16 100644
--- a/tests/extras/testutils/factories/__init__.py
+++ b/tests/extras/testutils/factories/__init__.py
@@ -5,20 +5,34 @@
 from factory.django import DjangoModelFactory
 from pytest_factoryboy import register
 
-from .base import AutoRegisterModelFactory, TAutoRegisterModelFactory, factories_registry
+from .base import (
+    AutoRegisterModelFactory,
+    TAutoRegisterModelFactory,
+    factories_registry,
+)
 from .django_celery_beat import PeriodicTaskFactory  # noqa
 from .social import SocialAuthUserFactory  # noqa
-from .user import ExternalSystemFactory, GroupFactory, SuperUserFactory, User, UserFactory  # noqa
+from .user import (  # noqa
+    ExternalSystemFactory,
+    GroupFactory,
+    SuperUserFactory,
+    User,
+    UserFactory,
+)
 from .userrole import UserRole, UserRoleFactory  # noqa
 
 for _, name, _ in pkgutil.iter_modules([str(Path(__file__).parent)]):
     importlib.import_module(f".{name}", __package__)
 
-django_model_factories = {factory._meta.model: factory for factory in DjangoModelFactory.__subclasses__()}
+django_model_factories = {
+    factory._meta.model: factory for factory in DjangoModelFactory.__subclasses__()
+}
 
 
-def get_factory_for_model(_model) -> type[TAutoRegisterModelFactory] | type[DjangoModelFactory]:
+def get_factory_for_model(
+    _model,
+) -> type[TAutoRegisterModelFactory] | type[DjangoModelFactory]:
     class Meta:
         model = _model
 
@@ -29,4 +43,6 @@ class Meta:
     if _model in django_model_factories:
         return django_model_factories[_model]
 
-    return register(type(f"{_model._meta.model_name}AutoCreatedFactory", bases, {"Meta": Meta}))  # noqa
+    return register(
+        type(f"{_model._meta.model_name}AutoCreatedFactory", bases, {"Meta": Meta})
+    )  # noqa
diff --git a/tests/extras/testutils/factories/api.py b/tests/extras/testutils/factories/api.py
index 851e9c41..945a94ba 100644
--- a/tests/extras/testutils/factories/api.py
+++ b/tests/extras/testutils/factories/api.py
@@ -3,7 +3,11 @@
 from testutils.factories import ExternalSystemFactory, UserFactory
 
 from hope_dedup_engine.apps.api.models import DeduplicationSet, HDEToken
-from hope_dedup_engine.apps.api.models.deduplication import Duplicate, IgnoredKeyPair, Image
+from hope_dedup_engine.apps.api.models.deduplication import (
+    Duplicate,
+    IgnoredKeyPair,
+    Image,
+)
 
 
 class TokenFactory(DjangoModelFactory):
diff --git a/tests/extras/testutils/factories/base.py b/tests/extras/testutils/factories/base.py
index 31b5bb13..601ca5dd 100644
--- a/tests/extras/testutils/factories/base.py
+++ b/tests/extras/testutils/factories/base.py
@@ -3,7 +3,9 @@
 import factory
 from factory.base import FactoryMetaClass
 
-TAutoRegisterModelFactory = typing.TypeVar("TAutoRegisterModelFactory", bound="AutoRegisterModelFactory")
+TAutoRegisterModelFactory = typing.TypeVar(
+    "TAutoRegisterModelFactory", bound="AutoRegisterModelFactory"
+)
 
 factories_registry: dict[str, TAutoRegisterModelFactory] = {}
 
@@ -15,5 +17,7 @@ def __new__(mcs, class_name, bases, attrs):
         return new_class
 
 
-class AutoRegisterModelFactory(factory.django.DjangoModelFactory, metaclass=AutoRegisterFactoryMetaClass):
+class AutoRegisterModelFactory(
+    factory.django.DjangoModelFactory, metaclass=AutoRegisterFactoryMetaClass
+):
     pass
diff --git a/tests/extras/testutils/factories/django_celery_beat.py b/tests/extras/testutils/factories/django_celery_beat.py
index 30630356..c691ad40 100644
--- a/tests/extras/testutils/factories/django_celery_beat.py
+++ b/tests/extras/testutils/factories/django_celery_beat.py
@@ -1,7 +1,13 @@
 from django.utils import timezone
 
 import factory
-from django_celery_beat.models import SOLAR_SCHEDULES, ClockedSchedule, IntervalSchedule, PeriodicTask, SolarSchedule
+from django_celery_beat.models import (
+    SOLAR_SCHEDULES,
+    ClockedSchedule,
+    IntervalSchedule,
+    PeriodicTask,
+    SolarSchedule,
+)
 from factory.fuzzy import FuzzyChoice
 
 from .base import AutoRegisterModelFactory
diff --git a/tests/extras/testutils/factories/user.py b/tests/extras/testutils/factories/user.py
index b2af0c3a..abc3f7b2 100644
--- a/tests/extras/testutils/factories/user.py
+++ b/tests/extras/testutils/factories/user.py
@@ -2,7 +2,7 @@
 
 import factory.fuzzy
 
-from hope_dedup_engine.apps.security.models import User, ExternalSystem
+from hope_dedup_engine.apps.security.models import ExternalSystem, User
 
 from .base import AutoRegisterModelFactory
diff --git a/tests/extras/testutils/perms.py b/tests/extras/testutils/perms.py
index 49398c61..e7d64d5d 100644
--- a/tests/extras/testutils/perms.py
+++ b/tests/extras/testutils/perms.py
@@ -46,9 +46,13 @@ def get_group(name=None, permissions=None):
         except ValueError:
             raise ValueError(f"Invalid permission name {permission_name}")
         try:
-            permission = Permission.objects.get(content_type__app_label=app_label, codename=codename)
+            permission = Permission.objects.get(
+                content_type__app_label=app_label, codename=codename
+            )
         except Permission.DoesNotExist:
-            raise Permission.DoesNotExist("Permission `{0}` does not exists", permission_name)
+            raise Permission.DoesNotExist(
+                "Permission `{0}` does not exists", permission_name
+            )
         group.permissions.add(permission)
     return group
diff --git a/tests/faces/conftest.py b/tests/faces/conftest.py
index dba1f743..b92c934a 100644
--- a/tests/faces/conftest.py
+++ b/tests/faces/conftest.py
@@ -20,11 +20,20 @@
 from pytest_mock import MockerFixture
 
 from docker import from_env
-from hope_dedup_engine.apps.core.storage import CV2DNNStorage, HDEAzureStorage, HOPEAzureStorage
+from hope_dedup_engine.apps.core.storage import (
+    CV2DNNStorage,
+    HDEAzureStorage,
+    HOPEAzureStorage,
+)
 from hope_dedup_engine.apps.faces.managers.net import DNNInferenceManager
 from hope_dedup_engine.apps.faces.managers.storage import StorageManager
-from hope_dedup_engine.apps.faces.services.duplication_detector import DuplicationDetector
+from hope_dedup_engine.apps.faces.services.duplication_detector import (
+    DuplicationDetector,
+)
-from hope_dedup_engine.apps.faces.services.image_processor import BlobFromImageConfig, ImageProcessor
+from hope_dedup_engine.apps.faces.services.image_processor import (
+    BlobFromImageConfig,
+    ImageProcessor,
+)
 
 
 @pytest.fixture
@@ -59,11 +68,20 @@ def mock_net_manager(mocker: MockerFixture) -> DNNInferenceManager:
 
 @pytest.fixture
 def mock_image_processor(
-    mocker: MockerFixture, mock_storage_manager, mock_net_manager, mock_open_context_manager
+    mocker: MockerFixture,
+    mock_storage_manager,
+    mock_net_manager,
+    mock_open_context_manager,
 ) -> ImageProcessor:
-    mocker.patch.object(BlobFromImageConfig, "_get_shape", return_value=DEPLOY_PROTO_SHAPE)
+    mocker.patch.object(
+        BlobFromImageConfig, "_get_shape", return_value=DEPLOY_PROTO_SHAPE
+    )
     mock_processor = ImageProcessor()
-    mocker.patch.object(mock_processor.storages.get_storage("images"), "open", return_value=mock_open_context_manager)
+    mocker.patch.object(
+        mock_processor.storages.get_storage("images"),
+        "open",
+        return_value=mock_open_context_manager,
+    )
     yield mock_processor
@@ -87,9 +105,13 @@ def mock_open_context_manager(image_bytes_io):
 @pytest.fixture
 def mock_net():
     mock_net = MagicMock(spec=cv2.dnn_Net)  # Mocking the neural network object
-    mock_detections = np.array([[FACE_DETECTIONS]], dtype=np.float32)  # Mocking the detections array
+    mock_detections = np.array(
+        [[FACE_DETECTIONS]], dtype=np.float32
+    )  # Mocking the detections array
     mock_expected_regions = FACE_REGIONS_VALID
-    mock_net.forward.return_value = mock_detections  # Setting up the forward method of the mock network
+    mock_net.forward.return_value = (
+        mock_detections  # Setting up the forward method of the mock network
+    )
     mock_imdecode = MagicMock(return_value=np.ones(IMAGE_SIZE, dtype=np.uint8))
     mock_resize = MagicMock(return_value=np.ones(RESIZED_IMAGE_SIZE, dtype=np.uint8))
     mock_blob = np.zeros(BLOB_SHAPE)
@@ -111,7 +133,10 @@ def docker_client():
 
 @pytest.fixture
 def mock_redis_client():
-    with patch("redis.Redis.set") as mock_set, patch("redis.Redis.delete") as mock_delete:
+    with (
+        patch("redis.Redis.set") as mock_set,
+        patch("redis.Redis.delete") as mock_delete,
+    ):
         yield mock_set, mock_delete
@@ -120,7 +145,9 @@ def mock_dd_find():
     with patch(
         "hope_dedup_engine.apps.faces.services.duplication_detector.DuplicationDetector.find_duplicates"
     ) as mock_find:
-        mock_find.return_value = (FILENAMES[:2],)  # Assuming the first two are duplicates based on mock data
+        mock_find.return_value = (
+            FILENAMES[:2],
+        )  # Assuming the first two are duplicates based on mock data
         yield mock_find
diff --git a/tests/faces/test_celery_tasks.py b/tests/faces/test_celery_tasks.py
index e75fdb5b..b00372fb 100644
--- a/tests/faces/test_celery_tasks.py
+++ b/tests/faces/test_celery_tasks.py
@@ -4,23 +4,36 @@
 import pytest
 from celery import states
 from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
-from faces_const import CELERY_TASK_DELAYS, CELERY_TASK_NAME, CELERY_TASK_TTL, FILENAMES, IGNORE_PAIRS
+from faces_const import (
+    CELERY_TASK_DELAYS,
+    CELERY_TASK_NAME,
+    CELERY_TASK_TTL,
+    FILENAMES,
+    IGNORE_PAIRS,
+)
 
 from hope_dedup_engine.apps.faces.celery_tasks import deduplicate
 from hope_dedup_engine.apps.faces.utils.celery_utils import _get_hash
 
 
 @pytest.mark.parametrize("lock_is_acquired", [True, False])
-def test_deduplicate_task_locking(mock_redis_client, mock_dd_find, mock_dd, lock_is_acquired):
+def test_deduplicate_task_locking(
+    mock_redis_client, mock_dd_find, mock_dd, lock_is_acquired
+):
     mock_set, mock_delete = mock_redis_client
     mock_set.return_value = lock_is_acquired
     mock_find = mock_dd_find
 
-    with patch("hope_dedup_engine.apps.faces.celery_tasks.DuplicationDetector", return_value=mock_dd):
+    with patch(
+        "hope_dedup_engine.apps.faces.celery_tasks.DuplicationDetector",
+        return_value=mock_dd,
+    ):
         task_result = deduplicate.apply(args=(FILENAMES, IGNORE_PAIRS)).get()
 
     hash_value = _get_hash(FILENAMES, IGNORE_PAIRS)
-    mock_set.assert_called_once_with(f"{CELERY_TASK_NAME}_{hash_value}", "true", nx=True, ex=CELERY_TASK_TTL)
+    mock_set.assert_called_once_with(
+        f"{CELERY_TASK_NAME}_{hash_value}", "true", nx=True, ex=CELERY_TASK_TTL
+    )
     if lock_is_acquired:
         assert task_result == mock_find.return_value
         mock_find.assert_called_once()
@@ -36,10 +49,15 @@
     [
         (CELERY_TASK_DELAYS["SoftTimeLimitExceeded"], SoftTimeLimitExceeded()),
         (CELERY_TASK_DELAYS["TimeLimitExceeded"], TimeLimitExceeded()),
-        (CELERY_TASK_DELAYS["CustomException"], Exception("Simulated custom task failure")),
+        (
+            CELERY_TASK_DELAYS["CustomException"],
+            Exception("Simulated custom task failure"),
+        ),
     ],
 )
-def test_deduplicate_task_exception_handling(mock_redis_client, mock_dd_find, time_control, mock_dd, delay, exception):
+def test_deduplicate_task_exception_handling(
+    mock_redis_client, mock_dd_find, time_control, mock_dd, delay, exception
+):
     mock_set, mock_delete = mock_redis_client
     mock_find = mock_dd_find
     mock_find.side_effect = exception
@@ -48,7 +66,10 @@
 
     with (
         pytest.raises(type(exception)) as exc_info,
-        patch("hope_dedup_engine.apps.faces.celery_tasks.DuplicationDetector", return_value=mock_dd),
+        patch(
+            "hope_dedup_engine.apps.faces.celery_tasks.DuplicationDetector",
+            return_value=mock_dd,
+        ),
     ):
         task = deduplicate.apply(args=(FILENAMES, IGNORE_PAIRS))
         assert exc_info.value == exception
@@ -58,6 +79,10 @@
     assert task.traceback is not None
 
     hash_value = _get_hash(FILENAMES, IGNORE_PAIRS)
-    mock_set.assert_called_once_with(f"{CELERY_TASK_NAME}_{hash_value}", "true", nx=True, ex=3600)
-    mock_delete.assert_called_once_with(f"{CELERY_TASK_NAME}_{hash_value}")  # Lock is released
+    mock_set.assert_called_once_with(
+        f"{CELERY_TASK_NAME}_{hash_value}", "true", nx=True, ex=3600
+    )
+    mock_delete.assert_called_once_with(
+        f"{CELERY_TASK_NAME}_{hash_value}"
+    )  # Lock is released
     mock_find.assert_called_once()
diff --git a/tests/faces/test_duplicate_groups_builder.py b/tests/faces/test_duplicate_groups_builder.py
index a5aca5ec..2cf2cae5 100644
--- a/tests/faces/test_duplicate_groups_builder.py
+++ b/tests/faces/test_duplicate_groups_builder.py
@@ -2,14 +2,24 @@
 
 import pytest
 
-from hope_dedup_engine.apps.faces.utils.duplicate_groups_builder import DuplicateGroupsBuilder
+from hope_dedup_engine.apps.faces.utils.duplicate_groups_builder import (
+    DuplicateGroupsBuilder,
+)
 
 
 @pytest.mark.parametrize(
     "checked, threshold, expected_groups",
     [
-        ({("path1", "path2", 0.2), ("path2", "path3", 0.1)}, 0.3, (("path1", "path2"), ("path3", "path2"))),
-        ({("path1", "path2", 0.2), ("path2", "path3", 0.4)}, 0.3, (("path1", "path2"),)),
+        (
+            {("path1", "path2", 0.2), ("path2", "path3", 0.1)},
+            0.3,
+            (("path1", "path2"), ("path3", "path2")),
+        ),
+        (
+            {("path1", "path2", 0.2), ("path2", "path3", 0.4)},
+            0.3,
+            (("path1", "path2"),),
+        ),
         ({("path1", "path2", 0.4), ("path2", "path3", 0.4)}, 0.3, ()),
         (
             {("path1", "path2", 0.2), ("path2", "path3", 0.2), ("path3", "path4", 0.2)},
@@ -20,11 +30,16 @@
 )
 def test_duplicate_groups_builder(checked, threshold, expected_groups):
     def sort_nested_tuples(nested_tuples: tuple[tuple[str]]) -> tuple[tuple[str]]:
-        sorted_inner = tuple(tuple(sorted(inner_tuple)) for inner_tuple in nested_tuples)
+        sorted_inner = tuple(
+            tuple(sorted(inner_tuple)) for inner_tuple in nested_tuples
+        )
         sorted_outer = tuple(sorted(sorted_inner))
         return sorted_outer
 
     mock_config = MagicMock()
     mock_config.FACE_DISTANCE_THRESHOLD = threshold
-    with patch("hope_dedup_engine.apps.faces.utils.duplicate_groups_builder.config", mock_config):
+    with patch(
+        "hope_dedup_engine.apps.faces.utils.duplicate_groups_builder.config",
+        mock_config,
+    ):
         DuplicateGroupsBuilder.build(checked)
diff --git a/tests/faces/test_forms.py b/tests/faces/test_forms.py
index 4fcc3bb5..da337007 100644
--- a/tests/faces/test_forms.py
+++ b/tests/faces/test_forms.py
@@ -13,9 +13,15 @@ def test_to_python_valid_case():
 @pytest.mark.parametrize(
     "input_value, expected_error_message",
     [
-        ("104.0, 177.0", "Enter a valid tuple of three float values separated by commas and spaces"),
+        (
+            "104.0, 177.0",
+            "Enter a valid tuple of three float values separated by commas and spaces",
+        ),
         ("104.0, 177.0, 256.0", "Each value must be between -255 and 255."),
-        ("104.0, abc, 123.0", "Enter a valid tuple of three float values separated by commas and spaces"),
+        (
+            "104.0, abc, 123.0",
+            "Enter a valid tuple of three float values separated by commas and spaces",
+        ),
     ],
 )
 def test_to_python_invalid_cases(input_value, expected_error_message):
@@ -27,7 +33,10 @@ def test_to_python_invalid_cases(input_value, expected_error_message):
 
 @pytest.mark.parametrize(
     "input_value, expected_output",
-    [((104.0, 177.0, 123.0), "104.0, 177.0, 123.0"), ("104.0, 177.0, 123.0", "104.0, 177.0, 123.0")],
+    [
+        ((104.0, 177.0, 123.0), "104.0, 177.0, 123.0"),
+        ("104.0, 177.0, 123.0", "104.0, 177.0, 123.0"),
+    ],
 )
 def test_prepare_value(input_value, expected_output):
     field = MeanValuesTupleField()
diff --git a/tests/faces/test_image_processor.py b/tests/faces/test_image_processor.py
index 747b253f..64ba64f8 100644
--- a/tests/faces/test_image_processor.py
+++ b/tests/faces/test_image_processor.py
@@ -18,25 +18,42 @@
 from hope_dedup_engine.apps.faces.managers.net import DNNInferenceManager
 from hope_dedup_engine.apps.faces.managers.storage import StorageManager
-from hope_dedup_engine.apps.faces.services.image_processor import BlobFromImageConfig, FaceEncodingsConfig
+from hope_dedup_engine.apps.faces.services.image_processor import (
+    BlobFromImageConfig,
+    FaceEncodingsConfig,
+)
 
 
-def test_init_creates_expected_attributes(mock_net_manager: DNNInferenceManager, mock_image_processor):
+def test_init_creates_expected_attributes(
+    mock_net_manager: DNNInferenceManager, mock_image_processor
+):
     assert isinstance(mock_image_processor.storages, StorageManager)
     assert mock_image_processor.net is mock_net_manager
 
     assert isinstance(mock_image_processor.blob_from_image_cfg, BlobFromImageConfig)
-    assert mock_image_processor.blob_from_image_cfg.scale_factor == config.BLOB_FROM_IMAGE_SCALE_FACTOR
+    assert (
+        mock_image_processor.blob_from_image_cfg.scale_factor
+        == config.BLOB_FROM_IMAGE_SCALE_FACTOR
+    )
     assert isinstance(mock_image_processor.face_encodings_cfg, FaceEncodingsConfig)
-    assert mock_image_processor.face_encodings_cfg.num_jitters == config.FACE_ENCODINGS_NUM_JITTERS
+    assert (
+        mock_image_processor.face_encodings_cfg.num_jitters
+        == config.FACE_ENCODINGS_NUM_JITTERS
+    )
     assert mock_image_processor.face_encodings_cfg.model == config.FACE_ENCODINGS_MODEL
-    assert mock_image_processor.face_detection_confidence == config.FACE_DETECTION_CONFIDENCE
+    assert (
+        mock_image_processor.face_detection_confidence
+        == config.FACE_DETECTION_CONFIDENCE
+    )
     assert mock_image_processor.distance_threshold == config.FACE_DISTANCE_THRESHOLD
     assert mock_image_processor.nms_threshold == config.NMS_THRESHOLD
 
 
 def test_get_shape_valid(mock_prototxt_file):
     with patch("builtins.open", mock_prototxt_file):
-        config = BlobFromImageConfig(scale_factor=BLOB_FROM_IMAGE_SCALE_FACTOR, mean_values=BLOB_FROM_IMAGE_MEAN_VALUES)
+        config = BlobFromImageConfig(
+            scale_factor=BLOB_FROM_IMAGE_SCALE_FACTOR,
+            mean_values=BLOB_FROM_IMAGE_MEAN_VALUES,
+        )
         shape = config._get_shape()
         assert shape == DEPLOY_PROTO_SHAPE
@@ -44,16 +61,23 @@
 def test_get_shape_invalid():
     with patch("builtins.open", mock_open(read_data="invalid_prototxt_content")):
         with pytest.raises(ValidationError):
-            BlobFromImageConfig(scale_factor=BLOB_FROM_IMAGE_SCALE_FACTOR, mean_values=BLOB_FROM_IMAGE_MEAN_VALUES)
+            BlobFromImageConfig(
+                scale_factor=BLOB_FROM_IMAGE_SCALE_FACTOR,
+                mean_values=BLOB_FROM_IMAGE_MEAN_VALUES,
+            )
 
 
-def test_get_face_detections_dnn_with_detections(mock_image_processor, mock_net, mock_open_context_manager):
+def test_get_face_detections_dnn_with_detections(
+    mock_image_processor, mock_net, mock_open_context_manager
+):
     dnn, imdecode, resize, _, expected_regions = mock_net
     with (
         patch("cv2.imdecode", imdecode),
         patch("cv2.resize", resize),
         patch.object(
-            mock_image_processor.storages.get_storage("images"), "open", return_value=mock_open_context_manager
+            mock_image_processor.storages.get_storage("images"),
+            "open",
+            return_value=mock_open_context_manager,
         ),
         patch.object(mock_image_processor, "net", dnn),
     ):
@@ -66,7 +90,9 @@
 
 
 def test_get_face_detections_dnn_no_detections(mock_image_processor):
-    with (patch.object(mock_image_processor, "_get_face_detections_dnn", return_value=[]),):
+    with (
+        patch.object(mock_image_processor, "_get_face_detections_dnn", return_value=[]),
+    ):
         face_regions = mock_image_processor._get_face_detections_dnn()
         assert len(face_regions) == 0
@@ -75,10 +101,14 @@ def test_encode_face(mock_image_processor, image_bytes_io, face_regions):
     with (
         patch.object(
-            mock_image_processor.storages.get_storage("images"), "open", side_effect=image_bytes_io.fake_open
+            mock_image_processor.storages.get_storage("images"),
+            "open",
+            side_effect=image_bytes_io.fake_open,
         ) as mocked_image_open,
         patch.object(
-            mock_image_processor.storages.get_storage("encoded"), "open", side_effect=image_bytes_io.fake_open
+            mock_image_processor.storages.get_storage("encoded"),
+            "open",
+            side_effect=image_bytes_io.fake_open,
         ) as mocked_encoded_open,
         patch.object(
             mock_image_processor, "_get_face_detections_dnn", return_value=face_regions
@@ -109,11 +139,15 @@
         (str("face_encodings"), "Test face_encodings exception"),
     ),
 )
-def test_encode_face_exception_handling(mock_image_processor, mock_net, method: str, exception_str):
+def test_encode_face_exception_handling(
+    mock_image_processor, mock_net, method: str, exception_str
+):
     dnn, imdecode, *_ = mock_net
     with (
         pytest.raises(Exception, match=exception_str),
-        patch.object(face_recognition, method, side_effect=Exception(exception_str)) as mock_exception,
+        patch.object(
+            face_recognition, method, side_effect=Exception(exception_str)
+        ) as mock_exception,
         patch.object(mock_image_processor, "net", dnn),
         patch("cv2.imdecode", imdecode),
         patch.object(mock_image_processor.logger, "exception") as mock_logger_exception,
diff --git a/tests/faces/test_net_manager.py b/tests/faces/test_net_manager.py
index 3a080bd8..0b3cd789 100644
--- a/tests/faces/test_net_manager.py
+++ b/tests/faces/test_net_manager.py
@@ -5,7 +5,9 @@ def test_successful(mock_storage_manager, mock_net_manager):
     dnn_manager = DNNInferenceManager(mock_storage_manager.storages["cv2dnn"])
 
-    mock_net_manager.setPreferableBackend.assert_called_once_with(int(config.DNN_BACKEND))
+    mock_net_manager.setPreferableBackend.assert_called_once_with(
+        int(config.DNN_BACKEND)
+    )
     mock_net_manager.setPreferableTarget.assert_called_once_with(int(config.DNN_TARGET))
 
     assert isinstance(dnn_manager, DNNInferenceManager)
diff --git a/tests/faces/test_storage_manager.py b/tests/faces/test_storage_manager.py
index b211de8a..6f124873 100644
--- a/tests/faces/test_storage_manager.py
+++ b/tests/faces/test_storage_manager.py
@@ -1,6 +1,10 @@
 import pytest
 
-from hope_dedup_engine.apps.core.storage import CV2DNNStorage, HDEAzureStorage, HOPEAzureStorage
+from hope_dedup_engine.apps.core.storage import (
+    CV2DNNStorage,
+    HDEAzureStorage,
+    HOPEAzureStorage,
+)
 from hope_dedup_engine.apps.faces.exceptions import StorageKeyError
 from hope_dedup_engine.apps.faces.managers.storage import StorageManager
diff --git a/tests/test_commands.py b/tests/test_commands.py
index 488e3fc5..1a0e3697 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -25,14 +25,22 @@ def environment():
     }
 
 
-@pytest.mark.parametrize("static_root", ["static", ""], ids=["static_missing", "static_existing"])
+@pytest.mark.parametrize(
+    "static_root", ["static", ""], ids=["static_missing", "static_existing"]
+)
 @pytest.mark.parametrize("static", [True, False], ids=["static", "no-static"])
 @pytest.mark.parametrize("verbosity", [1, 0], ids=["verbose", ""])
 @pytest.mark.parametrize("migrate", [True, False], ids=["migrate", ""])
-def test_upgrade_init(verbosity, migrate, monkeypatch, environment, static, static_root, tmp_path):
+def test_upgrade_init(
+    verbosity, migrate, monkeypatch, environment, static, static_root, tmp_path
+):
     static_root_path = tmp_path / static_root
     out = StringIO()
-    with mock.patch.dict(os.environ, {**environment, "STATIC_ROOT": str(static_root_path.absolute())}, clear=True):
+    with mock.patch.dict(
+        os.environ,
+        {**environment, "STATIC_ROOT": str(static_root_path.absolute())},
+        clear=True,
+    ):
         call_command(
             "upgrade",
             static=static,
@@ -118,12 +126,18 @@ def test_env_raise(mocked_responses):
 
 
 def test_upgrade_exception(mocked_responses, environment):
-    with mock.patch("hope_dedup_engine.apps.core.management.commands.upgrade.call_command") as m:
+    with mock.patch(
+        "hope_dedup_engine.apps.core.management.commands.upgrade.call_command"
+    ) as m:
         m.side_effect = Exception
         with pytest.raises(SystemExit):
            call_command("upgrade")
 
     out = StringIO()
-    with mock.patch.dict(os.environ, {"ADMIN_EMAIL": "2222", "ADMIN_USER": "admin", **environment}, clear=True):
+    with mock.patch.dict(
+        os.environ,
+        {"ADMIN_EMAIL": "2222", "ADMIN_USER": "admin", **environment},
+        clear=True,
+    ):
         with pytest.raises(SystemExit):
             call_command("upgrade", stdout=out, check=True, admin_email="")
diff --git a/tests/test_state.py b/tests/test_state.py
index 90502366..c3cc79c6 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -42,7 +42,9 @@ def test_configure(state):
 
 @freeze_time("2000-01-01T00:00:00Z")
 def test_add_cookies(state):
-    state.add_cookie("test", 22, 3600, None, "/path/", "domain.example.com", True, True, "lax")
+    state.add_cookie(
+        "test", 22, 3600, None, "/path/", "domain.example.com", True, True, "lax"
+    )
     r: HttpResponse = HttpResponse()
     state.set_cookies(r)
diff --git a/tests/utils/test_utils_http.py b/tests/utils/test_utils_http.py
index 2a696f2c..51ab2897 100644
--- a/tests/utils/test_utils_http.py
+++ b/tests/utils/test_utils_http.py
@@ -4,7 +4,12 @@
 import pytest
 
 from hope_dedup_engine.state import state
-from hope_dedup_engine.utils.http import absolute_reverse, absolute_uri, get_server_host, get_server_url
+from hope_dedup_engine.utils.http import (
+    absolute_reverse,
+    absolute_uri,
+    get_server_host,
+    get_server_url,
+)
 
 if TYPE_CHECKING:
     from django.http import HttpRequest

From 82d9a8cffd66008097b5f75f6a58823e84f6c5bf Mon Sep 17 00:00:00 2001
From: saxix
Date: Fri, 21 Jun 2024 22:57:53 +0200
Subject: [PATCH 31/31] 2024-06-21 22:57 - updates CI ci:all

---
 .github/workflows/lint.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 85f27b81..2ba79248 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -52,7 +52,7 @@ jobs:
     if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.12'
       - name: Install requirements
@@ -69,7 +69,7 @@ jobs:
     if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.12'
       - name: Install requirements
@@ -82,7 +82,7 @@ jobs:
     if: github.event.pull_request.draft == false && needs.changes.outputs.lint
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.12'
       - name: Install requirements