diff --git a/.envrc b/.envrc
index 3bd875aed81..64aea05c728 100644
--- a/.envrc
+++ b/.envrc
@@ -22,6 +22,12 @@ export QE_LOG_LEVEL=debug # Set it to "trace" to enable query-graph debugging lo
 # export PRISMA_RENDER_DOT_FILE=1 # Uncomment to enable rendering a dot file of the Query Graph from an executed query.
 # export FMT_SQL=1 # Uncomment it to enable logging formatted SQL queries
 
+### Uncomment to run driver adapters tests. See query-engine-driver-adapters.yml workflow for how tests run in CI.
+# export EXTERNAL_TEST_EXECUTOR="$(pwd)/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"
+# export DRIVER_ADAPTER=pg # Set to pg, neon or planetscale
+# export PRISMA_DISABLE_QUAINT_EXECUTORS=1 # Disable quaint executors for driver adapters
+# export DRIVER_ADAPTER_URL_OVERRIDE="postgres://USER:PASSWORD@DATABASExxxx" # Override the database url for the driver adapter tests
+
 # Mongo image requires additional wait time on arm arch for some reason.
 if uname -a | grep -q 'arm64'; then
   export INIT_WAIT_SEC="10"
@@ -36,7 +42,9 @@ fi
 
 # Set up env vars and build inputs from flake.nix automatically for nix users.
 # If you don't use nix, you can safely ignore this.
-if command -v nix &> /dev/null
+# You can set the DISABLE_NIX environment variable if you're in an environment
+# where nix is pre-installed (e.g. gitpod) but you don't want to use it.
+if command -v nix &> /dev/null && [ -z ${DISABLE_NIX+x} ]
 then
   if nix flake metadata > /dev/null; then
     if type nix_direnv_watch_file &> /dev/null; then
diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml
new file mode 100644
index 00000000000..e3a233339b1
--- /dev/null
+++ b/.github/workflows/driver-adapter-smoke-tests.yml
@@ -0,0 +1,78 @@
+name: Driver Adapters, Smoke Tests
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    paths-ignore:
+      - '.buildkite/**'
+      - '*.md'
+      - 'LICENSE'
+      - 'CODEOWNERS'
+      - 'renovate.json'
+
+jobs:
+  driver-adapter-smoke-tests:
+    name: Smoke tests for adapter ${{ matrix.adapter }}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        adapter: ["neon:ws", "neon:http", planetscale, pg]
+
+    runs-on: ubuntu-latest
+
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: postgres
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+
+    env:
+      JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }}
+      JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }}
+      JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }}
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: dtolnay/rust-toolchain@stable
+
+      - uses: pnpm/action-setup@v2
+        with:
+          version: 8
+      - uses: actions/setup-node@v3
+        with:
+          node-version: 18
+          #cache: 'pnpm'
+
+      - name: Compile Query Engine
+        run: cargo build -p query-engine-node-api
+
+      - name: Install Dependencies (Driver Adapters)
+        run: pnpm install
+        working-directory: ./query-engine/driver-adapters/js
+      - name: Build Driver Adapters
+        run: pnpm build
+        working-directory: ./query-engine/driver-adapters/js
+
+      - run: pnpm prisma:${{ matrix.adapter }}
+        working-directory: ./query-engine/driver-adapters/js/smoke-test-js
+      - run: pnpm ${{ matrix.adapter }}:libquery
+        working-directory: ./query-engine/driver-adapters/js/smoke-test-js
+      - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine!
- from Npm) + run: pnpm ${{ matrix.adapter }}:client + if: always() + working-directory: ./query-engine/driver-adapters/js/smoke-test-js + + - name: pnpm errors + run: pnpm errors + if: always() + working-directory: ./query-engine/driver-adapters/js/smoke-test-js \ No newline at end of file diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml index 264ee975303..c20d0612727 100644 --- a/.github/workflows/quaint.yml +++ b/.github/workflows/quaint.yml @@ -17,13 +17,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml new file mode 100644 index 00000000000..dda32259ff8 --- /dev/null +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -0,0 +1,84 @@ +name: Driver Adapters +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust-query-engine-tests: + name: "Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}" + + strategy: + fail-fast: false + matrix: + adapter: + - name: "pg" + setup_task: "dev-pg-postgres13" + - name: "neon:ws" + setup_task: "dev-neon-ws-postgres13" + node_version: ["18"] + env: + LOG_LEVEL: "info" # Set to "debug" to trace the query engine and node process running the driver adapter + LOG_QUERIES: "y" + RUST_LOG: "info" + RUST_LOG_FORMAT: "devel" + RUST_BACKTRACE: "1" + CLICOLOR_FORCE: "1" + CLOSED_TX_CLEANUP: "2" + SIMPLE_TEST_MODE: "1" + QUERY_BATCH_SIZE: "10" + WORKSPACE_ROOT: ${{ github.workspace }} + + runs-on: buildjet-16vcpu-ubuntu-2004 + steps: + - uses: actions/checkout@v4 + + - name: "Setup Node.js" + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node_version }} + + - name: "Setup pnpm" + uses: pnpm/action-setup@v2 + 
with: + version: 8 + + - name: "Get pnpm store directory" + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV + + - uses: actions/cache@v3 + name: "Setup pnpm cache" + with: + path: ${{ env.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm-store- + + - name: "Login to Docker Hub" + uses: docker/login-action@v2 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - run: make ${{ matrix.adapter.setup_task }} + + - uses: dtolnay/rust-toolchain@stable + + - name: "Run tests" + run: cargo test --package query-engine-tests -- --test-threads=1 + + diff --git a/Cargo.lock b/Cargo.lock index a9bc754e0f2..bde812b35e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -332,6 +332,7 @@ dependencies = [ "enumflags2", "indoc", "insta", + "query-engine-metrics", "query-engine-tests", "query-tests-setup", "reqwest", @@ -3218,7 +3219,7 @@ dependencies = [ [[package]] name = "postgres-native-tls" version = "0.5.0" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "native-tls", "tokio", @@ -3229,7 +3230,7 @@ dependencies = [ [[package]] name = "postgres-protocol" version = "0.6.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "base64 0.13.1", "byteorder", @@ -3246,7 +3247,7 @@ dependencies = [ [[package]] name = "postgres-types" version = "0.2.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "bit-vec", "bytes", @@ -3758,6 +3759,7 @@ dependencies = [ "indexmap 1.9.3", "indoc", "itertools", + "jsonrpc-core", "nom", "once_cell", "parse-hyperlinks", @@ -3772,6 +3774,7 @@ dependencies = [ "request-handlers", "serde", "serde_json", + "sql-query-connector", "strip-ansi-escapes", "thiserror", "tokio", @@ -5238,7 +5241,7 @@ dependencies = [ [[package]] name = "tokio-postgres" version = "0.7.7" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "async-trait", "byteorder", @@ -5576,7 +5579,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", - "rand 0.8.5", + "rand 0.7.3", "static_assertions", ] @@ -5810,9 +5813,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5820,16 +5823,16 @@ dependencies = [ [[package]] name = 
"wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -5847,9 +5850,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5857,22 +5860,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-logger" diff --git a/Cargo.toml b/Cargo.toml index ba36373f184..1934e957f35 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,10 +56,8 @@ napi-derive = "2.12.4" path = "quaint" features = [ "bigdecimal", - "chrono", "expose-drivers", "fmt-sql", - "json", "mssql", "mysql", "pooled", diff --git a/Makefile b/Makefile index 319bb90b30b..b48d65e01b8 100644 --- a/Makefile +++ b/Makefile @@ -107,6 +107,17 @@ start-postgres13: dev-postgres13: start-postgres13 cp $(CONFIG_PATH)/postgres13 $(CONFIG_FILE) +start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13 + +dev-pg-postgres13: start-pg-postgres13 + cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) + +start-neon-postgres13: build-qe-napi build-connector-kit-js + docker compose -f docker-compose.yml up -d --remove-orphans neon-postgres13 + +dev-neon-ws-postgres13: start-neon-postgres13 + cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) + start-postgres14: docker compose -f docker-compose.yml up -d --remove-orphans postgres14 @@ -245,6 +256,12 @@ dev-vitess_8_0: start-vitess_8_0 # Local dev commands # ###################### +build-qe-napi: + cargo build --package query-engine-node-api + +build-connector-kit-js: + cd query-engine/driver-adapters/js && pnpm i && pnpm build + # Quick schema validation of whatever you have in the dev_datamodel.prisma file. 
validate: cargo run --bin test-cli -- validate-datamodel dev_datamodel.prisma diff --git a/docker-compose.yml b/docker-compose.yml index a434d601e5a..51192305c2f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -106,6 +106,21 @@ services: networks: - databases + neon-postgres13: + image: ghcr.io/neondatabase/wsproxy:latest + environment: + # the port of the postgres13 within the databases network + APPEND_PORT: 'postgres13:5432' + ALLOW_ADDR_REGEX: '.*' + LOG_TRAFFIC: 'true' + LOG_CONN_INFO: 'true' + ports: + - '5488:80' + depends_on: + - postgres13 + networks: + - databases + postgres14: image: postgres:14 restart: always diff --git a/flake.lock b/flake.lock index 725613da576..6f11c280dd3 100644 --- a/flake.lock +++ b/flake.lock @@ -14,11 +14,11 @@ ] }, "locked": { - "lastModified": 1693163878, - "narHash": "sha256-HXuyMUVaRSoIA602jfFuYGXt6AMZ+WUxuvLq8iJmYTA=", + "lastModified": 1693787605, + "narHash": "sha256-rwq5U8dy+a9JFny/73L0SJu1GfWwATMPMTp7D+mjHy8=", "owner": "ipetkov", "repo": "crane", - "rev": "43db881168bc65b568d36ceb614a0fc8b276191b", + "rev": "8b4f7a4dab2120cf41e7957a28a853f45016bd9d", "type": "github" }, "original": { @@ -50,11 +50,11 @@ ] }, "locked": { - "lastModified": 1688466019, - "narHash": "sha256-VeM2akYrBYMsb4W/MmBo1zmaMfgbL4cH3Pu8PGyIwJ0=", + "lastModified": 1693611461, + "narHash": "sha256-aPODl8vAgGQ0ZYFIRisxYG5MOGSkIczvu2Cd8Gb9+1Y=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "8e8d955c22df93dbe24f19ea04f47a74adbdc5ec", + "rev": "7f53fdb7bdc5bb237da7fefef12d099e4fd611ca", "type": "github" }, "original": { @@ -70,11 +70,11 @@ ] }, "locked": { - "lastModified": 1689068808, - "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", + "lastModified": 1692799911, + "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=", "owner": "numtide", "repo": "flake-utils", - "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", + "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44", "type": "github" }, "original": { @@ -90,11 +90,11 @@ ] }, "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", + "lastModified": 1694102001, + "narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=", "owner": "hercules-ci", "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", + "rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6", "type": "github" }, "original": { @@ -105,11 +105,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1689192006, - "narHash": "sha256-QM0f0d8oPphOTYJebsHioR9+FzJcy1QNIzREyubB91U=", + "lastModified": 1694422566, + "narHash": "sha256-lHJ+A9esOz9vln/3CJG23FV6Wd2OoOFbDeEs4cMGMqc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "2de8efefb6ce7f5e4e75bdf57376a96555986841", + "rev": "3a2786eea085f040a66ecde1bc3ddc7099f6dbeb", "type": "github" }, "original": { @@ -139,11 +139,11 @@ ] }, "locked": { - "lastModified": 1693361441, - "narHash": "sha256-TRFdMQj9wSKMduNqe/1xF8TzcPWEdcn/hKWcVcZ5fO8=", + "lastModified": 1694484610, + "narHash": "sha256-aeSDkp7fkAqtVjW3QUn7vq7BKNlFul/BiGgdv7rK+mA=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "1fb2aa49635e9f30b6fa211ab7c454f7175e1ba3", + "rev": "c5b977a7e6a295697fa1f9c42174fd6313b38df4", "type": "github" }, "original": { diff --git a/nix/all-engines.nix b/nix/all-engines.nix index 9235060463a..0e6a1c05b23 100644 --- a/nix/all-engines.nix +++ b/nix/all-engines.nix @@ -1,4 +1,4 @@ -{ pkgs, flakeInputs, lib, self', ... }: +{ pkgs, flakeInputs, lib, self', rustToolchain, ... 
}: let stdenv = pkgs.clangStdenv; @@ -15,7 +15,7 @@ let src = srcPath; name = "prisma-engines-source"; }; - craneLib = flakeInputs.crane.mkLib pkgs; + craneLib = (flakeInputs.crane.mkLib pkgs).overrideToolchain rustToolchain.default; deps = craneLib.vendorCargoDeps { inherit src; }; libSuffix = stdenv.hostPlatform.extensions.sharedLibrary; in @@ -34,6 +34,7 @@ in ] ++ lib.optionals stdenv.isDarwin [ perl # required to build openssl darwin.apple_sdk.frameworks.Security + iconv ]; configurePhase = '' @@ -53,13 +54,15 @@ in cp target/release/prisma-fmt $out/bin/ cp target/release/libquery_engine${libSuffix} $out/lib/libquery_engine.node ''; + + dontStrip = true; }; packages.test-cli = lib.makeOverridable ({ profile }: stdenv.mkDerivation { name = "test-cli"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = "cargo build --profile=${profile} --bin=test-cli"; @@ -76,7 +79,7 @@ in ({ profile }: stdenv.mkDerivation { name = "query-engine-bin"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = "cargo build --profile=${profile} --bin=query-engine"; @@ -96,7 +99,7 @@ in ({ profile }: stdenv.mkDerivation { name = "query-engine-bin-and-lib"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = '' cargo build --profile=${profile} --bin=query-engine diff --git a/nix/args.nix b/nix/args.nix index d3a2e54dbc8..2254b7f5b13 100644 --- a/nix/args.nix +++ b/nix/args.nix @@ -4,10 +4,10 @@ let overlays = [ flakeInputs.rust-overlay.overlays.default - (self: super: - let toolchain = super.rust-bin.stable.latest; in - { cargo = toolchain.minimal; rustc = toolchain.minimal; rustToolchain = toolchain; }) ]; - in - { pkgs = import flakeInputs.nixpkgs { inherit system overlays; }; }; + in rec + { + pkgs = import flakeInputs.nixpkgs { inherit system overlays; }; + rustToolchain = pkgs.rust-bin.stable.latest; + }; } diff --git a/nix/shell.nix b/nix/shell.nix index c30ca9080d4..94661c972d0 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -1,7 +1,8 @@ -{ self', pkgs, ... }: +{ self', pkgs, rustToolchain, ... 
}:
 
 let
-  devToolchain = pkgs.rustToolchain.default.override { extensions = [ "rust-analyzer" "rust-src" ]; };
+  devToolchain = rustToolchain.default.override { extensions = [ "rust-analyzer" "rust-src" ]; };
+  nodejs = pkgs.nodejs_latest;
 in
 {
   devShells.default = pkgs.mkShell {
@@ -9,9 +10,9 @@ in
       devToolchain
       pkgs.llvmPackages_latest.bintools
 
-      pkgs.nodejs
-      pkgs.nodePackages.typescript-language-server
-      pkgs.nodePackages.pnpm
+      nodejs
+      nodejs.pkgs.typescript-language-server
+      nodejs.pkgs.pnpm
     ];
     inputsFrom = [ self'.packages.prisma-engines ];
     shellHook = pkgs.lib.optionalString pkgs.stdenv.isLinux
diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs
index b9dbdc58067..1037192b1a9 100644
--- a/prisma-fmt/src/code_actions.rs
+++ b/prisma-fmt/src/code_actions.rs
@@ -99,6 +99,16 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec<CodeActionOrCommand>
                 complete_relation.referencing_field(),
             );
         }
+
+        if validated_schema.relation_mode().uses_foreign_keys() {
+            relation_mode::replace_set_default_mysql(
+                &mut actions,
+                &params,
+                validated_schema.db.source(),
+                complete_relation,
+                config,
+            )
+        }
     }
 }
diff --git a/prisma-fmt/src/code_actions/relation_mode.rs b/prisma-fmt/src/code_actions/relation_mode.rs
index 28d9018220e..751fb956073 100644
--- a/prisma-fmt/src/code_actions/relation_mode.rs
+++ b/prisma-fmt/src/code_actions/relation_mode.rs
@@ -1,5 +1,5 @@
 use lsp_types::{CodeAction, CodeActionKind, CodeActionOrCommand};
-use psl::schema_ast::ast::SourceConfig;
+use psl::{parser_database::walkers::CompleteInlineRelationWalker, schema_ast::ast::SourceConfig, Configuration};
 
 pub(crate) fn edit_referential_integrity(
     actions: &mut Vec<CodeActionOrCommand>,
@@ -35,3 +35,51 @@ pub(crate) fn edit_referential_integrity(
 
     actions.push(CodeActionOrCommand::CodeAction(action))
 }
+
+pub(crate) fn replace_set_default_mysql(
+    actions: &mut Vec<CodeActionOrCommand>,
+    params: &lsp_types::CodeActionParams,
+    schema: &str,
+    relation: CompleteInlineRelationWalker<'_>,
+    config: &Configuration,
+) {
+    let datasource = match config.datasources.first() {
+        Some(ds) => ds,
+        None => return,
+    };
+
+    if datasource.active_connector.provider_name() != "mysql" {
+        return;
+    }
+
+    let span = match relation.on_update_span() {
+        Some(span) => span,
+        None => return,
+    };
+
+    let span_diagnostics = match super::diagnostics_for_span(schema, &params.context.diagnostics, span) {
+        Some(sd) => sd,
+        None => return,
+    };
+
+    let diagnostics = match super::filter_diagnostics(
+        span_diagnostics,
+        "MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors.")
+    {
+        Some(value) => value,
+        None => return,
+    };
+
+    let edit = super::create_text_edit(schema, "NoAction".to_owned(), false, span, params);
+
+    let action = CodeAction {
+        title: r#"Replace SetDefault with NoAction"#.to_owned(),
+
+        kind: Some(CodeActionKind::QUICKFIX),
+        edit: Some(edit),
+        diagnostics: Some(diagnostics),
+        ..Default::default()
+    };
+
+    actions.push(CodeActionOrCommand::CodeAction(action))
+}
diff --git a/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json
new file mode 100644
index 00000000000..d31f54355c3
--- /dev/null
+++ b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json
@@ -0,0 +1,41 @@
+[
+  {
+    "title": "Replace SetDefault with NoAction",
+    "kind": "quickfix",
+    "diagnostics": [
+      {
+        "range": {
+          "start": {
+            "line":
14, + "character": 62 + }, + "end": { + "line": 14, + "character": 82 + } + }, + "severity": 2, + "message": "MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors. Read more at https://pris.ly/d/mysql-set-default " + } + ], + "edit": { + "changes": { + "file:///path/to/schema.prisma": [ + { + "range": { + "start": { + "line": 14, + "character": 72 + }, + "end": { + "line": 14, + "character": 82 + } + }, + "newText": "NoAction" + } + ] + } + } + } +] \ No newline at end of file diff --git a/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma new file mode 100644 index 00000000000..b1395255300 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma @@ -0,0 +1,29 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "mysql" + url = env("DATABASE_URL") + relationMode = "foreignKeys" +} + +/// multi line +/// commennttt +model Foo { + id Int @id + bar Bar @relation(fields: [bar_id], references: [id], onUpdate: SetDefault) + bar_id Int @unique + t Test +} + +model Bar { + id Int @id + foo Foo? +} + +// This is a test enum. +enum Test { + TestUno + TestDue +} diff --git a/prisma-fmt/tests/code_actions/tests.rs b/prisma-fmt/tests/code_actions/tests.rs index 41035ed65d9..e76179204d9 100644 --- a/prisma-fmt/tests/code_actions/tests.rs +++ b/prisma-fmt/tests/code_actions/tests.rs @@ -25,6 +25,7 @@ scenarios! { one_to_one_referencing_side_misses_unique_compound_field_indentation_four_spaces relation_mode_prisma_missing_index relation_mode_referential_integrity + relation_mode_mysql_foreign_keys_set_default multi_schema_one_model multi_schema_one_model_one_enum multi_schema_two_models diff --git a/prisma-schema-wasm/Cargo.toml b/prisma-schema-wasm/Cargo.toml index 6387aeedfbb..248c726c9ba 100644 --- a/prisma-schema-wasm/Cargo.toml +++ b/prisma-schema-wasm/Cargo.toml @@ -7,6 +7,6 @@ edition = "2021" crate-type = ["cdylib"] [dependencies] -wasm-bindgen = "=0.2.84" +wasm-bindgen = "=0.2.87" wasm-logger = { version = "0.2.0", optional = true } prisma-fmt = { path = "../prisma-fmt" } diff --git a/psl/parser-database/src/context.rs b/psl/parser-database/src/context.rs index 54c06ddd9a4..45014695302 100644 --- a/psl/parser-database/src/context.rs +++ b/psl/parser-database/src/context.rs @@ -117,7 +117,7 @@ impl<'db> Context<'db> { /// - When you are done validating an attribute set, you must call /// `validate_visited_attributes()`. Otherwise, Context will helpfully panic. 
pub(super) fn visit_attributes(&mut self, ast_attributes: ast::AttributeContainer) { - if !self.attributes.attributes.is_empty() || !self.attributes.unused_attributes.is_empty() { + if self.attributes.attributes.is_some() || !self.attributes.unused_attributes.is_empty() { panic!( "`ctx.visit_attributes() called with {:?} while the Context is still validating previous attribute set on {:?}`", ast_attributes, @@ -125,9 +125,7 @@ impl<'db> Context<'db> { ); } - self.attributes.attributes.clear(); - self.attributes.unused_attributes.clear(); - self.attributes.extend_attributes(ast_attributes, self.ast); + self.attributes.set_attributes(ast_attributes, self.ast); } /// Look for an optional attribute with a name of the form @@ -139,8 +137,8 @@ impl<'db> Context<'db> { /// with a default that can be first, but with native types, arguments are /// purely positional. pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, ast::AttributeId)> { - let attrs = - iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, attr)| attr.name.name.contains('.')); + let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.ast) + .filter(|(_, attr)| attr.name.name.contains('.')); let mut native_type_attr = None; let diagnostics = &mut self.diagnostics; @@ -173,7 +171,8 @@ impl<'db> Context<'db> { /// is defined. #[must_use] pub(crate) fn visit_optional_single_attr(&mut self, name: &'static str) -> bool { - let mut attrs = iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, a)| a.name.name == name); + let mut attrs = + iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name); let (first_idx, first) = match attrs.next() { Some(first) => first, None => return false, @@ -182,7 +181,7 @@ impl<'db> Context<'db> { if attrs.next().is_some() { for (idx, attr) in - iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, a)| a.name.name == name) + iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name) { diagnostics.push_error(DatamodelError::new_duplicate_attribute_error( &attr.name.name, @@ -206,7 +205,7 @@ impl<'db> Context<'db> { let mut has_valid_attribute = false; while !has_valid_attribute { - let first_attr = iter_attributes(&self.attributes.attributes, self.ast) + let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.ast) .filter(|(_, attr)| attr.name.name == name) .find(|(attr_id, _)| self.attributes.unused_attributes.contains(attr_id)); let (attr_id, attr) = if let Some(first_attr) = first_attr { @@ -297,7 +296,8 @@ impl<'db> Context<'db> { attribute.span, )) } - self.attributes.attributes.clear(); + + self.attributes.attributes = None; self.attributes.unused_attributes.clear(); } @@ -430,11 +430,11 @@ impl<'db> Context<'db> { // Implementation detail. Used for arguments validation. 
fn iter_attributes<'a, 'ast: 'a>(
-    attrs: &'a [ast::AttributeContainer],
+    attrs: Option<&'a ast::AttributeContainer>,
     ast: &'ast ast::SchemaAst,
 ) -> impl Iterator<Item = (ast::AttributeId, &'ast ast::Attribute)> + 'a {
     attrs
-        .iter()
+        .into_iter()
         .flat_map(move |container| ast[*container].iter().enumerate().map(|a| (a, *container)))
         .map(|((idx, attr), container)| (ast::AttributeId::new_in_container(container, idx), attr))
 }
diff --git a/psl/parser-database/src/context/attributes.rs b/psl/parser-database/src/context/attributes.rs
index 39655decf8b..9f35f5cc364 100644
--- a/psl/parser-database/src/context/attributes.rs
+++ b/psl/parser-database/src/context/attributes.rs
@@ -4,7 +4,7 @@ use crate::interner::StringId;
 #[derive(Default, Debug)]
 pub(super) struct AttributesValidationState {
     /// The attributes list being validated.
-    pub(super) attributes: Vec<ast::AttributeContainer>,
+    pub(super) attributes: Option<ast::AttributeContainer>,
     pub(super) unused_attributes: HashSet<ast::AttributeId>, // the _remaining_ attributes
 
     /// The attribute being validated.
@@ -13,10 +13,11 @@ pub(super) struct AttributesValidationState {
 }
 
 impl AttributesValidationState {
-    pub(super) fn extend_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) {
+    pub(super) fn set_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) {
         let attribute_ids = (0..ast[attributes].len()).map(|idx| ast::AttributeId::new_in_container(attributes, idx));
+        self.unused_attributes.clear();
         self.unused_attributes.extend(attribute_ids);
 
-        self.attributes.push(attributes);
+        self.attributes = Some(attributes);
     }
 }
diff --git a/psl/parser-database/src/walkers/relation/inline/complete.rs b/psl/parser-database/src/walkers/relation/inline/complete.rs
index 1c5536e948a..3f7b1b67dc6 100644
--- a/psl/parser-database/src/walkers/relation/inline/complete.rs
+++ b/psl/parser-database/src/walkers/relation/inline/complete.rs
@@ -2,6 +2,7 @@ use crate::{
     walkers::{ModelWalker, RelationFieldId, RelationFieldWalker, ScalarFieldWalker},
     ParserDatabase, ReferentialAction,
 };
+use diagnostics::Span;
 use schema_ast::ast;
 
 /// Represents a relation that has fields and references defined in one of the
@@ -65,6 +66,10 @@ impl<'db> CompleteInlineRelationWalker<'db> {
             .unwrap_or(Cascade)
     }
 
+    pub fn on_update_span(self) -> Option<Span> {
+        self.referencing_field().attributes().on_update.map(|(_, span)| span)
+    }
+
     /// Prisma allows setting the relation field as optional, even if one of the
     /// underlying scalar fields is required.
For the purpose of referential /// actions, we count the relation field required if any of the underlying diff --git a/quaint/.github/workflows/test.yml b/quaint/.github/workflows/test.yml index b860d499a2c..60988f6e898 100644 --- a/quaint/.github/workflows/test.yml +++ b/quaint/.github/workflows/test.yml @@ -46,13 +46,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=chrono --features=json --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=gis --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index f7bba93ba96..babd47800a1 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -30,8 +30,6 @@ docs = [] expose-drivers = [] all = [ - "chrono", - "json", "gis", "mssql", "mysql", @@ -61,11 +59,10 @@ postgresql = [ ] gis = ["geozero", "regex", "once_cell"] -json = ["serde_json", "base64"] -mssql = ["tiberius", "uuid", "chrono", "tokio-util", "tokio/time", "tokio/net", "either"] +mssql = ["tiberius", "uuid", "tokio-util", "tokio/time", "tokio/net", "either"] mysql = ["mysql_async", "tokio/time", "lru-cache"] pooled = ["mobc"] -serde-support = ["serde", "chrono/serde"] +serde-support = ["serde"] sqlite = ["rusqlite", "tokio/sync"] bigdecimal = ["bigdecimal_"] fmt-sql = ["sqlformat"] @@ -84,10 +81,10 @@ url = "2.1" hex = "0.4" either = { version = "1.6", optional = true } -base64 = { version = "0.12.3", optional = true } -chrono = { version = "0.4", optional = true, default-features = false } +base64 = { version = "0.12.3" } +chrono = { version = "0.4", default-features = false, features = ["serde"] } lru-cache = { version = "0.1", optional = true } -serde_json = { version = "1.0.48", optional = true, features = ["float_roundtrip"] } +serde_json = { version = "1.0.48", features = ["float_roundtrip"] } native-tls = { version = "0.2", optional = true } bit-vec = { version = "0.6.1", optional = true } mobc = { version = "0.8", optional = true } diff --git a/quaint/src/ast/compare.rs b/quaint/src/ast/compare.rs index d955c889624..c437e83e291 100644 --- a/quaint/src/ast/compare.rs +++ 
b/quaint/src/ast/compare.rs @@ -37,7 +37,7 @@ pub enum Compare<'a> { /// without visitor transformation in between. Raw(Box>, Cow<'a, str>, Box>), /// All json related comparators - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonCompare(JsonCompare<'a>), /// All geometry related comparators #[cfg(feature = "gis")] @@ -624,7 +624,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -644,7 +644,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -674,7 +674,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -704,7 +704,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -732,7 +732,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -760,7 +760,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -779,7 +779,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -798,7 +798,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -1233,7 +1233,7 @@ where left.compare_raw(raw_comparator.into(), right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -1244,7 +1244,7 @@ where val.json_array_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -1255,7 +1255,7 @@ where val.json_array_not_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1266,7 +1266,7 @@ where 
val.json_array_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1277,7 +1277,7 @@ where val.json_array_not_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1288,7 +1288,7 @@ where val.json_array_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1299,7 +1299,7 @@ where val.json_array_not_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -1310,7 +1310,7 @@ where val.json_type_equals(json_type) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git a/quaint/src/ast/expression.rs b/quaint/src/ast/expression.rs index a99e87d5d1b..77c76b1a26a 100644 --- a/quaint/src/ast/expression.rs +++ b/quaint/src/ast/expression.rs @@ -1,6 +1,6 @@ #[cfg(feature = "gis")] use super::compare::{GeometryCompare, GeometryType}; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::{JsonCompare, JsonType}; use crate::ast::*; use query::SelectQuery; @@ -45,38 +45,34 @@ impl<'a> Expression<'a> { } } - #[cfg(feature = "json")] pub(crate) fn is_json_expr(&self) -> bool { match &self.kind { - #[cfg(feature = "json")] ExpressionKind::Parameterized(Value::Json(_)) => true, - #[cfg(feature = "json")] + ExpressionKind::Value(expr) => expr.is_json_value(), - #[cfg(feature = "json")] + ExpressionKind::Function(fun) => fun.returns_json(), _ => false, } } #[allow(dead_code)] - #[cfg(feature = "json")] + pub(crate) fn is_json_value(&self) -> bool { match &self.kind { - #[cfg(feature = "json")] ExpressionKind::Parameterized(Value::Json(_)) => true, - #[cfg(feature = "json")] + ExpressionKind::Value(expr) => expr.is_json_value(), _ => false, } } #[allow(dead_code)] - #[cfg(feature = "json")] + pub(crate) fn into_json_value(self) -> Option { match self.kind { - #[cfg(feature = "json")] ExpressionKind::Parameterized(Value::Json(json_val)) => json_val, - #[cfg(feature = "json")] + ExpressionKind::Value(expr) => expr.into_json_value(), _ => None, } @@ -448,7 +444,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Raw(Box::new(self), raw_comparator.into(), Box::new(right.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -456,7 +452,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayContains(Box::new(self), Box::new(item.into()))) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ 
-464,7 +460,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayNotContains(Box::new(self), Box::new(item.into()))) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -474,7 +470,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Equals(Box::new(array_starts_with), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -484,7 +480,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotEquals(Box::new(array_starts_with), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -494,7 +490,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Equals(Box::new(array_ends_into), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -504,7 +500,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotEquals(Box::new(array_ends_into), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -512,7 +508,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::TypeEquals(Box::new(self), json_type.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git a/quaint/src/ast/function.rs b/quaint/src/ast/function.rs index 0721a3757e6..cd45274870a 100644 --- a/quaint/src/ast/function.rs +++ b/quaint/src/ast/function.rs @@ -3,17 +3,17 @@ mod average; mod coalesce; mod concat; mod count; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_extract; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_extract_array; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_unquote; mod lower; mod maximum; mod minimum; mod row_number; -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] mod row_to_json; #[cfg(any(feature = "postgresql", feature = "mysql"))] mod search; @@ -33,19 +33,19 @@ pub use average::*; pub use coalesce::*; pub use concat::*; pub use count::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] pub use json_extract::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] pub(crate) use json_extract_array::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] 
+#[cfg(any(feature = "postgresql", feature = "mysql"))] pub use json_unquote::*; pub use lower::*; pub use maximum::*; pub use minimum::*; pub use row_number::*; -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] pub use row_to_json::*; -#[cfg(any(feature = "postgresql", feature = "mysql"))] +#[cfg(feature = "mysql")] pub use search::*; pub use sum::*; pub use upper::*; @@ -71,13 +71,13 @@ pub struct Function<'a> { impl<'a> Function<'a> { pub fn returns_json(&self) -> bool { match self.typ_ { - #[cfg(all(feature = "json", feature = "postgresql"))] + #[cfg(feature = "postgresql")] FunctionType::RowToJson(_) => true, - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(feature = "mysql")] FunctionType::JsonExtract(_) => true, - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractLastArrayElem(_) => true, - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractFirstArrayElem(_) => true, _ => false, } @@ -94,7 +94,7 @@ impl<'a> Function<'a> { /// A database function type #[derive(Debug, Clone, PartialEq)] pub(crate) enum FunctionType<'a> { - #[cfg(all(feature = "json", feature = "postgresql"))] + #[cfg(feature = "postgresql")] RowToJson(RowToJson<'a>), RowNumber(RowNumber<'a>), Count(Count<'a>), @@ -107,13 +107,13 @@ pub(crate) enum FunctionType<'a> { Maximum(Maximum<'a>), Coalesce(Coalesce<'a>), Concat(Concat<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtract(JsonExtract<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtractLastArrayElem(JsonExtractLastArrayElem<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtractFirstArrayElem(JsonExtractFirstArrayElem<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonUnquote(JsonUnquote<'a>), #[cfg(any(feature = "postgresql", feature = "mysql"))] TextSearch(TextSearch<'a>), @@ -143,19 +143,19 @@ impl<'a> Aliasable<'a> for Function<'a> { } } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] function!(RowToJson); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtract); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtractLastArrayElem); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtractFirstArrayElem); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonUnquote); #[cfg(any(feature = "postgresql", feature = "mysql"))] diff --git a/quaint/src/ast/function/row_to_json.rs b/quaint/src/ast/function/row_to_json.rs index 7ce8e0c98cc..1093431e741 100644 --- a/quaint/src/ast/function/row_to_json.rs +++ b/quaint/src/ast/function/row_to_json.rs @@ -3,7 +3,7 
@@ use crate::ast::Table; #[derive(Debug, Clone, PartialEq)] #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] /// A representation of the `ROW_TO_JSON` function in the database. /// Only for `Postgresql` pub struct RowToJson<'a> { @@ -40,7 +40,7 @@ pub struct RowToJson<'a> { /// # } /// ``` #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] pub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a> where T: Into>, diff --git a/quaint/src/ast/row.rs b/quaint/src/ast/row.rs index 172781da7ae..ce4a86f0bf0 100644 --- a/quaint/src/ast/row.rs +++ b/quaint/src/ast/row.rs @@ -1,6 +1,6 @@ #[cfg(feature = "gis")] use super::compare::GeometryType; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::JsonType; use crate::ast::{Comparable, Compare, Expression}; use std::borrow::Cow; @@ -285,7 +285,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.compare_raw(raw_comparator, right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -295,7 +295,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -305,7 +305,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -315,7 +315,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -325,7 +325,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -335,7 +335,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -345,7 +345,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -355,7 +355,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_type_equals(json_type) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git 
a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index a9fe9d02201..869a5457962 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -3,9 +3,7 @@ use crate::error::{Error, ErrorKind}; #[cfg(feature = "bigdecimal")] use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; -#[cfg(feature = "chrono")] use chrono::{DateTime, NaiveDate, NaiveTime, Utc}; -#[cfg(feature = "json")] use serde_json::{Number, Value as JsonValue}; use std::{ borrow::{Borrow, Cow}, @@ -112,8 +110,6 @@ pub enum Value<'a> { #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] /// A numeric value. Numeric(Option), - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] /// A JSON value. Json(Option), #[cfg(feature = "gis")] @@ -130,16 +126,10 @@ pub enum Value<'a> { #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] /// An UUID value. Uuid(Option), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A datetime value. DateTime(Option>), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A date value. Date(Option), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A time value. Time(Option), } @@ -190,15 +180,11 @@ impl<'a> fmt::Display for Value<'a> { Value::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")), #[cfg(feature = "bigdecimal")] Value::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")), - #[cfg(feature = "json")] Value::Json(val) => val.as_ref().map(|v| write!(f, "{v}")), #[cfg(feature = "uuid")] Value::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] Value::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] Value::Date(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] Value::Time(val) => val.map(|v| write!(f, "\"{v}\"")), #[cfg(feature = "gis")] Value::Geometry(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")), @@ -213,8 +199,6 @@ impl<'a> fmt::Display for Value<'a> { } } -#[cfg(feature = "json")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] impl<'a> From> for serde_json::Value { fn from(pv: Value<'a>) -> Self { let res = match pv { @@ -245,7 +229,6 @@ impl<'a> From> for serde_json::Value { } #[cfg(feature = "bigdecimal")] Value::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()), - #[cfg(feature = "json")] Value::Json(v) => v, #[cfg(feature = "gis")] Value::Geometry(g) => g.map(|g| serde_json::Value::String(g.to_string())), @@ -253,11 +236,8 @@ impl<'a> From> for serde_json::Value { Value::Geography(g) => g.map(|g| serde_json::Value::String(g.to_string())), #[cfg(feature = "uuid")] Value::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), }; @@ -367,29 +347,21 @@ impl<'a> Value<'a> { } /// Creates a new datetime value. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn datetime(value: DateTime) -> Self { Value::DateTime(Some(value)) } /// Creates a new date value. 
- #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn date(value: NaiveDate) -> Self { Value::Date(Some(value)) } /// Creates a new time value. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn time(value: NaiveTime) -> Self { Value::Time(Some(value)) } /// Creates a new JSON value. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] pub const fn json(value: serde_json::Value) -> Self { Value::Json(Some(value)) } @@ -440,13 +412,9 @@ impl<'a> Value<'a> { Value::Numeric(r) => r.is_none(), #[cfg(feature = "uuid")] Value::Uuid(u) => u.is_none(), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.is_none(), - #[cfg(feature = "chrono")] Value::Date(d) => d.is_none(), - #[cfg(feature = "chrono")] Value::Time(t) => t.is_none(), - #[cfg(feature = "json")] Value::Json(json) => json.is_none(), #[cfg(feature = "gis")] Value::Geometry(s) => s.is_none(), @@ -651,15 +619,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a DateTime. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn is_datetime(&self) -> bool { matches!(self, Value::DateTime(_)) } /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn as_datetime(&self) -> Option> { match self { Value::DateTime(dt) => *dt, @@ -668,15 +632,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a Date. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn is_date(&self) -> bool { matches!(self, Value::Date(_)) } /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn as_date(&self) -> Option { match self { Value::Date(dt) => *dt, @@ -685,15 +645,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a `Time`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn is_time(&self) -> bool { matches!(self, Value::Time(_)) } /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn as_time(&self) -> Option { match self { Value::Time(time) => *time, @@ -702,15 +658,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a JSON value. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] pub const fn is_json(&self) -> bool { matches!(self, Value::Json(_)) } /// Returns a reference to a JSON Value if of Json type, otherwise `None`. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] pub const fn as_json(&self) -> Option<&serde_json::Value> { match self { Value::Json(Some(j)) => Some(j), @@ -719,8 +671,6 @@ impl<'a> Value<'a> { } /// Transforms to a JSON Value if of Json type, otherwise `None`. 
- #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] pub fn into_json(self) -> Option { match self { Value::Json(Some(j)) => Some(j), @@ -774,15 +724,11 @@ value!(val: &'a [u8], Bytes, val.into()); value!(val: f64, Double, val); value!(val: f32, Float, val); -#[cfg(feature = "chrono")] value!(val: DateTime, DateTime, val); -#[cfg(feature = "chrono")] value!(val: chrono::NaiveTime, Time, val); -#[cfg(feature = "chrono")] value!(val: chrono::NaiveDate, Date, val); #[cfg(feature = "bigdecimal")] value!(val: BigDecimal, Numeric, val); -#[cfg(feature = "json")] value!(val: JsonValue, Json, val); #[cfg(feature = "uuid")] value!(val: Uuid, Uuid, val); @@ -848,8 +794,6 @@ impl<'a> TryFrom> for bool { } } -#[cfg(feature = "chrono")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] impl<'a> TryFrom> for DateTime { type Error = Error; @@ -1013,7 +957,6 @@ impl<'a> IntoIterator for Values<'a> { #[cfg(test)] mod tests { use super::*; - #[cfg(feature = "chrono")] use std::str::FromStr; #[test] @@ -1052,7 +995,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn a_parameterized_value_of_datetimes_can_be_converted_into_a_vec() { let datetime = DateTime::from_str("2019-07-27T05:30:30Z").expect("parsing date/time"); let pv = Value::array(vec![datetime]); @@ -1068,7 +1010,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_datetime() { let dt: DateTime = DateTime::from_str("2019-07-27T05:30:30Z").expect("failed while parsing date"); let pv = Value::datetime(dt); @@ -1077,7 +1018,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_date() { let date = NaiveDate::from_ymd_opt(2022, 8, 11).unwrap(); let pv = Value::date(date); @@ -1086,7 +1026,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_time() { let time = NaiveTime::from_hms_opt(16, 17, 00).unwrap(); let pv = Value::time(time); diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs index 2a210447ed1..056f83dc505 100644 --- a/quaint/src/connector/mssql/conversion.rs +++ b/quaint/src/connector/mssql/conversion.rs @@ -24,7 +24,6 @@ impl<'a> IntoSql<'a> for &'a Value<'a> { Value::Array(_) => panic!("Arrays are not supported on SQL Server."), #[cfg(feature = "bigdecimal")] Value::Numeric(val) => (*val).to_sql(), - #[cfg(feature = "json")] Value::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(), #[cfg(feature = "gis")] Value::Geometry(_) => panic!("Cannot handle raw Geometry"), @@ -32,11 +31,8 @@ impl<'a> IntoSql<'a> for &'a Value<'a> { Value::Geography(_) => panic!("Cannot handle raw Geography"), #[cfg(feature = "uuid")] Value::Uuid(val) => val.into_sql(), - #[cfg(feature = "chrono")] Value::DateTime(val) => val.into_sql(), - #[cfg(feature = "chrono")] Value::Date(val) => val.into_sql(), - #[cfg(feature = "chrono")] Value::Time(val) => val.into_sql(), } } @@ -64,32 +60,27 @@ impl TryFrom> for Value<'static> { let kind = ErrorKind::conversion("Please enable `bigdecimal` feature to read numeric values"); return Err(Error::builder(kind).build()); } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); Value::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::SmallDateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| 
DateTime::::from_utc(dt, Utc)); Value::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::Time(_) => { use tiberius::time::chrono::NaiveTime; Value::Time(NaiveTime::from_sql(&dt)?) } - #[cfg(feature = "chrono")] dt @ ColumnData::Date(_) => { use tiberius::time::chrono::NaiveDate; Value::Date(NaiveDate::from_sql(&dt)?) } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTime2(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; @@ -97,7 +88,6 @@ impl TryFrom> for Value<'static> { Value::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTimeOffset(_) => { use tiberius::time::chrono::{DateTime, Utc}; diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs index 8030b9eed8a..640439e8d1e 100644 --- a/quaint/src/connector/mysql/conversion.rs +++ b/quaint/src/connector/mysql/conversion.rs @@ -3,7 +3,6 @@ use crate::{ connector::{queryable::TakeRow, TypeIdentifier}, error::{Error, ErrorKind}, }; -#[cfg(feature = "chrono")] use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike, Utc}; #[cfg(feature = "gis")] use geozero::{wkb::MySQLWkb, wkt::WktStr, ToWkb, ToWkt}; @@ -44,7 +43,6 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { } #[cfg(feature = "bigdecimal")] Value::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), - #[cfg(feature = "json")] Value::Json(s) => match s { Some(ref s) => { let json = serde_json::to_string(s)?; @@ -56,15 +54,12 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { }, #[cfg(feature = "uuid")] Value::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())), - #[cfg(feature = "chrono")] Value::Date(d) => { d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0)) } - #[cfg(feature = "chrono")] Value::Time(t) => { t.map(|t| my::Value::Time(false, 0, t.hour() as u8, t.minute() as u8, t.second() as u8, 0)) } - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| { my::Value::Date( dt.year() as u16, @@ -241,7 +236,6 @@ impl TakeRow for my::Row { let res = match value { // JSON is returned as bytes. 
- #[cfg(feature = "json")] my::Value::Bytes(b) if column.is_json() => { serde_json::from_slice(&b).map(Value::json).map_err(|_| { let msg = "Unable to convert bytes to JSON"; @@ -299,7 +293,6 @@ impl TakeRow for my::Row { })?), my::Value::Float(f) => Value::from(f), my::Value::Double(f) => Value::from(f), - #[cfg(feature = "chrono")] my::Value::Date(year, month, day, hour, min, sec, micro) => { if day == 0 || month == 0 { let msg = format!( @@ -317,7 +310,6 @@ impl TakeRow for my::Row { Value::datetime(DateTime::::from_utc(dt, Utc)) } - #[cfg(feature = "chrono")] my::Value::Time(is_neg, days, hours, minutes, seconds, micros) => { if is_neg { let kind = ErrorKind::conversion("Failed to convert a negative time"); @@ -345,13 +337,9 @@ impl TakeRow for my::Row { t if t.is_bytes() => Value::Bytes(None), #[cfg(feature = "bigdecimal")] t if t.is_real() => Value::Numeric(None), - #[cfg(feature = "chrono")] t if t.is_datetime() => Value::DateTime(None), - #[cfg(feature = "chrono")] t if t.is_time() => Value::Time(None), - #[cfg(feature = "chrono")] t if t.is_date() => Value::Date(None), - #[cfg(feature = "json")] t if t.is_json() => Value::Json(None), #[cfg(feature = "gis")] t if t.is_geometry() => Value::Geometry(None), @@ -362,16 +350,6 @@ impl TakeRow for my::Row { return Err(Error::builder(kind).build()); } }, - #[cfg(not(feature = "chrono"))] - typ => { - let msg = format!( - "Value of type {:?} is not supported with the current configuration", - typ - ); - - let kind = ErrorKind::conversion(msg); - Err(Error::builder(kind).build())? - } }; Ok(res) diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index d4dc008bd5f..c35208f8419 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -1,5 +1,5 @@ mod conversion; -mod error; +pub mod error; use crate::{ ast::{Query, Value}, diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs index 4a35c08fc83..6315e299516 100644 --- a/quaint/src/connector/postgres/conversion.rs +++ b/quaint/src/connector/postgres/conversion.rs @@ -10,7 +10,6 @@ use crate::{ use bigdecimal::{num_bigint::BigInt, BigDecimal, FromPrimitive, ToPrimitive}; use bit_vec::BitVec; use bytes::BytesMut; -#[cfg(feature = "chrono")] use chrono::{DateTime, NaiveDateTime, Utc}; #[cfg(feature = "bigdecimal")] pub(crate) use decimal::DecimalWrapper; @@ -54,16 +53,12 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { Value::Char(_) => PostgresType::CHAR, #[cfg(feature = "bigdecimal")] Value::Numeric(_) => PostgresType::NUMERIC, - #[cfg(feature = "json")] Value::Json(_) => PostgresType::JSONB, Value::Xml(_) => PostgresType::XML, #[cfg(feature = "uuid")] Value::Uuid(_) => PostgresType::UUID, - #[cfg(feature = "chrono")] Value::DateTime(_) => PostgresType::TIMESTAMPTZ, - #[cfg(feature = "chrono")] Value::Date(_) => PostgresType::TIMESTAMP, - #[cfg(feature = "chrono")] Value::Time(_) => PostgresType::TIME, #[cfg(feature = "gis")] Value::Geometry(_) => PostgresType::BYTEA, @@ -100,16 +95,12 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { Value::Char(_) => PostgresType::CHAR_ARRAY, #[cfg(feature = "bigdecimal")] Value::Numeric(_) => PostgresType::NUMERIC_ARRAY, - #[cfg(feature = "json")] Value::Json(_) => PostgresType::JSONB_ARRAY, Value::Xml(_) => PostgresType::XML_ARRAY, #[cfg(feature = "uuid")] Value::Uuid(_) => PostgresType::UUID_ARRAY, - #[cfg(feature = "chrono")] Value::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, - #[cfg(feature = "chrono")] 
Value::Date(_) => PostgresType::TIMESTAMP_ARRAY, - #[cfg(feature = "chrono")] Value::Time(_) => PostgresType::TIME_ARRAY, #[cfg(feature = "gis")] Value::Geometry(_) => PostgresType::BYTEA, @@ -166,10 +157,8 @@ impl<'a> FromSql<'a> for EnumString { } } -#[cfg(feature = "chrono")] struct TimeTz(chrono::NaiveTime); -#[cfg(feature = "chrono")] impl<'a> FromSql<'a> for TimeTz { fn from_sql(_ty: &PostgresType, raw: &'a [u8]) -> Result> { // We assume UTC. @@ -272,7 +261,6 @@ impl GetRow for PostgresRow { } None => Value::Numeric(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMP => match row.try_get(i)? { Some(val) => { let ts: NaiveDateTime = val; @@ -281,7 +269,6 @@ impl GetRow for PostgresRow { } None => Value::DateTime(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMPTZ => match row.try_get(i)? { Some(val) => { let ts: DateTime = val; @@ -289,17 +276,14 @@ impl GetRow for PostgresRow { } None => Value::DateTime(None), }, - #[cfg(feature = "chrono")] PostgresType::DATE => match row.try_get(i)? { Some(val) => Value::date(val), None => Value::Date(None), }, - #[cfg(feature = "chrono")] PostgresType::TIME => match row.try_get(i)? { Some(val) => Value::time(val), None => Value::Time(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMETZ => match row.try_get(i)? { Some(val) => { let time: TimeTz = val; @@ -325,7 +309,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "json")] PostgresType::JSON | PostgresType::JSONB => Value::Json(row.try_get(i)?), PostgresType::INT2_ARRAY => match row.try_get(i)? { Some(val) => { @@ -381,7 +364,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMP_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -436,7 +418,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMPTZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec>> = val; @@ -446,7 +427,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::DATE_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -456,7 +436,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIME_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -466,7 +445,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMETZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -476,7 +454,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "json")] PostgresType::JSON_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -486,7 +463,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "json")] PostgresType::JSONB_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -574,7 +550,7 @@ impl GetRow for PostgresRow { } } ref x => match x.kind() { - Kind::Enum(_) => match row.try_get(i)? { + Kind::Enum => match row.try_get(i)? { Some(val) => { let val: EnumString = val; @@ -583,7 +559,7 @@ impl GetRow for PostgresRow { None => Value::Enum(None), }, Kind::Array(inner) => match inner.kind() { - Kind::Enum(_) => match row.try_get(i)? { + Kind::Enum => match row.try_get(i)? 
{ Some(val) => { let val: Vec> = val; let variants = val.into_iter().map(|x| Value::Enum(x.map(|x| x.value.into()))); @@ -876,7 +852,6 @@ impl<'a> ToSql for Value<'a> { (Value::Geometry(_), _) => panic!("Cannot handle raw Geometry"), #[cfg(feature = "gis")] (Value::Geography(_), _) => panic!("Cannot handle raw Geography"), - #[cfg(feature = "json")] (Value::Text(string), &PostgresType::JSON) | (Value::Text(string), &PostgresType::JSONB) => string .as_ref() .map(|string| serde_json::from_str::(string)?.to_sql(ty, out)), @@ -915,27 +890,20 @@ impl<'a> ToSql for Value<'a> { return Err(Error::builder(kind).build().into()); } - #[cfg(feature = "json")] (Value::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), (Value::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), #[cfg(feature = "uuid")] (Value::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::Date(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::Time(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::DateTime(value), &PostgresType::TIME) => value.map(|value| value.time().to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::DateTime(value), &PostgresType::TIMETZ) => value.map(|value| { let result = value.time().to_sql(ty, out)?; // We assume UTC. see https://www.postgresql.org/docs/9.5/datatype-datetime.html out.extend_from_slice(&[0; 4]); Ok(result) }), - #[cfg(feature = "chrono")] (Value::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), }; diff --git a/quaint/src/connector/postgres/error.rs b/quaint/src/connector/postgres/error.rs index 40634e2aa33..4f7bb23a5c8 100644 --- a/quaint/src/connector/postgres/error.rs +++ b/quaint/src/connector/postgres/error.rs @@ -1,37 +1,64 @@ +use std::fmt::{Display, Formatter}; + +use tokio_postgres::error::DbError; + use crate::error::{DatabaseConstraint, Error, ErrorKind, Name}; -impl From for Error { - fn from(e: tokio_postgres::error::Error) -> Error { - use tokio_postgres::error::DbError; +#[derive(Debug)] +pub struct PostgresError { + pub code: String, + pub message: String, + pub severity: String, + pub detail: Option, + pub column: Option, + pub hint: Option, +} - if e.is_closed() { - return Error::builder(ErrorKind::ConnectionClosed).build(); +impl std::error::Error for PostgresError {} + +impl Display for PostgresError { + // copy of DbError::fmt + fn fmt(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{}: {}", self.severity, self.message)?; + if let Some(detail) = &self.detail { + write!(fmt, "\nDETAIL: {}", detail)?; + } + if let Some(hint) = &self.hint { + write!(fmt, "\nHINT: {}", hint)?; } + Ok(()) + } +} - match e.code().map(|c| c.code()) { - Some(code) if code == "22001" => { - let code = code.to_string(); +impl From<&DbError> for PostgresError { + fn from(value: &DbError) -> Self { + PostgresError { + code: value.code().code().to_string(), + severity: value.severity().to_string(), + message: value.message().to_string(), + detail: value.detail().map(ToString::to_string), + column: value.column().map(ToString::to_string), + hint: value.hint().map(ToString::to_string), + } + } +} +impl From for Error { + fn from(value: PostgresError) -> Self { + match value.code.as_str() { + "22001" => { let mut builder = 
Error::builder(ErrorKind::LengthMismatch { column: Name::Unavailable, }); - builder.set_original_code(code); - - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - if let Some(db_error) = db_error { - builder.set_original_message(db_error.to_string()); - } + builder.set_original_code(&value.code); + builder.set_original_message(value.to_string()); builder.build() } - Some(code) if code == "23505" => { - let code = code.to_string(); - - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let detail = db_error.as_ref().and_then(|e| e.detail()).map(ToString::to_string); - - let constraint = detail + "23505" => { + let constraint = value + .detail .as_ref() .and_then(|d| d.split(")=(").next()) .and_then(|d| d.split(" (").nth(1).map(|s| s.replace('\"', ""))) @@ -41,189 +68,138 @@ impl From for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); - if let Some(detail) = detail { + if let Some(detail) = value.detail { builder.set_original_message(detail); } builder.build() } - // Even lipstick will not save this... - Some(code) if code == "23502" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let detail = db_error.as_ref().and_then(|e| e.detail()).map(ToString::to_string); - - let constraint = db_error - .as_ref() - .map(|e| e.column()) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); + // Even lipstick will not save this... + "23502" => { + let constraint = DatabaseConstraint::fields(value.column); let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); - if let Some(detail) = detail { + if let Some(detail) = value.detail { builder.set_original_message(detail); } builder.build() } - Some(code) if code == "23503" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - - match db_error.as_ref().and_then(|e| e.column()) { - Some(column) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::fields(Some(column)), - }); - - builder.set_original_code(code); - - if let Some(message) = db_error.as_ref().map(|e| e.message()) { - builder.set_original_message(message); - } - - builder.build() - } - None => { - let constraint = db_error - .as_ref() - .map(|e| e.message()) - .and_then(|e| e.split_whitespace().nth(10)) - .and_then(|s| s.split('"').nth(1)) - .map(ToString::to_string) - .map(DatabaseConstraint::Index) - .unwrap_or(DatabaseConstraint::CannotParse); - - let kind = ErrorKind::ForeignKeyConstraintViolation { constraint }; - let mut builder = Error::builder(kind); - - builder.set_original_code(code); - - if let Some(message) = db_error.as_ref().map(|e| e.message()) { - builder.set_original_message(message); - } - - builder.build() - } - } - } - Some(code) if code == "3D000" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); + "23503" => match value.column { + Some(column) => { + let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { + constraint: DatabaseConstraint::fields(Some(column)), + }); - let db_name = message - .as_ref() - .and_then(|s| s.split_whitespace().nth(1)) + 
builder.set_original_code(value.code); + builder.set_original_message(value.message); + + builder.build() + } + None => { + let constraint = value + .message + .split_whitespace() + .nth(10) + .and_then(|s| s.split('"').nth(1)) + .map(ToString::to_string) + .map(DatabaseConstraint::Index) + .unwrap_or(DatabaseConstraint::CannotParse); + + let kind = ErrorKind::ForeignKeyConstraintViolation { constraint }; + let mut builder = Error::builder(kind); + + builder.set_original_code(value.code); + builder.set_original_message(value.message); + + builder.build() + } + }, + "3D000" => { + let db_name = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseDoesNotExist { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "28000" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let db_name = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(5)) + "28000" => { + let db_name = value + .message + .split_whitespace() + .nth(5) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseAccessDenied { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "28P01" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); + "28P01" => { + let message = value.message; let user = message - .as_ref() - .and_then(|m| m.split_whitespace().last()) + .split_whitespace() + .last() .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::AuthenticationFailed { user }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(message); builder.build() } - Some(code) if code == "40001" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); + "40001" => { + let mut builder: crate::error::ErrorBuilder = Error::builder(ErrorKind::TransactionWriteConflict); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42P01" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let table = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42P01" => { + let table = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::TableDoesNotExist { table }; let mut builder = Error::builder(kind); - 
builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42703" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let column = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42703" => { + let column = value + .message + .split_whitespace() + .nth(1) .map(|s| s.split('\"')) .and_then(|mut s| match (s.next(), s.next()) { (Some(column), _) if !column.is_empty() => Some(column), @@ -235,92 +211,102 @@ impl From for Error { let kind = ErrorKind::ColumnNotFound { column }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } - + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42P04" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let db_name = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42P04" => { + let db_name = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseAlreadyExists { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); + builder.set_original_message(value.message); - if let Some(message) = message { - builder.set_original_message(message); - } + builder.build() + } + + _ => { + let code = value.code.to_owned(); + let message = value.to_string(); + let mut builder = Error::builder(ErrorKind::QueryError(value.into())); + builder.set_original_code(code); + builder.set_original_message(message); builder.build() } - code => { - // This is necessary, on top of the other conversions, for the cases where a - // native_tls error comes wrapped in a tokio_postgres error. - if let Some(tls_error) = try_extracting_tls_error(&e) { - return tls_error; - } + } + } +} - // Same for IO errors. - if let Some(io_error) = try_extracting_io_error(&e) { - return io_error; - } +impl From for Error { + fn from(e: tokio_postgres::error::Error) -> Error { + if e.is_closed() { + return Error::builder(ErrorKind::ConnectionClosed).build(); + } - #[cfg(feature = "uuid")] - if let Some(uuid_error) = try_extracting_uuid_error(&e) { - return uuid_error; - } + if let Some(db_error) = e.as_db_error() { + return PostgresError::from(db_error).into(); + } - let reason = format!("{e}"); - - match reason.as_str() { - "error connecting to server: timed out" => { - let mut builder = Error::builder(ErrorKind::ConnectTimeout); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } // sigh... 
- // https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 - "error performing TLS handshake: server does not support TLS" => { - let mut builder = Error::builder(ErrorKind::TlsError { - message: reason.clone(), - }); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } // double sigh - _ => { - let code = code.map(|c| c.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } - } + if let Some(tls_error) = try_extracting_tls_error(&e) { + return tls_error; + } + + // Same for IO errors. + if let Some(io_error) = try_extracting_io_error(&e) { + return io_error; + } + + #[cfg(feature = "uuid")] + if let Some(uuid_error) = try_extracting_uuid_error(&e) { + return uuid_error; + } + + let reason = format!("{e}"); + let code = e.code().map(|c| c.code()); + + match reason.as_str() { + "error connecting to server: timed out" => { + let mut builder = Error::builder(ErrorKind::ConnectTimeout); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + return builder.build(); + } // sigh... + // https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 + "error performing TLS handshake: server does not support TLS" => { + let mut builder = Error::builder(ErrorKind::TlsError { + message: reason.clone(), + }); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + return builder.build(); + } // double sigh + _ => { + let code = code.map(|c| c.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + return builder.build(); } } } diff --git a/quaint/src/connector/result_set.rs b/quaint/src/connector/result_set.rs index dedc49d23ff..b98d252a057 100644 --- a/quaint/src/connector/result_set.rs +++ b/quaint/src/connector/result_set.rs @@ -5,10 +5,8 @@ pub use index::*; pub use result_row::*; use crate::{ast::Value, error::*}; -use std::sync::Arc; - -#[cfg(feature = "json")] use serde_json::Map; +use std::sync::Arc; /// Encapsulates a set of results and their respective column names. 
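The rewritten quaint/src/connector/postgres/error.rs above funnels every database error from tokio_postgres through the new public PostgresError struct before mapping it to quaint's ErrorKind. A minimal sketch of that two-step conversion, using only the conversions introduced in this diff (the helper name to_quaint_error is illustrative, not part of the change):

    use quaint::error::{Error, PostgresError};
    use tokio_postgres::error::DbError;

    // Hypothetical helper: driver error -> PostgresError -> quaint Error.
    fn to_quaint_error(db_error: &DbError) -> Error {
        // Captures code, severity, message, detail, column and hint from the driver error.
        let pg_error = PostgresError::from(db_error);
        // Maps SQLSTATE codes (23505, 23503, 42P01, ...) onto quaint's ErrorKind variants,
        // preserving the original code and message on the error builder.
        Error::from(pg_error)
    }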
#[derive(Debug, Default)] @@ -108,8 +106,6 @@ impl Iterator for ResultSetIterator { } } -#[cfg(feature = "json")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] impl From for serde_json::Value { fn from(result_set: ResultSet) -> Self { let columns: Vec = result_set.columns().iter().map(ToString::to_string).collect(); diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs index 766e58e8b93..ecc3fd9cc84 100644 --- a/quaint/src/connector/sqlite/conversion.rs +++ b/quaint/src/connector/sqlite/conversion.rs @@ -16,7 +16,6 @@ use rusqlite::{ Column, Error as RusqlError, Row as SqliteRow, Rows as SqliteRows, }; -#[cfg(feature = "chrono")] use chrono::TimeZone; impl TypeIdentifier for Column<'_> { @@ -163,9 +162,7 @@ impl<'a> GetRow for SqliteRow<'a> { c if c.is_double() => Value::Double(None), #[cfg(feature = "bigdecimal")] c if c.is_real() => Value::Numeric(None), - #[cfg(feature = "chrono")] c if c.is_datetime() => Value::DateTime(None), - #[cfg(feature = "chrono")] c if c.is_date() => Value::Date(None), c if c.is_bool() => Value::Boolean(None), #[cfg(feature = "gis")] @@ -190,12 +187,10 @@ impl<'a> GetRow for SqliteRow<'a> { Value::boolean(true) } } - #[cfg(feature = "chrono")] c if c.is_date() => { let dt = chrono::NaiveDateTime::from_timestamp_opt(i / 1000, 0).unwrap(); Value::date(dt.date()) } - #[cfg(feature = "chrono")] c if c.is_datetime() => { let dt = chrono::Utc.timestamp_millis_opt(i).unwrap(); Value::datetime(dt) @@ -221,7 +216,6 @@ impl<'a> GetRow for SqliteRow<'a> { Value::numeric(BigDecimal::from_f64(f).unwrap()) } ValueRef::Real(f) => Value::double(f), - #[cfg(feature = "chrono")] ValueRef::Text(bytes) if column.is_datetime() => { let parse_res = std::str::from_utf8(bytes).map_err(|_| { let builder = Error::builder(ErrorKind::ConversionError( @@ -302,7 +296,6 @@ impl<'a> ToSql for Value<'a> { Value::Numeric(d) => d .as_ref() .map(|d| ToSqlOutput::from(d.to_string().parse::().expect("BigDecimal is not a f64."))), - #[cfg(feature = "json")] Value::Json(value) => value.as_ref().map(|value| { let stringified = serde_json::to_string(value) .map_err(|err| RusqlError::ToSqlConversionFailure(Box::new(err))) @@ -313,13 +306,10 @@ impl<'a> ToSql for Value<'a> { Value::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), #[cfg(feature = "uuid")] Value::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())), - #[cfg(feature = "chrono")] Value::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())), - #[cfg(feature = "chrono")] Value::Date(date) => date .and_then(|date| date.and_hms_opt(0, 0, 0)) .map(|dt| ToSqlOutput::from(dt.timestamp_millis())), - #[cfg(feature = "chrono")] Value::Time(time) => time .and_then(|time| chrono::NaiveDate::from_ymd_opt(1970, 1, 1).map(|d| (d, time))) .and_then(|(date, time)| { diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 73f88dc90b3..e9bdc890f27 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,6 +6,8 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; +pub use crate::connector::postgres::error::PostgresError; + #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { Fields(Vec), @@ -326,8 +328,6 @@ impl From for Error { } } -#[cfg(feature = "json")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] impl From for Error { fn from(_: serde_json::Error) -> Self { Self::builder(ErrorKind::conversion("Malformed JSON data.")).build() diff --git a/quaint/src/serde.rs 
b/quaint/src/serde.rs index a5744a94ac0..a8196880dad 100644 --- a/quaint/src/serde.rs +++ b/quaint/src/serde.rs @@ -150,32 +150,24 @@ impl<'de> Deserializer<'de> for ValueDeserializer<'de> { #[cfg(feature = "gis")] Value::Geography(None) => visitor.visit_none(), - #[cfg(feature = "json")] Value::Json(Some(value)) => { let de = value.into_deserializer(); de.deserialize_any(visitor) .map_err(|err| serde::de::value::Error::custom(format!("Error deserializing JSON value: {err}"))) } - #[cfg(feature = "json")] Value::Json(None) => visitor.visit_none(), Value::Xml(Some(s)) => visitor.visit_string(s.into_owned()), Value::Xml(None) => visitor.visit_none(), - #[cfg(feature = "chrono")] Value::DateTime(Some(dt)) => visitor.visit_string(dt.to_rfc3339()), - #[cfg(feature = "chrono")] Value::DateTime(None) => visitor.visit_none(), - #[cfg(feature = "chrono")] Value::Date(Some(d)) => visitor.visit_string(format!("{d}")), - #[cfg(feature = "chrono")] Value::Date(None) => visitor.visit_none(), - #[cfg(feature = "chrono")] Value::Time(Some(t)) => visitor.visit_string(format!("{t}")), - #[cfg(feature = "chrono")] Value::Time(None) => visitor.visit_none(), Value::Array(Some(values)) => { diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index 9fc67e9d662..dc286307cf1 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -1,7 +1,7 @@ mod error; use super::test_api::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use crate::ast::JsonPath; use crate::{ connector::{IsolationLevel, Queryable, TransactionCapable}, @@ -1412,7 +1412,6 @@ async fn unsigned_integers_are_handled(api: &mut dyn TestApi) -> crate::Result<( Ok(()) } -#[cfg(feature = "json")] #[test_each_connector(tags("mysql", "postgresql"))] async fn json_filtering_works(api: &mut dyn TestApi) -> crate::Result<()> { let json_type = match api.system() { @@ -1691,7 +1690,7 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { } #[test_each_connector(tags("postgresql"))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> { let cte = Select::default() .value(val!("hello_world").alias("toto")) @@ -1710,7 +1709,7 @@ async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> { } #[test_each_connector(tags("postgresql"))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] async fn row_to_json_pretty(api: &mut dyn TestApi) -> crate::Result<()> { let cte = Select::default() .value(val!("hello_world").alias("toto")) @@ -2067,7 +2066,6 @@ async fn coalesce_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "json")] fn value_into_json(value: &Value) -> Option { match value.clone() { // MariaDB returns JSON as text @@ -2082,7 +2080,7 @@ fn value_into_json(value: &Value) -> Option { } } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> { let table = api @@ -2133,7 +2131,7 @@ async fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] async fn json_extract_array_path_postgres(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", 
api.autogen_id("id"), json_type)) @@ -2192,7 +2190,7 @@ async fn json_extract_array_path_postgres(api: &mut dyn TestApi, json_type: &str Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_extract_array_path_fun_on_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_extract_array_path_postgres(api, "jsonb").await?; @@ -2200,7 +2198,7 @@ async fn json_extract_array_path_fun_on_jsonb(api: &mut dyn TestApi) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_extract_array_path_fun_on_json(api: &mut dyn TestApi) -> crate::Result<()> { json_extract_array_path_postgres(api, "json").await?; @@ -2208,7 +2206,7 @@ async fn json_extract_array_path_fun_on_json(api: &mut dyn TestApi) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_contains(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2287,7 +2285,7 @@ async fn json_array_contains(api: &mut dyn TestApi, json_type: &str) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_contains(api, "jsonb").await?; @@ -2295,7 +2293,7 @@ async fn json_array_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Resul Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_contains(api, "json").await?; @@ -2303,7 +2301,7 @@ async fn json_array_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_contains(api, "json").await?; @@ -2311,7 +2309,7 @@ async fn json_array_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_not_contains(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2345,7 +2343,7 @@ async fn json_array_not_contains(api: &mut dyn TestApi, json_type: &str) -> crat Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_contains(api, "jsonb").await?; @@ -2353,7 +2351,7 @@ async fn json_array_not_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_contains(api, "json").await?; @@ -2361,7 +2359,7 @@ async fn 
json_array_not_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_not_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_contains(api, "json").await?; @@ -2369,7 +2367,7 @@ async fn json_array_not_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2437,7 +2435,7 @@ async fn json_array_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_begins_with(api, "jsonb").await?; @@ -2445,7 +2443,7 @@ async fn json_array_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_begins_with(api, "json").await?; @@ -2453,7 +2451,7 @@ async fn json_array_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Res Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_begins_with(api, "json").await?; @@ -2461,7 +2459,7 @@ async fn json_array_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_not_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2496,7 +2494,7 @@ async fn json_array_not_begins_with(api: &mut dyn TestApi, json_type: &str) -> c Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_begins_with(api, "jsonb").await?; @@ -2504,7 +2502,7 @@ async fn json_array_not_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_begins_with(api, "json").await?; @@ -2512,7 +2510,7 @@ async fn json_array_not_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate: Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_not_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_begins_with(api, "json").await?; @@ -2520,7 +2518,7 @@ async fn json_array_not_begins_with_fun(api: &mut dyn TestApi) -> crate::Result< Ok(()) } 
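With the json feature gate dropped from quaint/src/tests/query.rs, JSON comparisons in the AST only require the postgresql or mysql connector feature. A rough sketch of the kind of query these tests exercise, assuming a hypothetical users table with a JSON obj column (the table and column names are illustrative, not taken from the test fixtures):

    use quaint::{prelude::*, visitor::{Postgres, Visitor}};
    use serde_json::json;

    fn build_json_equals_query() -> (String, Vec<Value<'static>>) {
        // No #[cfg(feature = "json")] guard is needed any more.
        let query = Select::from_table("users")
            .column("id")
            .so_that("obj".equals(Value::json(json!({ "role": "admin" }))));

        // Renders the query to SQL plus a parameter list; the Mysql, Mssql and Sqlite
        // visitors are used the same way.
        Postgres::build(query).expect("valid query")
    }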
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2589,7 +2587,7 @@ async fn json_array_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate:: Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_ends_into(api, "jsonb").await?; @@ -2597,7 +2595,7 @@ async fn json_array_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Resu Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_ends_into(api, "json").await?; @@ -2605,7 +2603,7 @@ async fn json_array_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Resul Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_ends_into(api, "json").await?; @@ -2613,7 +2611,7 @@ async fn json_array_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_not_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2649,7 +2647,7 @@ async fn json_array_not_ends_into(api: &mut dyn TestApi, json_type: &str) -> cra Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_ends_into(api, "jsonb").await?; @@ -2657,7 +2655,7 @@ async fn json_array_not_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate:: Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_ends_into(api, "json").await?; @@ -2665,7 +2663,7 @@ async fn json_array_not_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_not_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_ends_into(api, "json").await?; @@ -2673,7 +2671,7 @@ async fn json_array_not_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<() Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_gt_gte_lt_lte(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, json {}", api.autogen_id("id"), json_type)) @@ -2817,7 +2815,7 @@ async fn json_gt_gte_lt_lte(api: &mut dyn TestApi, json_type: &str) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", feature = 
"postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_gt_gte_lt_lte_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_gt_gte_lt_lte(api, "jsonb").await?; @@ -2825,7 +2823,7 @@ async fn json_gt_gte_lt_lte_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_gt_gte_lt_lte_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_gt_gte_lt_lte(api, "json").await?; @@ -2833,7 +2831,7 @@ async fn json_gt_gte_lt_lte_fun_pg_json(api: &mut dyn TestApi) -> crate::Result< Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_gt_gte_lt_lte_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_gt_gte_lt_lte(api, "json").await?; @@ -3085,7 +3083,6 @@ async fn query_raw_typed_numeric(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "chrono")] #[test_each_connector(tags("postgresql"))] async fn query_raw_typed_date(api: &mut dyn TestApi) -> crate::Result<()> { use chrono::DateTime; @@ -3117,7 +3114,6 @@ async fn query_raw_typed_date(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "json")] #[test_each_connector(tags("postgresql"))] async fn query_raw_typed_json(api: &mut dyn TestApi) -> crate::Result<()> { use serde_json::json; @@ -3396,7 +3392,7 @@ async fn any_in_expression(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] #[test_each_connector(tags("postgresql", "mysql"))] async fn json_unquote_fun(api: &mut dyn TestApi) -> crate::Result<()> { let json_type = match api.system() { @@ -3434,7 +3430,7 @@ async fn json_unquote_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] #[test_each_connector(tags("postgresql", "mysql"))] async fn json_col_equal_json_col(api: &mut dyn TestApi) -> crate::Result<()> { let json_type = match api.system() { diff --git a/quaint/src/tests/query/error.rs b/quaint/src/tests/query/error.rs index 63bfd3ef035..d9884a2c574 100644 --- a/quaint/src/tests/query/error.rs +++ b/quaint/src/tests/query/error.rs @@ -257,7 +257,6 @@ async fn ms_my_foreign_key_constraint_violation(api: &mut dyn TestApi) -> crate: Ok(()) } -#[cfg(feature = "chrono")] #[test_each_connector(tags("mysql"))] async fn garbage_datetime_values(api: &mut dyn TestApi) -> crate::Result<()> { api.conn() diff --git a/quaint/src/tests/types/mssql.rs b/quaint/src/tests/types/mssql.rs index 2f9a125022c..6824562cde5 100644 --- a/quaint/src/tests/types/mssql.rs +++ b/quaint/src/tests/types/mssql.rs @@ -127,7 +127,6 @@ test_type!(image( Value::bytes(b"DEADBEEF".to_vec()), )); -#[cfg(feature = "chrono")] test_type!(date( mssql, "date", @@ -135,7 +134,6 @@ test_type!(date( Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(time( mssql, "time", @@ -143,25 +141,21 @@ test_type!(time( Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime2(mssql, "datetime2", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); 
Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(datetime(mssql, "datetime", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(datetimeoffset(mssql, "datetimeoffset", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(smalldatetime(mssql, "smalldatetime", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs index 15e2c4f6478..b69d736d4a3 100644 --- a/quaint/src/tests/types/mysql.rs +++ b/quaint/src/tests/types/mysql.rs @@ -208,7 +208,6 @@ test_type!(enum( Value::enum_variant("pollicle_dogs") )); -#[cfg(feature = "json")] test_type!(json( mysql, "json", @@ -216,13 +215,11 @@ test_type!(json( Value::json(serde_json::json!({"this": "is", "a": "json", "number": 2})) )); -#[cfg(feature = "chrono")] test_type!(date(mysql, "date", Value::Date(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-04-20T00:00:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(time( mysql, "time", @@ -230,13 +227,11 @@ test_type!(time( Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime(mysql, "datetime", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamp(mysql, "timestamp", { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs index a098fa7a5eb..3ca40c822a7 100644 --- a/quaint/src/tests/types/postgres.rs +++ b/quaint/src/tests/types/postgres.rs @@ -260,7 +260,6 @@ test_type!(inet_array( ]) )); -#[cfg(feature = "json")] test_type!(json( postgresql, "json", @@ -268,7 +267,6 @@ test_type!(json( Value::json(serde_json::json!({"foo": "bar"})) )); -#[cfg(feature = "json")] test_type!(json_array( postgresql, "json[]", @@ -280,7 +278,6 @@ test_type!(json_array( ]) )); -#[cfg(feature = "json")] test_type!(jsonb( postgresql, "jsonb", @@ -288,7 +285,6 @@ test_type!(jsonb( Value::json(serde_json::json!({"foo": "bar"})) )); -#[cfg(feature = "json")] test_type!(jsonb_array( postgresql, "jsonb[]", @@ -332,7 +328,6 @@ test_type!(uuid_array( ]) )); -#[cfg(feature = "chrono")] test_type!(date( postgresql, "date", @@ -340,7 +335,6 @@ test_type!(date( Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(date_array( postgresql, "date[]", @@ -351,7 +345,6 @@ test_type!(date_array( ]) )); -#[cfg(feature = "chrono")] test_type!(time( postgresql, "time", @@ -359,7 +352,6 @@ test_type!(time( Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(time_array( postgresql, "time[]", @@ -370,13 +362,11 @@ test_type!(time_array( ]) )); -#[cfg(feature = "chrono")] test_type!(timestamp(postgresql, "timestamp", Value::DateTime(None), { let dt = 
chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamp_array(postgresql, "timestamp[]", Value::Array(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); @@ -386,13 +376,11 @@ test_type!(timestamp_array(postgresql, "timestamp[]", Value::Array(None), { ]) })); -#[cfg(feature = "chrono")] test_type!(timestamptz(postgresql, "timestamptz", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamptz_array(postgresql, "timestamptz[]", Value::Array(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs index 39aca6de2d5..80ab4bb5b8f 100644 --- a/quaint/src/tests/types/sqlite.rs +++ b/quaint/src/tests/types/sqlite.rs @@ -1,9 +1,7 @@ #![allow(clippy::approx_constant)] use crate::tests::test_api::sqlite_test_api; -#[cfg(feature = "chrono")] use crate::tests::test_api::TestApi; -#[cfg(feature = "chrono")] use crate::{ast::*, connector::Queryable}; #[cfg(feature = "bigdecimal")] use std::str::FromStr; @@ -78,7 +76,6 @@ test_type!(boolean( Value::boolean(false) )); -#[cfg(feature = "chrono")] test_type!(date( sqlite, "DATE", @@ -86,7 +83,6 @@ test_type!(date( Value::date(chrono::NaiveDate::from_ymd_opt(1984, 1, 1).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime( sqlite, "DATETIME", @@ -94,7 +90,6 @@ test_type!(datetime( Value::datetime(chrono::DateTime::from_str("2020-07-29T09:23:44.458Z").unwrap()) )); -#[cfg(feature = "chrono")] #[test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; @@ -115,7 +110,6 @@ async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(feature = "chrono")] #[test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; @@ -138,7 +132,6 @@ async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(feature = "chrono")] #[test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_custom(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs index 9cb2cbc16b1..506ee368e2e 100644 --- a/quaint/src/visitor.rs +++ b/quaint/src/visitor.rs @@ -121,22 +121,22 @@ pub trait Visitor<'a> { /// Visit a non-parameterized value. 
fn visit_raw_value(&mut self, value: Value<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, right: JsonType<'a>, not: bool) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> Result; #[cfg(feature = "gis")] @@ -976,7 +976,7 @@ pub trait Visitor<'a> { self.write(" ")?; self.visit_expression(*right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] Compare::JsonCompare(json_compare) => match json_compare { JsonCompare::ArrayContains(left, right) => self.visit_json_array_contains(*left, *right, false), JsonCompare::ArrayNotContains(left, right) => self.visit_json_array_contains(*left, *right, true), @@ -1075,7 +1075,7 @@ pub trait Visitor<'a> { FunctionType::AggregateToString(agg) => { self.visit_aggregate_to_string(agg.value.as_ref().clone())?; } - #[cfg(all(feature = "json", feature = "postgresql"))] + #[cfg(feature = "postgresql")] FunctionType::RowToJson(row_to_json) => { self.write("ROW_TO_JSON")?; self.surround_with("(", ")", |ref mut s| s.visit_table(row_to_json.expr, false))? 
@@ -1107,19 +1107,19 @@ pub trait Visitor<'a> { self.write("COALESCE")?; self.surround_with("(", ")", |s| s.visit_columns(coalesce.exprs))?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtract(json_extract) => { self.visit_json_extract(json_extract)?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractFirstArrayElem(extract) => { self.visit_json_extract_first_array_item(extract)?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractLastArrayElem(extract) => { self.visit_json_extract_last_array_item(extract)?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonUnquote(unquote) => { self.visit_json_unquote(unquote)?; } diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index f2239cc4e8c..200731e82c6 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -1,5 +1,5 @@ use super::Visitor; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use crate::prelude::{JsonExtract, JsonType, JsonUnquote}; use crate::{ ast::*, @@ -363,7 +363,6 @@ impl<'a> Visitor<'a> for Mssql<'a> { Value::Geometry(g) => g.map(|g| self.visit_function(geom_from_text(g.wkt.raw(), g.srid.raw(), false))), #[cfg(feature = "gis")] Value::Geography(g) => g.map(|g| self.visit_function(geom_from_text(g.wkt.raw(), g.srid.raw(), true))), - #[cfg(feature = "json")] Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), #[cfg(feature = "bigdecimal")] Value::Numeric(r) => r.map(|r| self.write(r)), @@ -372,17 +371,14 @@ impl<'a> Visitor<'a> for Mssql<'a> { let s = format!("CONVERT(uniqueidentifier, N'{}')", uuid.hyphenated()); self.write(s) }), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| { let s = format!("CONVERT(datetimeoffset, N'{}')", dt.to_rfc3339()); self.write(s) }), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| { let s = format!("CONVERT(date, N'{date}')"); self.write(s) }), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| { let s = format!("CONVERT(time, N'{time}')"); self.write(s) @@ -713,12 +709,12 @@ impl<'a> Visitor<'a> for Mssql<'a> { self.write(if not { " = 0" } else { " = 1" }) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains( &mut self, _left: Expression<'a>, @@ -728,12 +724,12 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("JSON filtering is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result { unimplemented!("JSON_TYPE is not yet 
supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on MSSQL") } @@ -761,7 +757,7 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("Full-text search is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item( &mut self, _extract: crate::prelude::JsonExtractLastArrayElem<'a>, @@ -769,7 +765,7 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("JSON filtering is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item( &mut self, _extract: crate::prelude::JsonExtractFirstArrayElem<'a>, @@ -1357,7 +1353,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Mssql::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); assert_eq!("SELECT '{\"foo\":\"bar\"}'", sql); @@ -1379,7 +1375,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Mssql::build(Select::default().value(dt.raw())).unwrap(); diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index 2382ad84005..fb121d19ad9 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -34,7 +34,6 @@ impl<'a> Mysql<'a> { } fn visit_numeric_comparison(&mut self, left: Expression<'a>, right: Expression<'a>, sign: &str) -> visitor::Result { - #[cfg(feature = "json")] fn json_to_quaint_value<'a>(json: serde_json::Value) -> crate::Result> { match json { serde_json::Value::String(str) => Ok(Value::text(str)), @@ -61,7 +60,6 @@ impl<'a> Mysql<'a> { } match (left, right) { - #[cfg(feature = "json")] (left, right) if left.is_json_value() && right.is_fun_retuning_json() => { let quaint_value = json_to_quaint_value(left.into_json_value().unwrap())?; @@ -69,7 +67,7 @@ impl<'a> Mysql<'a> { self.write(format!(" {sign} "))?; self.visit_expression(right)?; } - #[cfg(feature = "json")] + (left, right) if left.is_fun_retuning_json() && right.is_json_value() => { let quaint_value = json_to_quaint_value(right.into_json_value().unwrap())?; @@ -153,7 +151,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { } #[cfg(feature = "bigdecimal")] Value::Numeric(r) => r.map(|r| self.write(r)), - #[cfg(feature = "json")] + Value::Json(j) => match j { Some(ref j) => { let s = serde_json::to_string(&j)?; @@ -167,11 +165,8 @@ impl<'a> Visitor<'a> for Mysql<'a> { Value::Geography(g) => g.map(|g| self.visit_function(geom_from_text(g.wkt.raw(), g.srid.raw(), true))), #[cfg(feature = "uuid")] Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), }; @@ -330,7 +325,6 @@ impl<'a> Visitor<'a> for Mysql<'a> { } fn visit_equals(&mut self, left: Expression<'a>, right: 
Expression<'a>) -> visitor::Result { - #[cfg(feature = "json")] { if right.is_json_expr() || left.is_json_expr() { self.surround_with("(", ")", |ref mut s| { @@ -354,15 +348,9 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.visit_regular_equality_comparison(left, right) } } - - #[cfg(not(feature = "json"))] - { - self.visit_regular_equality_comparison(left, right) - } } fn visit_not_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { - #[cfg(feature = "json")] { if right.is_json_expr() || left.is_json_expr() { self.surround_with("(", ")", |ref mut s| { @@ -386,14 +374,9 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.visit_regular_difference_comparison(left, right) } } - - #[cfg(not(feature = "json"))] - { - self.visit_regular_difference_comparison(left, right) - } } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result { if json_extract.extract_as_string { self.write("JSON_UNQUOTE(")?; @@ -418,7 +401,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result { self.write("JSON_CONTAINS(")?; self.visit_expression(left)?; @@ -433,7 +416,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result { self.write("(")?; self.write("JSON_TYPE")?; @@ -572,7 +555,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { self.write("JSON_EXTRACT(")?; self.visit_expression(*extract.expr.clone())?; @@ -585,7 +568,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { self.write("JSON_EXTRACT(")?; self.visit_expression(*extract.expr)?; @@ -596,7 +579,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result { self.write("JSON_UNQUOTE(")?; self.visit_expression(*json_unquote.expr)?; @@ -787,7 +770,6 @@ mod tests { ); } - #[cfg(feature = "json")] #[test] fn equality_with_a_json_value() { let expected = expected_values( @@ -802,7 +784,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn difference_with_a_json_value() { let expected = expected_values( @@ -941,7 +922,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Mysql::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); assert_eq!("SELECT CONVERT('{\"foo\":\"bar\"}', JSON)", sql); @@ -960,7 
+941,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Mysql::build(Select::default().value(dt.raw())).unwrap(); @@ -997,7 +977,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_json_negation() { let conditions = ConditionTree::not("json".equals(Value::Json(Some(serde_json::Value::Null)))); let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap(); @@ -1009,7 +989,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_json_not_negation() { let conditions = ConditionTree::not("json".not_equals(Value::Json(Some(serde_json::Value::Null)))); let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap(); diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 1c9eb3514a5..6c5e2954295 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -105,7 +105,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) }) }), - #[cfg(feature = "json")] + Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), #[cfg(feature = "gis")] Value::Geometry(g) => g.map(|g| self.visit_function(geom_from_text(g.wkt.raw(), g.srid.raw(), false))), @@ -115,11 +115,8 @@ impl<'a> Visitor<'a> for Postgres<'a> { Value::Numeric(r) => r.map(|r| self.write(r)), #[cfg(feature = "uuid")] Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), }; @@ -233,14 +230,12 @@ impl<'a> Visitor<'a> for Postgres<'a> { fn visit_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { // LHS must be cast to json/xml-text if the right is a json/xml-text value and vice versa. let right_cast = match left { - #[cfg(feature = "json")] _ if left.is_json_value() => "::jsonb", _ if left.is_xml_value() => "::text", _ => "", }; let left_cast = match right { - #[cfg(feature = "json")] _ if right.is_json_value() => "::jsonb", _ if right.is_xml_value() => "::text", _ => "", @@ -258,14 +253,12 @@ impl<'a> Visitor<'a> for Postgres<'a> { fn visit_not_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { // LHS must be cast to json/xml-text if the right is a json/xml-text value and vice versa. 
let right_cast = match left { - #[cfg(feature = "json")] _ if left.is_json_value() => "::jsonb", _ if left.is_xml_value() => "::text", _ => "", }; let left_cast = match right { - #[cfg(feature = "json")] _ if right.is_json_value() => "::jsonb", _ if right.is_xml_value() => "::text", _ => "", @@ -287,7 +280,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result { match json_extract.path { #[cfg(feature = "mysql")] @@ -327,7 +320,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result { self.write("(")?; self.visit_expression(*json_unquote.expr)?; @@ -337,7 +330,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result { if not { self.write("( NOT ")?; @@ -354,7 +347,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { self.write("(")?; self.visit_expression(*extract.expr)?; @@ -364,7 +357,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { self.write("(")?; self.visit_expression(*extract.expr)?; @@ -374,7 +367,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result { self.write("JSONB_TYPEOF")?; self.write("(")?; @@ -775,7 +768,6 @@ mod tests { assert_eq!(expected_sql, sql); } - #[cfg(feature = "json")] #[test] fn equality_with_a_json_value() { let expected = expected_values( @@ -790,7 +782,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn equality_with_a_lhs_json_value() { // A bit artificial, but checks if the ::jsonb casting is done correctly on the right side as well. 
@@ -807,7 +798,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn difference_with_a_json_value() { let expected = expected_values( @@ -823,7 +813,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn difference_with_a_lhs_json_value() { let expected = expected_values( @@ -953,7 +942,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Postgres::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); @@ -973,7 +962,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Postgres::build(Select::default().value(dt.raw())).unwrap(); diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 4e1231e2537..80f3196b73b 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -103,7 +103,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> { return Err(builder.build()); } - #[cfg(feature = "json")] + Value::Json(j) => match j { Some(ref j) => { let s = serde_json::to_string(j)?; @@ -115,11 +115,8 @@ impl<'a> Visitor<'a> for Sqlite<'a> { Value::Numeric(r) => r.map(|r| self.write(r)), #[cfg(feature = "uuid")] Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), #[cfg(feature = "gis")] @@ -308,12 +305,12 @@ impl<'a> Visitor<'a> for Sqlite<'a> { } } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains( &mut self, _left: Expression<'a>, @@ -323,7 +320,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result { unimplemented!("JSON_TYPE is not yet supported on SQLite") } @@ -348,17 +345,17 @@ impl<'a> Visitor<'a> for Sqlite<'a> { unimplemented!("Full-text search is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, _extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, _extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = 
"postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } @@ -961,7 +958,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Sqlite::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); assert_eq!("SELECT '{\"foo\":\"bar\"}'", sql); @@ -980,7 +977,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Sqlite::build(Select::default().value(dt.raw())).unwrap(); diff --git a/query-engine/black-box-tests/Cargo.toml b/query-engine/black-box-tests/Cargo.toml index 8bea1998d7c..056ee2bcdb4 100644 --- a/query-engine/black-box-tests/Cargo.toml +++ b/query-engine/black-box-tests/Cargo.toml @@ -14,3 +14,4 @@ tokio.workspace = true user-facing-errors.workspace = true insta = "1.7.1" enumflags2 = "0.7" +query-engine-metrics = {path = "../metrics"} diff --git a/query-engine/black-box-tests/tests/black_box_tests.rs b/query-engine/black-box-tests/tests/black_box_tests.rs index 6c2028e1fe0..d3e6c7065b4 100644 --- a/query-engine/black-box-tests/tests/black_box_tests.rs +++ b/query-engine/black-box-tests/tests/black_box_tests.rs @@ -4,3 +4,5 @@ mod helpers; mod metrics; mod protocols; + +use query_engine_metrics; diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index 6b81dd2e3e8..8542f753b78 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -15,6 +15,7 @@ mod smoke_tests { } #[connector_test] + #[rustfmt::skip] async fn expected_metrics_rendered(r: Runner) -> TestResult<()> { let mut qe_cmd = query_engine_cmd(r.prisma_dml(), "57582"); qe_cmd.arg("--enable-metrics"); @@ -54,23 +55,57 @@ mod smoke_tests { .text() .await .unwrap(); + + // I would have loved to use insta in here and check the snapshot but the order of the metrics is not guaranteed + // And I opted for the manual checking of invariant data that provided enough confidence instead // counters - assert!(metrics.contains("prisma_client_queries_total counter")); - assert!(metrics.contains("prisma_datasource_queries_total counter")); - assert!(metrics.contains("prisma_pool_connections_opened_total counter")); - assert!(metrics.contains("prisma_pool_connections_closed_total counter")); + assert_eq!(metrics.matches("# HELP prisma_client_queries_total The total number of Prisma Client queries executed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_total The total number of datasource queries executed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_closed_total The total number of pool connections closed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_closed_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_opened_total The total number of pool connections opened").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_opened_total counter").count(), 1); + // gauges - assert!(metrics.contains("prisma_pool_connections_open gauge")); 
- assert!(metrics.contains("prisma_pool_connections_busy gauge")); - assert!(metrics.contains("prisma_pool_connections_idle gauge")); - assert!(metrics.contains("prisma_client_queries_active gauge")); - assert!(metrics.contains("prisma_client_queries_wait gauge")); + assert_eq!(metrics.matches("# HELP prisma_client_queries_active The number of currently active Prisma Client queries").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_active gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for an free connection").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_busy gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_idle The number of pool connections that are not busy running a query").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_idle gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_open The number of pool connections currently open").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_open gauge").count(), 1); + // histograms - assert!(metrics.contains("prisma_client_queries_duration_histogram_ms histogram")); - assert!(metrics.contains("prisma_client_queries_wait_histogram_ms histogram")); - assert!(metrics.contains("prisma_datasource_queries_duration_histogram_ms histogram")); - }) - .await + assert_eq!(metrics.matches("# HELP prisma_client_queries_duration_histogram_ms The distribution of the time Prisma Client queries took to run end to end").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_duration_histogram_ms histogram").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait_histogram_ms The distribution of the time all datasource queries spent waiting for a free connection").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait_histogram_ms histogram").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_duration_histogram_ms The distribution of the time datasource queries took to run").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_duration_histogram_ms histogram").count(), 1); + + // Check that exist as many metrics as being accepted + let accepted_metric_count = query_engine_metrics::ACCEPT_LIST.len(); + let displayed_metric_count = metrics.matches("# TYPE").count(); + let non_prisma_metric_count = displayed_metric_count - metrics.matches("# TYPE prisma").count(); + + assert_eq!(displayed_metric_count, accepted_metric_count); + assert_eq!(non_prisma_metric_count, 0); + + }).await } } diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index e6821193a62..2c849a2aa98 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -1,6 +1,5 @@ # Query Engine Test Kit - A Full Guide -The test kit is a (currently incomplete) port of the Scala test kit, located in `../connector-test-kit`. -It's fully focused on integration testing the query engine through request-response assertions. 
+The test kit is focused on integration testing the query engine through request-response assertions. ## Test organization @@ -35,8 +34,10 @@ Contains the main bulk of logic to make tests run, which is mostly invisible to Tests are executed in the context of *one* _connector_ (with version) and _runner_. Some tests may only be specified to run for a subset of connectors or versions, in which case they will be skipped. Testing all connectors at once is not supported, however, for example, CI will run all the different connectors and versions concurrently in separate runs. ### Configuration + Tests must be configured to run There's a set of env vars that is always useful to have and an optional one. Always useful to have: + ```shell export WORKSPACE_ROOT=/path/to/engines/repository/root ``` @@ -54,6 +55,7 @@ As previously stated, the above can be omitted in favor of the `.test_config` co "version": "10" } ``` + The config file must be either in the current working folder from which you invoke a test run or in `$WORKSPACE_ROOT`. It's recommended to use the file-based config as it's easier to switch between providers with an open IDE (reloading env vars would usually require reloading the IDE). The workspace root makefile contains a series of convenience commands to setup different connector test configs, e.g. `make dev-postgres10` sets up the correct test config file for the tests to pick up. @@ -62,7 +64,27 @@ On the note of docker containers: Most connectors require an endpoint to run aga If you choose to set up the databases yourself, please note that the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) to set up user, password and database for the test user. +#### Running tests through driver adapters + +The query engine is able to delegate query execution to JavaScript through [driver adapters](query-engine/driver-adapters/js/README.md). +This means that instead of drivers implemented in Rust, a layer of adapters over Node.js drivers is the code that actually communicates with the databases. + +To run tests through a driver adapter, you should also configure the following environment variables: + +* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries. This is a Node.js process running a program that reads the queries to run from STDIN and returns responses to STDOUT. The connector kit follows a protocol over JSON-RPC for this communication. +* `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. +* `DRIVER_ADAPTER_CONFIG`: a JSON string with the configuration for the driver adapter. This is adapter-specific. See the [GitHub workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples of how to configure the driver adapters. + +Example: + +```shell +export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +export DRIVER_ADAPTER=neon +export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }' +``` + ### Running + Note that by default tests run concurrently. - VSCode should automatically detect tests and display `run test`.
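As a rough illustration of the JSON-RPC exchange described in the README section above (one request per line on the executor's STDIN, one response line back on STDOUT), the following standalone Rust sketch prints what such lines could look like. The `teardown` method and its `schemaId` parameter appear in the test setup code later in this diff; the request id and the response shapes are assumptions based on plain JSON-RPC 2.0, not taken from the executor itself.

```rust
use serde_json::json;

fn main() {
    // What the test kit writes to the executor's STDIN: one JSON-RPC 2.0 call per line.
    // "teardown" and "schemaId" come from the test teardown code later in this diff;
    // the id value is arbitrary and only used to match responses to requests.
    let request = json!({
        "jsonrpc": "2.0",
        "id": 0,
        "method": "teardown",
        "params": { "schemaId": 1 }
    });
    println!("{request}");

    // What the executor is expected to write back on STDOUT, also one line per request:
    // either a success or a failure object carrying the same id. These shapes are assumed
    // from the JSON-RPC 2.0 spec rather than copied from the executor implementation.
    let success = json!({ "jsonrpc": "2.0", "id": 0, "result": null });
    let failure = json!({ "jsonrpc": "2.0", "id": 0, "error": { "code": -32000, "message": "boom" } });
    println!("{success}");
    println!("{failure}");
}
```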
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index 8a20f0e4caa..a7097922b4d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -9,7 +9,7 @@ mod metrics { use query_engine_tests::*; use serde_json::Value; - #[connector_test] + #[connector_test(exclude(Js))] async fn metrics_are_recorded(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 1 }) { id }}"#), @@ -40,7 +40,7 @@ mod metrics { Ok(()) } - #[connector_test] + #[connector_test(exclude(Js))] async fn metrics_tx_do_not_go_negative(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, None).await?; runner.set_active_tx(tx_id.clone()); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs index a3a7c7d34a9..8c6e24db67e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs @@ -72,7 +72,7 @@ mod transactional { let batch_results = runner.batch(queries, true, None).await?; let batch_request_idx = batch_results.errors().get(0).unwrap().batch_request_idx(); - assert_eq!(batch_request_idx, Some(1usize)); + assert_eq!(batch_request_idx, Some(1)); Ok(()) } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs index 2eb06977fc9..be5d80480de 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs @@ -353,21 +353,6 @@ mod json_create { #[connector_test(capabilities(AdvancedJsonNullability))] async fn create_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - createOneTestModel(data: { id: 1, json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index f82e217bb67..35a044b1473 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -360,23 +360,6 @@ mod json_create_many { #[connector_test(capabilities(AdvancedJsonNullability))] async fn create_many_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. 
- if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - createManyTestModel(data: [ - { id: 1, json: null }, - ]) { - count - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs index c2ed7f92cb4..4fe0726a3cc 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs @@ -764,21 +764,6 @@ mod json_update { #[connector_test(capabilities(AdvancedJsonNullability))] async fn update_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - updateOneTestModel(where: { id: 1 }, data: { json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs index fd0068761a5..7e969e21cdc 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs @@ -386,21 +386,6 @@ mod json_update_many { #[connector_test(capabilities(AdvancedJsonNullability))] async fn update_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - updateManyTestModel(where: { id: 1 }, data: { json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml index be09bc26ac4..088a0d4b2d3 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml @@ -11,6 +11,7 @@ qe-setup = { path = "../qe-setup" } request-handlers = { path = "../../request-handlers" } tokio.workspace = true query-core = { path = "../../core" } +sql-query-connector = { path = "../../connectors/sql-query-connector" } query-engine = { path = "../../query-engine"} psl.workspace = true user-facing-errors = { path = "../../../libs/user-facing-errors" } @@ -31,6 +32,7 @@ hyper = { version = "0.14", features = ["full"] } indexmap = { version = "1.0", features = ["serde-1"] } query-engine-metrics = {path = "../../metrics"} quaint.workspace = true +jsonrpc-core = "17" # Only this version is vetted, upgrade only after going through the code, # as this is a small crate with little user base. 
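The configuration and process-management code added below wires this flow up with tokio and `jsonrpc-core`. As a minimal sketch of what it implements, here is a blocking, std-only version that is not part of the patch: spawn the executor script with the environment variables the test config forwards to it (`DRIVER_ADAPTER`, `DRIVER_ADAPTER_CONFIG`, `PRISMA_DISABLE_QUAINT_EXECUTORS`) and exchange newline-delimited JSON-RPC messages over its STDIN/STDOUT. The adapter name, config value, and request used here are placeholders.

```rust
use std::io::{BufRead, BufReader, Write};
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // Spawn the executor script with the env vars the test config forwards to it.
    let mut child = Command::new(
        "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh",
    )
    .env("DRIVER_ADAPTER", "pg") // placeholder adapter name
    .env("DRIVER_ADAPTER_CONFIG", "{}") // placeholder, stringified JSON config
    .env("PRISMA_DISABLE_QUAINT_EXECUTORS", "1")
    .stdin(Stdio::piped())
    .stdout(Stdio::piped())
    .stderr(Stdio::inherit())
    .spawn()?;

    // One JSON-RPC request per line on stdin...
    let request = r#"{"jsonrpc":"2.0","id":0,"method":"teardown","params":{"schemaId":1}}"#;
    writeln!(child.stdin.as_mut().expect("piped stdin"), "{request}")?;

    // ...and one JSON response line per request on stdout.
    let mut response = String::new();
    BufReader::new(child.stdout.take().expect("piped stdout")).read_line(&mut response)?;
    println!("executor replied: {response}");

    // Close stdin so the executor can exit before we wait on it.
    drop(child.stdin.take());
    child.wait()?;
    Ok(())
}
```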
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index 8807b4e0dbd..944f1c40040 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -20,6 +20,21 @@ pub struct TestConfig { #[serde(rename = "version")] connector_version: Option, + /// An external process to execute the test queries and produced responses for assertion + /// Used when testing driver adapters, this process is expected to be a javascript process + /// loading the library engine (as a library, or WASM modules) and providing it with a + /// driver adapter. + /// Env key: `EXTERNAL_TEST_EXECUTOR` + external_test_executor: Option, + + /// The driver adapter to use when running tests, will be forwarded to the external test + /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process + driver_adapter: Option, + + /// The driver adapter configuration to forward as a stringified JSON object to the external + /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor + driver_adapter_config: Option, + /// Indicates whether or not the tests are running in CI context. /// Env key: `BUILDKITE` #[serde(default)] @@ -35,13 +50,22 @@ const CONFIG_LOAD_FAILED: &str = r####" Test config can come from the environment, or a config file. -♻️ Environment +♻️ Environment variables + +Be sure to have WORKSPACE_ROOT set to the root of the prisma-engines +repository. -Set the following env vars: +Set the following vars to denote the connector under test - TEST_CONNECTOR - TEST_CONNECTOR_VERSION (optional) +And optionally, to test driver adapters + +- EXTERNAL_TEST_EXECUTOR +- DRIVER_ADAPTER +- DRIVER_ADAPTER_CONFIG (optional, not required by all driver adapters) + 📁 Config file Use the Makefile. @@ -51,8 +75,9 @@ fn exit_with_message(msg: &str) -> ! { use std::io::{stderr, Write}; let stderr = stderr(); let mut sink = stderr.lock(); + sink.write_all(b"Error in the test configuration:\n").unwrap(); sink.write_all(msg.as_bytes()).unwrap(); - sink.write_all(b"\n").unwrap(); + sink.write_all(b"Aborting test process\n").unwrap(); std::process::exit(1) } @@ -60,31 +85,44 @@ fn exit_with_message(msg: &str) -> ! { impl TestConfig { /// Loads a configuration. File-based config has precedence over env config. pub(crate) fn load() -> Self { - let config = match Self::from_file().or_else(Self::from_env) { + let mut config = match Self::from_file().or_else(Self::from_env) { Some(config) => config, None => exit_with_message(CONFIG_LOAD_FAILED), }; + config.fill_defaults(); config.validate(); config.log_info(); + config } + #[rustfmt::skip] fn log_info(&self) { println!("******************************"); println!("* Test run information:"); println!( "* Connector: {} {}", self.connector, - self.connector_version.as_ref().unwrap_or(&"".to_owned()) + self.connector_version().unwrap_or_default() ); println!("* CI? 
{}", self.is_ci); + if self.external_test_executor.as_ref().is_some() { + println!("* External test executor: {}", self.external_test_executor().unwrap_or_default()); + println!("* Driver adapter: {}", self.driver_adapter().unwrap_or_default()); + println!("* Driver adapter url override: {}", self.json_stringify_driver_adapter_config()); + } println!("******************************"); } fn from_env() -> Option { let connector = std::env::var("TEST_CONNECTOR").ok(); let connector_version = std::env::var("TEST_CONNECTOR_VERSION").ok(); + let external_test_executor = std::env::var("EXTERNAL_TEST_EXECUTOR").ok(); + let driver_adapter = std::env::var("DRIVER_ADAPTER").ok(); + let driver_adapter_config = std::env::var("DRIVER_ADAPTER_CONFIG") + .map(|config| serde_json::from_str::(config.as_str()).ok()) + .unwrap_or_default(); // Just care for a set value for now. let is_ci = std::env::var("BUILDKITE").is_ok(); @@ -93,16 +131,18 @@ impl TestConfig { connector, connector_version, is_ci, + external_test_executor, + driver_adapter, + driver_adapter_config, }) } fn from_file() -> Option { let current_dir = env::current_dir().ok(); - let workspace_root = std::env::var("WORKSPACE_ROOT").ok().map(PathBuf::from); current_dir .and_then(|path| Self::try_path(config_path(path))) - .or_else(|| workspace_root.and_then(|path| Self::try_path(config_path(path)))) + .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))) } fn try_path(path: PathBuf) -> Option { @@ -115,6 +155,33 @@ impl TestConfig { }) } + /// if the loaded value for external_test_executor is "default" (case insensitive), + /// and the workspace_root is set, then use the default external test executor. + fn fill_defaults(&mut self) { + const DEFAULT_TEST_EXECUTOR: &str = + "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + + if self + .external_test_executor + .as_ref() + .filter(|s| s.eq_ignore_ascii_case("default")) + .is_some() + { + self.external_test_executor = Self::workspace_root() + .map(|path| path.join(DEFAULT_TEST_EXECUTOR)) + .or_else(|| { + exit_with_message( + "WORKSPACE_ROOT needs to be correctly set to the root of the prisma-engines repository", + ) + }) + .and_then(|path| path.to_str().map(|s| s.to_owned())); + } + } + + fn workspace_root() -> Option { + env::var("WORKSPACE_ROOT").ok().map(PathBuf::from) + } + fn validate(&self) { if self.connector.is_empty() { exit_with_message("A test connector is required but was not set."); @@ -138,6 +205,38 @@ impl TestConfig { | Ok(ConnectorVersion::Sqlite) => (), Err(err) => exit_with_message(&err.to_string()), } + + if let Some(file) = self.external_test_executor.as_ref() { + let path = PathBuf::from(file); + let md = path.metadata(); + if !path.exists() || md.is_err() || !md.unwrap().is_file() { + exit_with_message(&format!("The external test executor path `{}` must be a file", file)); + } + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + if path.metadata().is_ok_and(|md| md.permissions().mode() & 0o111 == 0) { + exit_with_message(&format!( + "The external test executor file `{}` must be have permissions to execute", + file + )); + } + } + } + + if self.external_test_executor.is_some() && self.driver_adapter.is_none() { + exit_with_message( + "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); + } + + if self.driver_adapter.is_some() && self.external_test_executor.is_none() { + exit_with_message("When using a driver adapter, the external test executor must be 
set."); + } + + if self.driver_adapter.is_none() && self.driver_adapter_config.is_some() { + exit_with_message("When using a driver adapter config, the driver adapter must be set."); + } } pub fn connector(&self) -> &str { @@ -145,13 +244,28 @@ impl TestConfig { } pub(crate) fn connector_version(&self) -> Option<&str> { - self.connector_version.as_ref().map(AsRef::as_ref) + self.connector_version.as_deref() } pub fn is_ci(&self) -> bool { self.is_ci } + pub fn external_test_executor(&self) -> Option<&str> { + self.external_test_executor.as_deref() + } + + pub fn driver_adapter(&self) -> Option<&str> { + self.driver_adapter.as_deref() + } + + pub fn json_stringify_driver_adapter_config(&self) -> String { + self.driver_adapter_config + .as_ref() + .map(|value| value.to_string()) + .unwrap_or("{}".to_string()) + } + pub fn test_connector(&self) -> TestResult<(ConnectorTag, ConnectorVersion)> { let version = ConnectorVersion::try_from((self.connector(), self.connector_version()))?; let tag = match version { @@ -166,6 +280,23 @@ impl TestConfig { Ok((tag, version)) } + + #[rustfmt::skip] + pub fn for_external_executor(&self) -> Vec<(String, String)> { + vec!( + ( + "DRIVER_ADAPTER".to_string(), + self.driver_adapter.clone().unwrap_or_default()), + ( + "DRIVER_ADAPTER_CONFIG".to_string(), + self.json_stringify_driver_adapter_config() + ), + ( + "PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), + "1".to_string(), + ), + ) + } } fn config_path(mut path: PathBuf) -> PathBuf { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs new file mode 100644 index 00000000000..2ec8513baed --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs @@ -0,0 +1,14 @@ +mod external_process; + +use super::*; +use external_process::*; +use serde::de::DeserializeOwned; +use std::{collections::HashMap, sync::atomic::AtomicU64}; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; + +pub(crate) async fn executor_process_request( + method: &str, + params: serde_json::Value, +) -> Result> { + EXTERNAL_PROCESS.request(method, params).await +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs new file mode 100644 index 00000000000..7ab0e6e8a02 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -0,0 +1,170 @@ +use super::*; +use once_cell::sync::Lazy; +use serde::de::DeserializeOwned; +use std::{fmt::Display, io::Write as _, sync::atomic::Ordering}; +use tokio::sync::{mpsc, oneshot}; + +type Result = std::result::Result>; + +#[derive(Debug)] +struct GenericError(String); + +impl Display for GenericError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl std::error::Error for GenericError {} + +pub(crate) struct ExecutorProcess { + task_handle: mpsc::Sender, + request_id_counter: AtomicU64, +} + +fn exit_with_message(status_code: i32, message: &str) -> ! 
{ + let stdout = std::io::stdout(); + stdout.lock().write_all(message.as_bytes()).unwrap(); + std::process::exit(status_code) +} + +impl ExecutorProcess { + fn new() -> Result { + let (sender, receiver) = mpsc::channel::(300); + + std::thread::spawn(|| match start_rpc_thread(receiver) { + Ok(()) => (), + Err(err) => { + exit_with_message(1, &err.to_string()); + } + }); + + Ok(ExecutorProcess { + task_handle: sender, + request_id_counter: Default::default(), + }) + } + + /// Convenient façade. Allocates more than necessary, but this is only for testing. + #[tracing::instrument(skip(self))] + pub(crate) async fn request(&self, method: &str, params: serde_json::Value) -> Result { + let (sender, receiver) = oneshot::channel(); + let params = if let serde_json::Value::Object(params) = params { + params + } else { + panic!("params aren't an object") + }; + let method_call = jsonrpc_core::MethodCall { + jsonrpc: Some(jsonrpc_core::Version::V2), + method: method.to_owned(), + params: jsonrpc_core::Params::Map(params), + id: jsonrpc_core::Id::Num(self.request_id_counter.fetch_add(1, Ordering::Relaxed)), + }; + + self.task_handle.send((method_call, sender)).await?; + let raw_response = receiver.await?; + tracing::debug!(%raw_response); + let response = serde_json::from_value(raw_response)?; + Ok(response) + } +} + +pub(super) static EXTERNAL_PROCESS: Lazy = + Lazy::new(|| match std::thread::spawn(ExecutorProcess::new).join() { + Ok(Ok(process)) => process, + Ok(Err(err)) => exit_with_message(1, &format!("Failed to start node process. Details: {err}")), + Err(err) => { + let err = err.downcast_ref::().map(ToOwned::to_owned).unwrap_or_default(); + exit_with_message(1, &format!("Panic while trying to start node process.\nDetails: {err}")) + } + }); + +type ReqImpl = (jsonrpc_core::MethodCall, oneshot::Sender); + +fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { + use std::process::Stdio; + use tokio::process::Command; + + let path = crate::CONFIG + .external_test_executor() + .unwrap_or_else(|| exit_with_message(1, "start_rpc_thread() error: external test executor is not set")); + + tokio::runtime::Builder::new_current_thread() + .enable_io() + .build() + .unwrap() + .block_on(async move { + let process = match Command::new(path) + .envs(CONFIG.for_external_executor()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()) + .spawn() + { + Ok(process) => process, + Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process: `{path}`. Details: {err}\n")), + }; + + let mut stdout = BufReader::new(process.stdout.unwrap()).lines(); + let mut stdin = process.stdin.unwrap(); + let mut pending_requests: HashMap> = + HashMap::new(); + + loop { + tokio::select! { + line = stdout.next_line() => { + match line { + // Two error modes in here: the external process can response with + // something that is not a jsonrpc response (basically any normal logging + // output), or it can respond with a jsonrpc response that represents a + // failure. + Ok(Some(line)) => // new response + { + match serde_json::from_str::(&line) { + Ok(response) => { + let sender = pending_requests.remove(response.id()).unwrap(); + match response { + jsonrpc_core::Output::Success(success) => { + sender.send(success.result).unwrap(); + } + jsonrpc_core::Output::Failure(err) => { + panic!("error response from jsonrpc: {err:?}") + } + } + } + Err(err) => { + tracing::error!(%err, "error when decoding response from child node process. 
Response was: `{}`", &line); + continue + } + }; + } + Ok(None) => // end of the stream + { + exit_with_message(1, "child node process stdout closed") + } + Err(err) => // log it + { + tracing::error!(%err, "Error when reading from child node process"); + } + } + } + request = receiver.recv() => { + match request { + None => // channel closed + { + exit_with_message(1, "The json-rpc client channel was closed"); + } + Some((request, response_sender)) => { + pending_requests.insert(request.id.clone(), response_sender); + let mut req = serde_json::to_vec(&request).unwrap(); + req.push(b'\n'); + stdin.write_all(&req).await.unwrap(); + } + } + } + } + } + }); + + Ok(()) +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index 77c3190da3e..0e43ad10954 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -1,4 +1,5 @@ mod cockroachdb; +mod js; mod mongodb; mod mysql; mod postgres; @@ -6,10 +7,12 @@ mod sql_server; mod sqlite; mod vitess; -pub use mysql::*; +pub use mysql::MySqlVersion; pub(crate) use cockroachdb::*; +pub(crate) use js::*; pub(crate) use mongodb::*; +pub(crate) use mysql::*; pub(crate) use postgres::*; pub(crate) use sql_server::*; pub(crate) use sqlite::*; @@ -24,7 +27,7 @@ pub trait ConnectorTagInterface { /// The name of the datamodel provider for this connector. /// Must match valid datamodel provider strings. - fn datamodel_provider(&self) -> &'static str; + fn datamodel_provider(&self) -> &str; /// Returns the renderer to be used for templating the datamodel (the models portion). fn datamodel_renderer(&self) -> Box; @@ -35,7 +38,7 @@ pub trait ConnectorTagInterface { /// Defines where relational constraints are handled: /// - "prisma" is handled in the Query Engine core /// - "foreignKeys" lets the database handle them - fn relation_mode(&self) -> &'static str { + fn relation_mode(&self) -> &str { "foreignKeys" } } @@ -303,10 +306,14 @@ pub(crate) fn should_run( .any(|only| ConnectorVersion::try_from(*only).unwrap().matches_pattern(&version)); } - if exclude - .iter() - .any(|excl| ConnectorVersion::try_from(*excl).unwrap().matches_pattern(&version)) - { + if CONFIG.external_test_executor().is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { + println!("Excluded test execution for JS driver adapters. Skipping test"); + return false; + }; + + if exclude.iter().any(|excl| { + ConnectorVersion::try_from(*excl).map_or(false, |connector_version| connector_version.matches_pattern(&version)) + }) { println!("Connector excluded. 
Skipping test."); return false; } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs index 79fb457015b..041c63f9dd4 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs @@ -6,6 +6,9 @@ use thiserror::Error; #[allow(clippy::large_enum_variant)] #[derive(Debug, Error)] pub enum TestError { + #[error("Handler Error: {0}")] + RequestHandlerError(request_handlers::HandlerError), + #[error("Parse error: {0}")] ParseError(String), @@ -26,6 +29,9 @@ pub enum TestError { #[error("Raw execute error: {0}")] RawExecute(QuaintError), + + #[error("External process error: {0}")] + External(#[from] Box), } impl TestError { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index 0cdf3d1d3ef..2e79581a0c7 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -43,7 +43,12 @@ pub static ENGINE_PROTOCOL: Lazy = Lazy::new(|| std::env::var("PRISMA_ENGINE_PROTOCOL").unwrap_or_else(|_| "graphql".to_owned())); /// Teardown of a test setup. -async fn teardown_project(datamodel: &str, db_schemas: &[&str]) -> TestResult<()> { +async fn teardown_project(datamodel: &str, db_schemas: &[&str], schema_id: Option) -> TestResult<()> { + if let Some(schema_id) = schema_id { + let params = serde_json::json!({ "schemaId": schema_id }); + executor_process_request::("teardown", params).await?; + } + Ok(qe_setup::teardown(datamodel, db_schemas).await?) } @@ -167,7 +172,9 @@ fn run_relation_link_test_impl( test_fn(&runner, &dm).await.unwrap(); - teardown_project(&datamodel, Default::default()).await.unwrap(); + teardown_project(&datamodel, Default::default(), runner.schema_id()) + .await + .unwrap(); } .with_subscriber(test_tracing_subscriber( ENV_LOG_LEVEL.to_string(), @@ -275,10 +282,13 @@ fn run_connector_test_impl( ) .await .unwrap(); + let schema_id = runner.schema_id(); test_fn(runner).await.unwrap(); - crate::teardown_project(&datamodel, db_schemas).await.unwrap(); + crate::teardown_project(&datamodel, db_schemas, schema_id) + .await + .unwrap(); } .with_subscriber(test_tracing_subscriber( ENV_LOG_LEVEL.to_string(), diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs index 83855fde1c5..d45f4ae04c7 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs @@ -1,15 +1,66 @@ +use query_core::constants::custom_types; use request_handlers::{GQLError, PrismaResponse}; +use serde::{Deserialize, Serialize}; +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct SimpleGqlErrorResponse { + #[serde(skip_serializing_if = "Vec::is_empty")] + errors: Vec, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct SimpleGqlResponse { + #[serde(skip_serializing_if = "SimpleGqlResponse::data_is_empty")] + #[serde(default)] + data: serde_json::Value, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + errors: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default)] + extensions: Option, +} + +impl SimpleGqlResponse { + fn data_is_empty(data: &serde_json::Value) -> bool { + match data { + 
serde_json::Value::Object(o) => o.is_empty(), + serde_json::Value::Null => true, + _ => false, + } + } +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +struct SimpleGqlBatchResponse { + batch_result: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + errors: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + extensions: Option, +} -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(untagged)] +enum Response { + Error(SimpleGqlErrorResponse), + Multi(SimpleGqlBatchResponse), + Single(SimpleGqlResponse), +} + +#[derive(Debug, Deserialize, PartialEq)] +#[serde(transparent)] pub struct QueryResult { - response: PrismaResponse, + response: Response, } impl QueryResult { pub fn failed(&self) -> bool { match self.response { - PrismaResponse::Single(ref s) => s.errors().next().is_some(), - PrismaResponse::Multi(ref m) => m.errors().next().is_some(), + Response::Error(ref s) => !s.errors.is_empty(), + Response::Single(ref s) => !s.errors.is_empty(), + Response::Multi(ref m) => !(m.errors.is_empty() && m.batch_result.iter().all(|res| res.errors.is_empty())), } } @@ -70,8 +121,13 @@ impl QueryResult { pub fn errors(&self) -> Vec<&GQLError> { match self.response { - PrismaResponse::Single(ref s) => s.errors().collect(), - PrismaResponse::Multi(ref m) => m.errors().collect(), + Response::Error(ref s) => s.errors.iter().collect(), + Response::Single(ref s) => s.errors.iter().collect(), + Response::Multi(ref m) => m + .errors + .iter() + .chain(m.batch_result.iter().flat_map(|res| res.errors.iter())) + .collect(), } } @@ -82,6 +138,20 @@ impl QueryResult { pub fn to_string_pretty(&self) -> String { serde_json::to_string_pretty(&self.response).unwrap() } + + /// Transform a JSON protocol response to a GraphQL protocol response, by removing the type + /// tags. 
+ pub(crate) fn detag(&mut self) { + match &mut self.response { + Response::Error(_) => (), + Response::Single(res) => detag_value(&mut res.data), + Response::Multi(res) => { + for res in &mut res.batch_result { + detag_value(&mut res.data) + } + } + } + } } impl ToString for QueryResult { @@ -92,6 +162,149 @@ impl ToString for QueryResult { impl From for QueryResult { fn from(response: PrismaResponse) -> Self { - Self { response } + match response { + PrismaResponse::Single(res) => QueryResult { + response: Response::Single(SimpleGqlResponse { + data: serde_json::to_value(res.data).unwrap(), + errors: res.errors, + extensions: (!res.extensions.is_empty()).then(|| serde_json::to_value(&res.extensions).unwrap()), + }), + }, + PrismaResponse::Multi(reses) => QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: reses + .batch_result + .into_iter() + .map(|res| SimpleGqlResponse { + data: serde_json::to_value(&res.data).unwrap(), + errors: res.errors, + extensions: (!res.extensions.is_empty()) + .then(|| serde_json::to_value(&res.extensions).unwrap()), + }) + .collect(), + errors: reses.errors, + extensions: (!reses.extensions.is_empty()) + .then(|| serde_json::to_value(&reses.extensions).unwrap()), + }), + }, + } + } +} + +fn detag_value(val: &mut serde_json::Value) { + match val { + serde_json::Value::Object(obj) => { + if obj.len() == 2 && obj.contains_key(custom_types::TYPE) && obj.contains_key(custom_types::VALUE) { + let mut new_val = obj.remove(custom_types::VALUE).unwrap(); + detag_value(&mut new_val); + *val = new_val; + } else { + for elem in obj.values_mut() { + detag_value(elem); + } + } + } + serde_json::Value::Array(arr) => { + for elem in arr { + detag_value(elem) + } + } + _ => (), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn test_deserializing_successful_batch_response() { + let response = "{\"batchResult\":[{\"data\":{\"findUniqueTestModelOrThrow\":{\"id\":1}}},{\"data\":{\"findUniqueTestModelOrThrow\":{\"id\":2}}}]}"; + let result: QueryResult = serde_json::from_str(response).unwrap(); + + let expected = QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: vec![ + SimpleGqlResponse { + data: json!({ + "findUniqueTestModelOrThrow": { + "id": 1, + }, + }), + errors: vec![], + extensions: None, + }, + SimpleGqlResponse { + data: json!({ + "findUniqueTestModelOrThrow": { + "id": 2, + }, + }), + errors: vec![], + extensions: None, + }, + ], + errors: vec![], + extensions: None, + }), + }; + assert_eq!(result, expected); + } + + #[test] + fn test_deserializing_error_batch_response() { + let response = r###" +{ + "batchResult":[ + { + "data":{ + "findUniqueTestModelOrThrow":{ + "id":2 + } + } + }, + { + "errors":[ + { + "error":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.", + "user_facing_error":{ + "is_panic":false, + "message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.", + "meta":{ + "cause":"Expected a record, found none." 
+ }, + "error_code":"P2025" + } + } + ] + } + ] +}"###; + let result: QueryResult = serde_json::from_str(response).unwrap(); + + let expected = QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: vec![ + SimpleGqlResponse { + data: json!({"findUniqueTestModelOrThrow": {"id": 2}}), + errors: vec![], + extensions: None, + }, + SimpleGqlResponse { + data: serde_json::Value::Null, + errors: vec![GQLError::from_user_facing_error(user_facing_errors::KnownError { + message: "An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.".to_string(), + meta: json!({"cause": "Expected a record, found none."}), + error_code: std::borrow::Cow::from("P2025"), + }.into())], + extensions: None, + }, + ], + errors: vec![], + extensions: None, + }), + }; + assert_eq!(result, expected); } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs index 0486c291e8b..0eee2d9e6cb 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs @@ -1,4 +1,4 @@ -use crate::TestResult; +use crate::{TestError, TestResult}; use indexmap::IndexMap; use itertools::Itertools; use prisma_models::PrismaValue; @@ -18,24 +18,28 @@ pub struct JsonRequest; impl JsonRequest { /// Translates a GraphQL query to a JSON query. This is used to keep the same test-suite running on both protocols. pub fn from_graphql(gql: &str, query_schema: &QuerySchema) -> TestResult { - let operation = GraphQLProtocolAdapter::convert_query_to_operation(gql, None).unwrap(); - let operation_name = operation.name(); - let schema_field = query_schema - .find_query_field(operation_name) - .unwrap_or_else(|| query_schema.find_mutation_field(operation_name).unwrap()); - let model_name = schema_field - .model() - .map(|m| query_schema.internal_data_model.walk(m).name().to_owned()); - let query_tag = schema_field.query_tag().unwrap().to_owned(); - let selection = operation.into_selection(); - - let output = JsonSingleQuery { - model_name, - action: Action::new(query_tag), - query: graphql_selection_to_json_field_query(selection, &schema_field), - }; - - Ok(output) + match GraphQLProtocolAdapter::convert_query_to_operation(gql, None) { + Ok(operation) => { + let operation_name = operation.name(); + let schema_field = query_schema + .find_query_field(operation_name) + .unwrap_or_else(|| query_schema.find_mutation_field(operation_name).unwrap()); + let model_name = schema_field + .model() + .map(|m| query_schema.internal_data_model.walk(m).name().to_owned()); + let query_tag = schema_field.query_tag().unwrap().to_owned(); + let selection = operation.into_selection(); + + let output = JsonSingleQuery { + model_name, + action: Action::new(query_tag), + query: graphql_selection_to_json_field_query(selection, &schema_field), + }; + + Ok(output) + } + Err(err) => Err(TestError::RequestHandlerError(err)), + } } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index e9fce19c2c1..d6505206356 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -1,8 +1,12 @@ mod json_adapter; pub use 
json_adapter::*; +use serde::Deserialize; -use crate::{ConnectorTag, ConnectorVersion, QueryResult, TestLogCapture, TestResult, ENGINE_PROTOCOL}; +use crate::{ + executor_process_request, ConnectorTag, ConnectorVersion, QueryResult, TestError, TestLogCapture, TestResult, + ENGINE_PROTOCOL, +}; use colored::Colorize; use query_core::{ protocol::EngineProtocol, @@ -11,18 +15,76 @@ use query_core::{ }; use query_engine_metrics::MetricRegistry; use request_handlers::{ - load_executor, BatchTransactionOption, ConnectorMode, GraphqlBody, JsonBatchQuery, JsonBody, JsonSingleQuery, - MultiQuery, RequestBody, RequestHandler, + BatchTransactionOption, ConnectorMode, GraphqlBody, JsonBatchQuery, JsonBody, JsonSingleQuery, MultiQuery, + RequestBody, RequestHandler, +}; +use serde_json::json; +use std::{ + env, + sync::{atomic::AtomicUsize, Arc}, }; -use std::{env, sync::Arc}; pub type TxResult = Result<(), user_facing_errors::Error>; pub(crate) type Executor = Box; +#[derive(Deserialize, Debug)] +struct Empty {} + +#[derive(Deserialize, Debug)] +#[serde(untagged)] +enum TransactionEndResponse { + Error(user_facing_errors::Error), + Ok(Empty), +} + +impl From for TxResult { + fn from(value: TransactionEndResponse) -> Self { + match value { + TransactionEndResponse::Ok(_) => Ok(()), + TransactionEndResponse::Error(error) => Err(error), + } + } +} + +pub enum RunnerExecutor { + // Builtin is a runner that uses the query engine in-process, issuing queries against a + // `core::InterpretingExecutor` that uses the particular connector under test in the test suite. + Builtin(Executor), + + // External is a runner that uses an external process that responds to queries piped to its STDIN + // in JsonRPC format. In particular this is used to test the query engine against a node process + // running a library engine configured to use a javascript driver adapter to connect to a database. + // + // In this struct variant, usize represents the index of the schema used for the test suite to + // execute queries against. When the suite starts, a message with the schema and the id is sent to + // the external process, which will create a new instance of the library engine configured to + // access that schema. + // + // Everytime a query is sent to the external process, it's provided the id of the schema, so the + // process knows how to associate the query to the instance of the library engine that will dispatch + // it. + External(usize), +} + +impl RunnerExecutor { + async fn new_external(url: &str, schema: &str) -> TestResult { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + let id = COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + + executor_process_request( + "initializeSchema", + json!({ "schema": schema, "schemaId": id, "url": url }), + ) + .await?; + + Ok(RunnerExecutor::External(id)) + } +} + /// Direct engine runner. 
pub struct Runner { - executor: Executor, + executor: RunnerExecutor, query_schema: QuerySchemaRef, version: ConnectorVersion, connector_tag: ConnectorTag, @@ -34,6 +96,13 @@ pub struct Runner { } impl Runner { + pub(crate) fn schema_id(&self) -> Option { + match self.executor { + RunnerExecutor::Builtin(_) => None, + RunnerExecutor::External(schema_id) => Some(schema_id), + } + } + pub fn prisma_dml(&self) -> &str { self.query_schema.internal_data_model.schema.db.source() } @@ -49,18 +118,22 @@ impl Runner { qe_setup::setup(&datamodel, db_schemas).await?; let protocol = EngineProtocol::from(&ENGINE_PROTOCOL.to_string()); - let schema = psl::parse_schema(datamodel).unwrap(); + let schema = psl::parse_schema(&datamodel).unwrap(); let data_source = schema.configuration.datasources.first().unwrap(); let url = data_source.load_url(|key| env::var(key).ok()).unwrap(); - let connector_mode = ConnectorMode::Rust; - let executor = load_executor( - connector_mode, - data_source, - schema.configuration.preview_features(), - &url, - ) - .await?; + let executor = match crate::CONFIG.external_test_executor() { + Some(_) => RunnerExecutor::new_external(&url, &datamodel).await?, + None => RunnerExecutor::Builtin( + request_handlers::load_executor( + ConnectorMode::Rust, + data_source, + schema.configuration.preview_features(), + &url, + ) + .await?, + ), + }; let query_schema: QuerySchemaRef = Arc::new(schema::build(Arc::new(schema), true)); Ok(Self { @@ -82,9 +155,33 @@ impl Runner { { let query = query.into(); + let executor = match &self.executor { + RunnerExecutor::Builtin(e) => e, + RunnerExecutor::External(schema_id) => match JsonRequest::from_graphql(&query, self.query_schema()) { + Ok(json_query) => { + let response_str: String = + executor_process_request("query", json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) })).await?; + let mut response: QueryResult = serde_json::from_str(&response_str).unwrap(); + response.detag(); + return Ok(response); + } + // Conversion from graphql to JSON might fail, and in that case we should consider the error + // (a Handler error) as an error response. 
+ Err(TestError::RequestHandlerError(err)) => { + let gql_err = request_handlers::GQLError::from_handler_error(err); + let gql_res = request_handlers::GQLResponse::from(gql_err); + let prisma_res = request_handlers::PrismaResponse::Single(gql_res); + let mut response = QueryResult::from(prisma_res); + response.detag(); + return Ok(response); + } + Err(err) => return Err(err), + }, + }; + tracing::debug!("Querying: {}", query.clone().green()); - let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol); + let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol); let request_body = match self.protocol { EngineProtocol::Json => { @@ -127,7 +224,20 @@ impl Runner { println!("{}", query.bright_green()); - let handler = RequestHandler::new(&*self.executor, &self.query_schema, EngineProtocol::Json); + let executor = match &self.executor { + RunnerExecutor::Builtin(e) => e, + RunnerExecutor::External(_) => { + let response_str: String = executor_process_request( + "query", + json!({ "query": query, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }), + ) + .await?; + let response: QueryResult = serde_json::from_str(&response_str).unwrap(); + return Ok(response); + } + }; + + let handler = RequestHandler::new(&**executor, &self.query_schema, EngineProtocol::Json); let serialized_query: JsonSingleQuery = serde_json::from_str(&query).unwrap(); let request_body = RequestBody::Json(JsonBody::Single(serialized_query)); @@ -164,7 +274,12 @@ impl Runner { transaction: bool, isolation_level: Option, ) -> TestResult { - let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol); + let executor = match &self.executor { + RunnerExecutor::External(_) => todo!(), + RunnerExecutor::Builtin(e) => e, + }; + + let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol); let body = RequestBody::Json(JsonBody::Batch(JsonBatchQuery { batch: queries .into_iter() @@ -184,7 +299,32 @@ impl Runner { transaction: bool, isolation_level: Option, ) -> TestResult { - let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol); + let executor = match &self.executor { + RunnerExecutor::External(schema_id) => { + // Translate the GraphQL query to JSON + let batch = queries + .into_iter() + .map(|query| JsonRequest::from_graphql(&query, self.query_schema())) + .collect::>>() + .unwrap(); + let transaction = match transaction { + true => Some(BatchTransactionOption { isolation_level }), + false => None, + }; + let json_query = JsonBody::Batch(JsonBatchQuery { batch, transaction }); + let response_str: String = executor_process_request( + "query", + json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }) + ).await?; + + let mut response: QueryResult = serde_json::from_str(&response_str).unwrap(); + response.detag(); + return Ok(response); + } + RunnerExecutor::Builtin(e) => e, + }; + + let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol); let body = match self.protocol { EngineProtocol::Json => { // Translate the GraphQL query to JSON @@ -227,31 +367,74 @@ impl Runner { isolation_level: Option, ) -> TestResult { let tx_opts = TransactionOptions::new(max_acquisition_millis, valid_for_millis, isolation_level); - - let id = self - .executor - .start_tx(self.query_schema.clone(), self.protocol, tx_opts) - .await?; - Ok(id) + match &self.executor { + RunnerExecutor::Builtin(executor) => { + let id = executor + 
.start_tx(self.query_schema.clone(), self.protocol, tx_opts) + .await?; + Ok(id) + } + RunnerExecutor::External(schema_id) => { + #[derive(Deserialize, Debug)] + #[serde(untagged)] + enum StartTransactionResponse { + Ok { id: String }, + Error(user_facing_errors::Error), + } + let response: StartTransactionResponse = + executor_process_request("startTx", json!({ "schemaId": schema_id, "options": tx_opts })).await?; + + match response { + StartTransactionResponse::Ok { id } => Ok(id.into()), + StartTransactionResponse::Error(err) => { + Err(crate::TestError::InteractiveTransactionError(err.message().into())) + } + } + } + } } pub async fn commit_tx(&self, tx_id: TxId) -> TestResult { - let res = self.executor.commit_tx(tx_id).await; + match &self.executor { + RunnerExecutor::Builtin(executor) => { + let res = executor.commit_tx(tx_id).await; + + if let Err(error) = res { + Ok(Err(error.into())) + } else { + Ok(Ok(())) + } + } + RunnerExecutor::External(schema_id) => { + let response: TransactionEndResponse = + executor_process_request("commitTx", json!({ "schemaId": schema_id, "txId": tx_id.to_string() })) + .await?; - if let Err(error) = res { - Ok(Err(error.into())) - } else { - Ok(Ok(())) + Ok(response.into()) + } } } pub async fn rollback_tx(&self, tx_id: TxId) -> TestResult { - let res = self.executor.rollback_tx(tx_id).await; - - if let Err(error) = res { - Ok(Err(error.into())) - } else { - Ok(Ok(())) + match &self.executor { + RunnerExecutor::Builtin(executor) => { + let res = executor.rollback_tx(tx_id).await; + + if let Err(error) = res { + Ok(Err(error.into())) + } else { + Ok(Ok(())) + } + } + RunnerExecutor::External(schema_id) => { + let response: TransactionEndResponse = executor_process_request( + "rollbackTx", + json!({ "schemaId": schema_id, "txId": tx_id.to_string() }), + ) + .await?; + + Ok(response.into()) + } } } @@ -276,7 +459,18 @@ impl Runner { } pub async fn get_logs(&mut self) -> Vec { - self.log_capture.get_logs().await + let mut logs = self.log_capture.get_logs().await; + match &self.executor { + RunnerExecutor::Builtin(_) => logs, + RunnerExecutor::External(schema_id) => { + let mut external_logs: Vec = + executor_process_request("getLogs", json!({ "schemaId": schema_id })) + .await + .unwrap(); + logs.append(&mut external_logs); + logs + } + } } pub fn connector_version(&self) -> &ConnectorVersion { diff --git a/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 b/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 new file mode 100644 index 00000000000..0097d8c91f5 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 @@ -0,0 +1,7 @@ +{ + "connector": "postgres", + "version": "13", + "driver_adapter": "neon:ws", + "driver_adapter_config": { "proxyUrl": "127.0.0.1:5488/v1" }, + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 b/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 new file mode 100644 index 00000000000..00f0c75ed73 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 @@ -0,0 +1,6 @@ +{ + "connector": "postgres", + "version": "13", + "driver_adapter": "pg", + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/postgres13 b/query-engine/connector-test-kit-rs/test-configs/postgres13 index 84fb5e1c04f..f7b61cb4f88 100644 --- 
a/query-engine/connector-test-kit-rs/test-configs/postgres13 +++ b/query-engine/connector-test-kit-rs/test-configs/postgres13 @@ -1,3 +1,4 @@ { "connector": "postgres", - "version": "13"} \ No newline at end of file + "version": "13" +} \ No newline at end of file diff --git a/query-engine/connectors/sql-query-connector/src/database/js.rs b/query-engine/connectors/sql-query-connector/src/database/js.rs index 1dced9453fa..0d4714871e5 100644 --- a/query-engine/connectors/sql-query-connector/src/database/js.rs +++ b/query-engine/connectors/sql-query-connector/src/database/js.rs @@ -11,40 +11,25 @@ use quaint::{ connector::{IsolationLevel, Transaction}, prelude::{Queryable as QuaintQueryable, *}, }; -use std::{ - collections::{hash_map::Entry, HashMap}, - sync::{Arc, Mutex}, -}; +use std::sync::{Arc, Mutex}; -/// Registry is the type for the global registry of driver adapters. -type Registry = HashMap; +static ACTIVE_DRIVER_ADAPTER: Lazy>> = Lazy::new(|| Mutex::new(None)); -/// REGISTRY is the global registry of Driver Adapters. -static REGISTRY: Lazy> = Lazy::new(|| Mutex::new(HashMap::new())); +fn active_driver_adapter(provider: &str) -> connector::Result { + let lock = ACTIVE_DRIVER_ADAPTER.lock().unwrap(); -fn registered_driver_adapter(provider: &str) -> connector::Result { - let lock = REGISTRY.lock().unwrap(); - lock.get(provider) + lock.as_ref() + .map(|conn_ref| conn_ref.to_owned()) .ok_or(ConnectorError::from_kind(ErrorKind::UnsupportedConnector(format!( "A driver adapter for {} was not registered", provider )))) - .map(|conn_ref| conn_ref.to_owned()) } -pub fn register_driver_adapter(provider: &str, connector: Arc) -> Result<(), String> { - let mut lock = REGISTRY.lock().unwrap(); - let entry = lock.entry(provider.to_string()); - match entry { - Entry::Occupied(_) => Err(format!( - "A driver adapter for {} was already registered, and cannot be overridden.", - provider - )), - Entry::Vacant(v) => { - v.insert(DriverAdapter { connector }); - Ok(()) - } - } +pub fn activate_driver_adapter(connector: Arc) { + let mut lock = ACTIVE_DRIVER_ADAPTER.lock().unwrap(); + + *lock = Some(DriverAdapter { connector }); } pub struct Js { @@ -69,7 +54,7 @@ impl FromSource for Js { url: &str, features: psl::PreviewFeatures, ) -> connector_interface::Result { - let connector = registered_driver_adapter(source.active_provider)?; + let connector = active_driver_adapter(source.active_provider)?; let connection_info = get_connection_info(url)?; Ok(Js { @@ -117,7 +102,7 @@ impl Connector for Js { // declaration, so finally I couldn't come up with anything better then wrapping a QuaintQueryable // in this object, and implementing TransactionCapable (and quaint::Queryable) explicitly for it. 
#[derive(Clone)] -struct DriverAdapter { +pub struct DriverAdapter { connector: Arc, } diff --git a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs b/query-engine/connectors/sql-query-connector/src/filter_conversion.rs index a77b61a2a79..86cfb1c38be 100644 --- a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs +++ b/query-engine/connectors/sql-query-connector/src/filter_conversion.rs @@ -330,43 +330,66 @@ impl AliasedSelect for RelationFilter { let alias = alias.unwrap_or_default(); let condition = self.condition; - let table = self.field.as_table(ctx); - let selected_identifier: Vec = self - .field - .identifier_columns(ctx) - .map(|col| col.aliased_col(Some(alias), ctx)) - .collect(); - - let join_columns: Vec = self - .field - .join_columns(ctx) - .map(|c| c.aliased_col(Some(alias), ctx)) - .collect(); - - let related_table = self.field.related_model().as_table(ctx); - let related_join_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields()) - .as_columns(ctx) - .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx)) - .collect(); - - let nested_conditions = self - .nested_filter - .aliased_condition_from(Some(alias.flip(AliasMode::Join)), false, ctx) - .invert_if(condition.invert_of_subselect()); - - let conditions = selected_identifier - .clone() - .into_iter() - .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); - - let join = related_table - .alias(alias.to_string(Some(AliasMode::Join))) - .on(Row::from(related_join_columns).equals(Row::from(join_columns))); - - Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table)))) - .columns(selected_identifier) - .inner_join(join) - .so_that(conditions) + // Performance can be improved by using fields in related table which skip a join table operation + if self.field.related_field().walker().fields().is_some() { + let related_table = self.field.related_model().as_table(ctx); + let related_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.aliased_col(Some(alias), ctx)) + .collect(); + + let nested_conditions = self + .nested_filter + .aliased_condition_from(Some(alias), false, ctx) + .invert_if(condition.invert_of_subselect()); + + let conditions = related_columns + .clone() + .into_iter() + .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); + + Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table)))) + .columns(related_columns) + .so_that(conditions) + } else { + let table = self.field.as_table(ctx); + let selected_identifier: Vec = self + .field + .identifier_columns(ctx) + .map(|col| col.aliased_col(Some(alias), ctx)) + .collect(); + + let join_columns: Vec = self + .field + .join_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .collect(); + + let related_table = self.field.related_model().as_table(ctx); + let related_join_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx)) + .collect(); + + let nested_conditions = self + .nested_filter + .aliased_condition_from(Some(alias.flip(AliasMode::Join)), false, ctx) + .invert_if(condition.invert_of_subselect()); + + let conditions = selected_identifier + .clone() + .into_iter() + .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); + + let join = related_table + .alias(alias.to_string(Some(AliasMode::Join))) + 
.on(Row::from(related_join_columns).equals(Row::from(join_columns))); + + Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table)))) + .columns(selected_identifier) + .inner_join(join) + .so_that(conditions) + } } } diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index 06aa1e376c4..d98f87d9a92 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -23,7 +23,7 @@ use self::{column_metadata::*, context::Context, filter_conversion::*, query_ext use quaint::prelude::Queryable; #[cfg(feature = "driver-adapters")] -pub use database::{register_driver_adapter, Js}; +pub use database::{activate_driver_adapter, Js}; pub use database::{FromSource, Mssql, Mysql, PostgreSql, Sqlite}; pub use error::SqlError; diff --git a/query-engine/core/src/executor/mod.rs b/query-engine/core/src/executor/mod.rs index 35ed20ab0c5..ddbb7dfc842 100644 --- a/query-engine/core/src/executor/mod.rs +++ b/query-engine/core/src/executor/mod.rs @@ -21,7 +21,7 @@ use crate::{ }; use async_trait::async_trait; use connector::Connector; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use tracing::Dispatch; #[async_trait] @@ -57,14 +57,14 @@ pub trait QueryExecutor: TransactionManager { fn primary_connector(&self) -> &(dyn Connector + Send + Sync); } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct TransactionOptions { /// Maximum wait time for tx acquisition in milliseconds. - #[serde(rename(deserialize = "max_wait"))] + #[serde(rename = "max_wait")] pub max_acquisition_millis: u64, /// Time in milliseconds after which the transaction rolls back automatically. - #[serde(rename(deserialize = "timeout"))] + #[serde(rename = "timeout")] pub valid_for_millis: u64, /// Isolation level to use for the transaction. @@ -72,7 +72,7 @@ pub struct TransactionOptions { /// An optional pre-defined transaction id. Some value might be provided in case we want to generate /// a new id at the beginning of the transaction - #[serde(skip_deserializing)] + #[serde(skip)] pub new_tx_id: Option, } diff --git a/query-engine/core/src/interactive_transactions/mod.rs b/query-engine/core/src/interactive_transactions/mod.rs index 79eba2bb82e..ce125e8fa17 100644 --- a/query-engine/core/src/interactive_transactions/mod.rs +++ b/query-engine/core/src/interactive_transactions/mod.rs @@ -1,5 +1,6 @@ use crate::CoreError; use connector::Transaction; +use serde::Deserialize; use std::fmt::Display; use tokio::time::{Duration, Instant}; @@ -37,7 +38,7 @@ pub(crate) use messages::*; /// the TransactionActorManager can reply with a helpful error message which explains that no operation can be performed on a closed transaction /// rather than an error message stating that the transaction does not exist. 
-#[derive(Debug, Clone, Hash, Eq, PartialEq)] +#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize)] pub struct TxId(String); const MINIMUM_TX_ID_LENGTH: usize = 24; diff --git a/query-engine/core/src/lib.rs b/query-engine/core/src/lib.rs index fb6806e4450..7970c96139b 100644 --- a/query-engine/core/src/lib.rs +++ b/query-engine/core/src/lib.rs @@ -18,7 +18,10 @@ pub use self::{ query_document::*, telemetry::*, }; -pub use connector::{error::ConnectorError, Connector}; +pub use connector::{ + error::{ConnectorError, ErrorKind as ConnectorErrorKind}, + Connector, +}; mod error; mod interactive_transactions; diff --git a/query-engine/core/src/query_graph_builder/write/upsert.rs b/query-engine/core/src/query_graph_builder/write/upsert.rs index 69d362b09b7..0a01e43e73c 100644 --- a/query-engine/core/src/query_graph_builder/write/upsert.rs +++ b/query-engine/core/src/query_graph_builder/write/upsert.rs @@ -156,6 +156,23 @@ pub(crate) fn upsert_record( } graph.create_edge(&if_node, &create_node, QueryGraphDependency::Else)?; + + // Pass-in the read parent record result to the update node RecordFilter to avoid a redundant read. + graph.create_edge( + &read_parent_records_node, + &update_node, + QueryGraphDependency::ProjectedDataDependency( + model_id.clone(), + Box::new(move |mut update_node, parent_ids| { + if let Node::Query(Query::Write(WriteQuery::UpdateRecord(ref mut ur))) = update_node { + ur.set_selectors(parent_ids); + } + + Ok(update_node) + }), + ), + )?; + graph.create_edge( &update_node, &read_node_update, diff --git a/query-engine/dmmf/src/tests/tests.rs b/query-engine/dmmf/src/tests/tests.rs index f1d597710b5..25f83e64447 100644 --- a/query-engine/dmmf/src/tests/tests.rs +++ b/query-engine/dmmf/src/tests/tests.rs @@ -87,6 +87,54 @@ fn unsupported_in_composite_type() { dmmf_from_schema(schema); } +// Regression test for https://github.com/prisma/prisma/issues/20986 +#[test] +fn unusupported_in_compound_unique_must_not_panic() { + let schema = r#" + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + } + + generator client { + provider = "postgresql" + } + + model A { + id Int @id + field Int + unsupported Unsupported("tstzrange") + + @@unique([field, unsupported]) + } + "#; + + dmmf_from_schema(schema); +} + +#[test] +fn unusupported_in_compound_id_must_not_panic() { + let schema = r#" + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + } + + generator client { + provider = "postgresql" + } + + model A { + field Int @unique + unsupported Unsupported("tstzrange") + + @@id([field, unsupported]) + } + "#; + + dmmf_from_schema(schema); +} + const SNAPSHOTS_PATH: &str = concat!( env!("CARGO_MANIFEST_DIR"), "/src", diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml new file mode 100644 index 00000000000..f0beb50a216 --- /dev/null +++ b/query-engine/driver-adapters/js/.prettierrc.yml @@ -0,0 +1,5 @@ +tabWidth: 2 +trailingComma: all +singleQuote: true +semi: false +printWidth: 120 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md index d4198f4c31f..e5e64c60dfc 100644 --- a/query-engine/driver-adapters/js/README.md +++ b/query-engine/driver-adapters/js/README.md @@ -1,17 +1,17 @@ # Prisma Driver Adapters This TypeScript monorepo contains the following packages: -- `@jkomyno/prisma-driver-adapter-utils` (later: `@prisma/driver-adapter-utils`) +- `@prisma/driver-adapter-utils` - Internal set of utilities and types for Prisma's 
driver adapters. -- `@jkomyno/prisma-adapter-neon` (later: `@prisma/adapter-neon`) +- `@prisma/adapter-neon` - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - It uses `provider = "postgres"` - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@jkomyno/prisma-adapter-planetscale` (later: `@prisma/adapter-planetscale`) +- `@prisma/adapter-planetscale` - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - It uses `provider = "mysql"` - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@jkomyno/prisma-adapter-pg` (later: `@prisma/adapter-pg`) +- `@prisma/adapter-pg` - Prisma's Driver Adapter that wraps the `pg` driver - It uses `provider = "postgres"` - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 52ee08f97be..78d18889174 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { - "name": "@jkomyno/prisma-adapter-neon", - "version": "0.2.1", + "name": "@prisma/adapter-neon", + "version": "0.3.3", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", @@ -18,7 +18,7 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*" }, "devDependencies": { "@neondatabase/serverless": "^0.6.0" diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index ea91f57eefd..c05ad1f6510 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' import { types } from '@neondatabase/serverless' const NeonColumnType = types.builtins diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index ba17da3036b..e2dac37a911 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -1,14 +1,21 @@ import type neon from '@neondatabase/serverless' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' +import { Debug, ok, err } from '@prisma/driver-adapter-utils' +import type { + DriverAdapter, + ResultSet, + Query, + Queryable, + Transaction, + Result, + TransactionOptions, +} from '@prisma/driver-adapter-utils' import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:neon') -type ARRAY_MODE_DISABLED = false -type FULL_RESULTS_ENABLED = true +type ARRAY_MODE_ENABLED = true -type PerformIOResult = neon.QueryResult | neon.FullQueryResults +type PerformIOResult = neon.QueryResult | neon.FullQueryResults /** * Base class for http client, ws client and ws transaction @@ -20,29 +27,25 @@ abstract class NeonQueryable implements Queryable { const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows: results } = await 
this.performIO(query) - - const columns = fields.map(field => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map(field => fieldToColumnType(field.dataTypeID)), - rows: results.map(result => columns.map(column => result[column])), - } - - return { ok: true, value: resultSet } + return (await this.performIO(query)).map(({ fields, rows }) => { + const columns = fields.map((field) => field.name) + return { + columnNames: columns, + columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), + rows, + } + }) } async executeRaw(query: Query): Promise> { const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowCount: rowsAffected } = await this.performIO(query) - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return { ok: true, value: rowsAffected ?? 0 } + return (await this.performIO(query)).map((r) => r.rowCount ?? 0) } - abstract performIO(query: Query): Promise + abstract performIO(query: Query): Promise> } /** @@ -53,15 +56,25 @@ class NeonWsQueryable extends NeonQ super() } - override async performIO(query: Query): Promise { + override async performIO(query: Query): Promise> { const { sql, args: values } = query try { - return await this.client.query(sql, values) + return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error + debug('Error in performIO: %O', e) + if (e && e.code) { + return err({ + kind: 'PostgresError', + code: e.code, + severity: e.severity, + message: e.message, + detail: e.detail, + column: e.column, + hint: e.hint, + }) + } + throw e } } } @@ -75,14 +88,14 @@ class NeonTransaction extends NeonWsQueryable implements Transa debug(`[js::commit]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return Promise.resolve(ok(undefined)) } async rollback(): Promise> { debug(`[js::rollback]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return Promise.resolve(ok(undefined)) } } @@ -102,7 +115,7 @@ export class PrismaNeon extends NeonWsQueryable implements DriverAdap debug(`${tag} options: %O`, options) const connection = await this.client.connect() - return { ok: true, value: new NeonTransaction(connection, options) } + return ok(new NeonTransaction(connection, options)) } async close() { @@ -110,21 +123,23 @@ export class PrismaNeon extends NeonWsQueryable implements DriverAdap await this.client.end() this.isRunning = false } - return { ok: true as const, value: undefined } + return ok(undefined) } } export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction< - ARRAY_MODE_DISABLED, - FULL_RESULTS_ENABLED - >) { + constructor(private client: neon.NeonQueryFunction) { super() } - override async performIO(query: Query): Promise { + override async performIO(query: Query): Promise> { const { sql, args: values } = query - return await this.client(sql, values) + return ok( + await this.client(sql, values, { + arrayMode: true, + fullResults: true, + }), + ) } startTransaction(): Promise> { @@ -132,6 +147,6 @@ export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { } async close() { - return { ok: true as const, value: undefined } + return ok(undefined) } } diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 3055976cb51..dc3e7392960 
100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { - "name": "@jkomyno/prisma-adapter-pg", - "version": "0.2.1", + "name": "@prisma/adapter-pg", + "version": "0.3.3", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", @@ -18,7 +18,7 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*" }, "devDependencies": { "pg": "^8.11.3", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index fc9ad43e9f0..8943ae2d16b 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' import { types } from 'pg' const PgColumnType = types.builtins diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index a6a4ba4b58d..5c574460b49 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -1,6 +1,14 @@ import type pg from 'pg' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, Query, Queryable, Result, ResultSet, Transaction, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' +import { Debug, ok } from '@prisma/driver-adapter-utils' +import type { + DriverAdapter, + Query, + Queryable, + Result, + ResultSet, + Transaction, + TransactionOptions, +} from '@prisma/driver-adapter-utils' import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:pg') @@ -8,12 +16,10 @@ const debug = Debug('prisma:driver-adapter:pg') type StdClient = pg.Pool type TransactionClient = pg.PoolClient -class PgQueryable - implements Queryable { +class PgQueryable implements Queryable { readonly flavour = 'postgres' - constructor(protected readonly client: ClientT) { - } + constructor(protected readonly client: ClientT) {} /** * Execute a query given as SQL, interpolating the given parameters. @@ -22,16 +28,17 @@ class PgQueryable const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows: results } = await this.performIO(query) + const { fields, rows } = await this.performIO(query) const columns = fields.map((field) => field.name) + const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) const resultSet: ResultSet = { columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), - rows: results.map((result) => columns.map((column) => result[column])), + columnTypes, + rows, } - return { ok: true, value: resultSet } + return ok(resultSet) } /** @@ -44,9 +51,9 @@ class PgQueryable debug(`${tag} %O`, query) const { rowCount: rowsAffected } = await this.performIO(query) - + // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return { ok: true, value: rowsAffected ?? 0 } + return ok(rowsAffected ?? 
0) } /** @@ -58,7 +65,7 @@ class PgQueryable const { sql, args: values } = query try { - const result = await this.client.query(sql, values) + const result = await this.client.query({ text: sql, values, rowMode: 'array' }) return result } catch (e) { const error = e as Error @@ -68,8 +75,7 @@ class PgQueryable } } -class PgTransaction extends PgQueryable - implements Transaction { +class PgTransaction extends PgQueryable implements Transaction { constructor(client: pg.PoolClient, readonly options: TransactionOptions) { super(client) } @@ -78,14 +84,14 @@ class PgTransaction extends PgQueryable debug(`[js::commit]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return ok(undefined) } async rollback(): Promise> { debug(`[js::rollback]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return ok(undefined) } } @@ -103,10 +109,10 @@ export class PrismaPg extends PgQueryable implements DriverAdapter { debug(`${tag} options: %O`, options) const connection = await this.client.connect() - return { ok: true, value: new PgTransaction(connection, options) } + return ok(new PgTransaction(connection, options)) } async close() { - return { ok: true as const, value: undefined } + return ok(undefined) } } diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index bdda6c0a5c9..aedfde584c3 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { - "name": "@jkomyno/prisma-adapter-planetscale", - "version": "0.2.1", + "name": "@prisma/adapter-planetscale", + "version": "0.3.3", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", @@ -18,7 +18,7 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*" }, "devDependencies": { "@planetscale/database": "^1.11.0" diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts index 2c79afdddd6..1c46538806b 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' // See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 export type PlanetScaleColumnType diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index 8bd2610336b..b5dffb89272 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,6 +1,14 @@ import type planetScale from '@planetscale/database' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' +import { Debug, ok } from '@prisma/driver-adapter-utils' +import type { + DriverAdapter, + 
ResultSet, + Query, + Queryable, + Transaction, + Result, + TransactionOptions, +} from '@prisma/driver-adapter-utils' import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' import { createDeferred, Deferred } from './deferred' @@ -17,11 +25,9 @@ class RollbackError extends Error { } } - class PlanetScaleQueryable implements Queryable { readonly flavour = 'mysql' - constructor(protected client: ClientT) { - } + constructor(protected client: ClientT) {} /** * Execute a query given as SQL, interpolating the given parameters. @@ -30,17 +36,17 @@ class PlanetScaleQueryable field.name) + const columns = fields.map((field) => field.name) const resultSet: ResultSet = { columnNames: columns, - columnTypes: fields.map(field => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: results.map(result => columns.map(column => result[column])), + columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), + rows: rows as ResultSet['rows'], lastInsertId, } - return { ok: true, value: resultSet } + return ok(resultSet) } /** @@ -53,7 +59,7 @@ class PlanetScaleQueryable> { debug(`[js::rollback]`) this.txDeferred.reject(new RollbackError()) - return Promise.resolve({ ok: true, value: await this.txResultPromise }) + return Promise.resolve(ok(await this.txResultPromise)) } - } export class PrismaPlanetScale extends PlanetScaleQueryable implements DriverAdapter { @@ -115,25 +122,27 @@ export class PrismaPlanetScale extends PlanetScaleQueryable>((resolve, reject) => { - const txResultPromise = this.client.transaction(async tx => { - const [txDeferred, deferredPromise] = createDeferred() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve({ ok: true, value: txWrapper }) - return deferredPromise - }).catch(error => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) + const txResultPromise = this.client + .transaction(async (tx) => { + const [txDeferred, deferredPromise] = createDeferred() + const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) + + resolve(ok(txWrapper)) + return deferredPromise + }) + .catch((error) => { + // Rollback error is ignored (so that tx.rollback() won't crash) + // any other error is legit and is re-thrown + if (!(error instanceof RollbackError)) { + return reject(error) + } + + return undefined + }) }) } async close() { - return { ok: true as const, value: undefined } + return ok(undefined) } } diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json new file mode 100644 index 00000000000..1dc1315afc8 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -0,0 +1,24 @@ +{ + "name": "connector-test-kit-executor", + "version": "1.0.0", + "description": "", + "main": "dist/index.js", + "private": true, + "scripts": { + "build": "tsup ./src/index.ts --format cjs,esm --dts", + "lint": "tsc -p ./tsconfig.build.json" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", + "@types/pg": "^8.10.2", + "pg": "^8.11.3", + "undici": 
"^5.23.0" + } +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh new file mode 100755 index 00000000000..000f3bd1d45 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +node "$(dirname "${BASH_SOURCE[0]}")/../dist/index.mjs" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts new file mode 100644 index 00000000000..bd491db289a --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts @@ -0,0 +1,78 @@ +import * as Transaction from './Transaction' + +export type JsonQuery = { + modelName?: string + action: JsonQueryAction + query: JsonFieldSelection +} + +export type JsonBatchQuery = { + batch: JsonQuery[] + transaction?: { isolationLevel?: Transaction.IsolationLevel } +} + +export type JsonQueryAction = + | 'findUnique' + | 'findUniqueOrThrow' + | 'findFirst' + | 'findFirstOrThrow' + | 'findMany' + | 'createOne' + | 'createMany' + | 'updateOne' + | 'updateMany' + | 'deleteOne' + | 'deleteMany' + | 'upsertOne' + | 'aggregate' + | 'groupBy' + | 'executeRaw' + | 'queryRaw' + | 'runCommandRaw' + | 'findRaw' + | 'aggregateRaw' + +export type JsonFieldSelection = { + arguments?: Record + selection: JsonSelectionSet +} + +export type JsonSelectionSet = { + $scalars?: boolean + $composites?: boolean +} & { + [fieldName: string]: boolean | JsonFieldSelection +} + +export type JsonArgumentValue = + | number + | string + | boolean + | null + | JsonTaggedValue + | JsonArgumentValue[] + | { [key: string]: JsonArgumentValue } + +export type DateTaggedValue = { $type: 'DateTime'; value: string } +export type DecimalTaggedValue = { $type: 'Decimal'; value: string } +export type BytesTaggedValue = { $type: 'Bytes'; value: string } +export type BigIntTaggedValue = { $type: 'BigInt'; value: string } +export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } } +export type EnumTaggedValue = { $type: 'Enum'; value: string } +export type JsonTaggedValue = { $type: 'Json'; value: string } + +export type JsonInputTaggedValue = + | DateTaggedValue + | DecimalTaggedValue + | BytesTaggedValue + | BigIntTaggedValue + | FieldRefTaggedValue + | JsonTaggedValue + | EnumTaggedValue + +export type JsonOutputTaggedValue = + | DateTaggedValue + | DecimalTaggedValue + | BytesTaggedValue + | BigIntTaggedValue + | JsonTaggedValue diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts new file mode 100644 index 00000000000..b0e0b06abc4 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts @@ -0,0 +1,42 @@ +import type { DriverAdapter } from '@prisma/driver-adapter-utils' +import type { QueryEngineConfig } from './QueryEngine' + +export type QueryEngineInstance = { + connect(headers: string): Promise + disconnect(headers: string): Promise + /** + * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest` + * @param headersStr JSON.stringified `QueryEngineRequestHeaders` + */ + query(requestStr: string, headersStr: string, transactionId?: string): Promise + sdlSchema(): Promise + 
dmmf(traceparent: string): Promise + startTransaction(options: string, traceHeaders: string): Promise + commitTransaction(id: string, traceHeaders: string): Promise + rollbackTransaction(id: string, traceHeaders: string): Promise + metrics(options: string): Promise +} + +export interface QueryEngineConstructor { + new(config: QueryEngineConfig, logger: (log: string) => void, nodejsFnCtx?: DriverAdapter): QueryEngineInstance +} + +export interface LibraryLoader { + loadLibrary(): Promise +} + +// Main +export type Library = { + QueryEngine: QueryEngineConstructor + + version: () => { + // The commit hash of the engine + commit: string + // Currently 0.1.0 (Set in Cargo.toml) + version: string + } + /** + * This returns a string representation of `DMMF.Document` + */ + dmmf: (datamodel: string) => Promise +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts new file mode 100644 index 00000000000..416da634fc9 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts @@ -0,0 +1,89 @@ +import { JsonBatchQuery, JsonQuery } from './JsonProtocol' +import * as Transaction from './Transaction' + +// Events +export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent + +export type QueryEngineLogEvent = { + level: string + module_path: string + message: string + span?: boolean +} + +export type QueryEngineQueryEvent = { + level: 'info' + module_path: string + query: string + item_type: 'query' + params: string + duration_ms: string + result: string +} + +export type QueryEnginePanicEvent = { + level: 'error' + module_path: string + message: 'PANIC' + reason: string + file: string + line: string + column: string +} + + +export type GraphQLQuery = { + query: string + variables: object +} + +export type EngineProtocol = 'graphql' | 'json' +export type EngineQuery = GraphQLQuery | JsonQuery + +export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] + +export type QueryEngineConfig = { + // TODO rename datamodel here and other places + datamodel: string + configDir: string + logQueries: boolean + ignoreEnvVarErrors: boolean + datasourceOverrides?: Record + env: Record + logLevel?: string + engineProtocol: EngineProtocol +} + +// Errors +export type SyncRustError = { + is_panic: boolean + message: string + meta: { + full_error: string + } + error_code: string +} + +export type RustRequestError = { + is_panic: boolean + message: string + backtrace: string +} + +export type QueryEngineResult = { + data: T + elapsed: number +} + +export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery + +export type QueryEngineBatchGraphQLRequest = { + batch: QueryEngineRequest[] + transaction?: boolean + isolationLevel?: Transaction.IsolationLevel +} + +export type QueryEngineRequest = { + query: string + variables: Object +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts new file mode 100644 index 00000000000..1c5786cc66d --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts @@ -0,0 +1,35 @@ +export enum IsolationLevel { + ReadUncommitted = 'ReadUncommitted', + ReadCommitted = 'ReadCommitted', + RepeatableRead = 'RepeatableRead', + Snapshot = 'Snapshot', + Serializable = 
'Serializable', +} + +/** + * maxWait ?= 2000 + * timeout ?= 5000 + */ +export type Options = { + maxWait?: number + timeout?: number + isolationLevel?: IsolationLevel +} + +export type InteractiveTransactionInfo = { + /** + * Transaction ID returned by the query engine. + */ + id: string + + /** + * Arbitrary payload the meaning of which depends on the `Engine` implementation. + * For example, `DataProxyEngine` needs to associate different API endpoints with transactions. + * In `LibraryEngine` and `BinaryEngine` it is currently not used. + */ + payload: Payload +} + +export type TransactionHeaders = { + traceparent?: string +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts new file mode 100644 index 00000000000..21df3430d3b --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -0,0 +1,244 @@ +import * as qe from './qe' +import * as engines from './engines/Library' +import * as readline from 'node:readline' +import * as jsonRpc from './jsonRpc' + +// pg dependencies +import pgDriver from 'pg' +import * as prismaPg from '@prisma/adapter-pg' + +// neon dependencies +import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' +import { WebSocket } from 'undici' +import * as prismaNeon from '@prisma/adapter-neon' + +import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; + +const SUPPORTED_ADAPTERS: Record Promise> + = {"pg": pgAdapter, "neon:ws" : neonWsAdapter}; + +// conditional debug logging based on LOG_LEVEL env var +const debug = (() => { + if ((process.env.LOG_LEVEL ?? '').toLowerCase() != 'debug') { + return (...args: any[]) => {} + } + + return (...args: any[]) => { + console.error('[nodejs] DEBUG:', ...args); + }; +})(); + +// error logger +const err = (...args: any[]) => console.error('[nodejs] ERROR:', ...args); + + +async function main(): Promise { + const iface = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false, + }); + + iface.on('line', async (line) => { + try { + const request: jsonRpc.Request = JSON.parse(line); // todo: validate + debug(`Got a request: ${line}`) + try { + const response = await handleRequest(request.method, request.params) + respondOk(request.id, response) + } catch (err) { + debug("[nodejs] Error from request handler: ", err) + respondErr(request.id, { + code: 1, + message: err.toString(), + }) + } + } catch (err) { + debug("Received non-json line: ", line); + } + + }); +} + +const state: Record = {} + +async function handleRequest(method: string, params: unknown): Promise { + switch (method) { + case 'initializeSchema': { + interface InitializeSchemaParams { + schema: string + schemaId: string + url: string + } + + const castParams = params as InitializeSchemaParams; + const logs = [] as string[] + const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { + logs.push(log) + }); + await engine.connect("") + + state[castParams.schemaId] = { + engine, + adapter, + logs + } + return null + } + case 'query': { + interface QueryPayload { + query: string + schemaId: number + txId?: string + } + + debug("Got `query`", params) + const castParams = params as QueryPayload; + const engine = state[castParams.schemaId].engine + const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId) + + const parsedResult = JSON.parse(result) + if (parsedResult.errors) { 
+ const error = parsedResult.errors[0]?.user_facing_error + if (error.error_code === 'P2036') { + const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id) + if (!jsError) { + err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) + } else { + err("got error response from the engine caused by the driver: ", jsError) + } + } + } + + debug("got response from engine: ", result) + // returning unparsed string: otherwise, some information gots lost during this round-trip. + // In particular, floating point without decimal part turn into integers + return result + } + + case 'startTx': { + interface StartTxPayload { + schemaId: number, + options: unknown + } + + debug("Got `startTx", params) + const {schemaId, options} = params as StartTxPayload + const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), "") + return JSON.parse(result) + } + + case 'commitTx': { + interface CommitTxPayload { + schemaId: number, + txId: string, + } + + debug("Got `commitTx", params) + const {schemaId, txId} = params as CommitTxPayload + const result = await state[schemaId].engine.commitTransaction(txId, '{}') + return JSON.parse(result) + } + + case 'rollbackTx': { + interface RollbackTxPayload { + schemaId: number, + txId: string, + } + + debug("Got `rollbackTx", params) + const {schemaId, txId} = params as RollbackTxPayload + const result = await state[schemaId].engine.rollbackTransaction(txId, '{}') + return JSON.parse(result) + } + case 'teardown': { + interface TeardownPayload { + schemaId: number + } + + debug("Got `teardown", params) + const castParams = params as TeardownPayload; + await state[castParams.schemaId].engine.disconnect("") + delete state[castParams.schemaId] + + return {} + } + case 'getLogs': { + interface GetLogsPayload { + schemaId: number + } + + const castParams = params as GetLogsPayload + return state[castParams.schemaId].queryLogs ?? [] + } + default: { + throw new Error(`Unknown method: \`${method}\``) + } + } +} + +function respondErr(requestId: number, error: jsonRpc.RpcError) { + const msg: jsonRpc.ErrResponse = { + jsonrpc: '2.0', + id: requestId, + error, + } + console.log(JSON.stringify(msg)) +} + +function respondOk(requestId: number, payload: unknown) { + const msg: jsonRpc.OkResponse = { + jsonrpc: '2.0', + id: requestId, + result: payload + + }; + console.log(JSON.stringify(msg)) +} + +async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[engines.QueryEngineInstance, ErrorCapturingDriverAdapter]> { + const adapter = await adapterFromEnv(url) as DriverAdapter + const errorCapturingAdapter = bindAdapter(adapter) + const engineInstance = qe.initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback, debug) + return [engineInstance, errorCapturingAdapter]; +} + +async function adapterFromEnv(url: string): Promise { + const adapter = process.env.DRIVER_ADAPTER ?? '' + + if (adapter == '') { + throw new Error("DRIVER_ADAPTER is not defined or empty.") + } + + if (!(adapter in SUPPORTED_ADAPTERS)) { + throw new Error(`Unsupported driver adapter: ${adapter}`) + } + + return await SUPPORTED_ADAPTERS[adapter](url) +} + +async function pgAdapter(url: string): Promise { + const pool = new pgDriver.Pool({connectionString: url}) + return new prismaPg.PrismaPg(pool) +} + +async function neonWsAdapter(url: string): Promise { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? 
'' + if (proxyURL == '') { + throw new Error("DRIVER_ADAPTER_CONFIG.proxyUrl is not defined or empty, but it is required for the neon adapter."); + } + + neonConfig.wsProxy = () => `127.0.0.1:5488/v1` + neonConfig.webSocketConstructor = WebSocket + neonConfig.useSecureWebSocket = false + neonConfig.pipelineConnect = false + + const pool = new NeonPool({ connectionString: url }) + return new prismaNeon.PrismaNeon(pool) +} + +main().catch(err) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts new file mode 100644 index 00000000000..ec734e7b543 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts @@ -0,0 +1,28 @@ +export interface Request { + jsonrpc: '2.0' + method: string + params?: Object, + id: number +} + +export type Response = OkResponse | ErrResponse + +export interface OkResponse { + jsonrpc: '2.0' + result: unknown + error?: never + id: number +} + +export interface ErrResponse { + jsonrpc: '2.0' + error: RpcError + result?: never + id: number +} + +export interface RpcError { + code: number + message: string + data?: unknown +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts new file mode 100644 index 00000000000..764df8f6108 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts @@ -0,0 +1,41 @@ +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' +import * as lib from './engines/Library' +import * as os from 'node:os' +import * as path from 'node:path' + +export type QueryLogCallback = (log: string) => void + +export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: string, queryLogCallback: QueryLogCallback, debug: (...args: any[]) => void): lib.QueryEngineInstance { + // I assume nobody will run this on Windows ¯\_(ツ)_/¯ + const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' + const dirname = path.dirname(new URL(import.meta.url).pathname) + + const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + + const libqueryEngine = { exports: {} as unknown as lib.Library } + // @ts-ignore + process.dlopen(libqueryEngine, libQueryEnginePath) + + const QueryEngine = libqueryEngine.exports.QueryEngine + + const queryEngineOptions = { + datamodel, + configDir: '.', + engineProtocol: 'json' as const, + logLevel: process.env["RUST_LOG"] ??
'info' as any, + logQueries: true, + env: process.env, + ignoreEnvVarErrors: false, + } + + + const logCallback = (event: any) => { + const parsed = JSON.parse(event) + if (parsed.is_query) { + queryLogCallback(parsed.query) + } + debug(parsed) + } + + return new QueryEngine(queryEngineOptions, logCallback, adapter) +} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 524d59e551f..9bb375dff34 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { - "name": "@jkomyno/prisma-driver-adapter-utils", - "version": "0.2.1", + "name": "@prisma/driver-adapter-utils", + "version": "0.3.3", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts index 9d399056f9a..5f0e055ec3a 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts @@ -1,4 +1,5 @@ -import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord, Result } from './types' +import { Result, err, ok } from './result' +import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord } from './types' class ErrorRegistryInternal implements ErrorRegistry { private registeredErrors: ErrorRecord[] = [] @@ -22,42 +23,43 @@ class ErrorRegistryInternal implements ErrorRegistry { export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { const errorRegistry = new ErrorRegistryInternal() + const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter)) return { errorRegistry, queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), flavour: adapter.flavour, startTransaction: async (...args) => { - const result = await adapter.startTransaction(...args) - if (result.ok) { - return { ok: true, value: bindTransaction(errorRegistry, result.value) } - } - return result + const result = await startTransaction(...args) + return result.map((tx) => bindTransaction(errorRegistry, tx)) }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)) + close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), } } // *.bind(transaction) is required to preserve the `this` context of functions whose // execution is delegated to napi.rs. 
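To make the intent of the new helpers concrete, here is a minimal usage sketch of the `ok`/`err` constructors (defined in `result.ts` further down and re-exported from the package index); the literal values are made up for illustration only:

```ts
import { ok, err } from '@prisma/driver-adapter-utils'

// `map` transforms the success value and would leave an error value untouched,
// which is what the `startTransaction` wrapper above relies on when it wraps
// the returned transaction with `bindTransaction`.
const success = ok(2).map((n) => n * 21) // { ok: true, value: 42, ... }

const failure = err({ kind: 'GenericJsError', id: 1 })
if (!failure.ok) {
  console.error(failure.error) // narrowing through the `ok` discriminant
}
```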
const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => { - return ({ + return { flavour: transaction.flavour, options: transaction.options, queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)), executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), - }); + } } -function wrapAsync(registry: ErrorRegistryInternal, fn: (...args: A) => Promise>): (...args: A) => Promise> { +function wrapAsync( + registry: ErrorRegistryInternal, + fn: (...args: A) => Promise>, +): (...args: A) => Promise> { return async (...args) => { try { return await fn(...args) } catch (error) { const id = registry.registerNewError(error) - return { ok: false, error: { kind: 'GenericJsError', id } } + return err({ kind: 'GenericJsError', id }) } } -} \ No newline at end of file +} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts index ce04822473d..ee851d6961c 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts @@ -1,4 +1,5 @@ export { bindAdapter } from './binder' export { ColumnTypeEnum } from './const' export { Debug } from './debug' +export { ok, err, type Result } from './result' export type * from './types' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts new file mode 100644 index 00000000000..5af95db6867 --- /dev/null +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts @@ -0,0 +1,41 @@ +import { Error } from './types' +export type Result = { + // common methods + map(fn: (value: T) => U): Result + flatMap(fn: (value: T) => Result): Result +} & ( + | { + readonly ok: true + readonly value: T + } + | { + readonly ok: false + readonly error: Error + } +) + +export function ok(value: T): Result { + return { + ok: true, + value, + map(fn) { + return ok(fn(value)) + }, + flatMap(fn) { + return fn(value) + }, + } +} + +export function err(error: Error): Result { + return { + ok: false, + error, + map() { + return err(error) + }, + flatMap() { + return err(error) + }, + } +} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 826bc67acea..409f3958bcd 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -1,6 +1,7 @@ import { ColumnTypeEnum } from './const' +import { Result } from './result' -export type ColumnType = typeof ColumnTypeEnum[keyof typeof ColumnTypeEnum] +export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum] export interface ResultSet { /** @@ -33,25 +34,25 @@ export type Query = { } export type Error = { - kind: 'GenericJsError', + kind: 'GenericJsError' id: number -} - -export type Result = { - ok: true, - value: T } | { - ok: false, - error: Error + kind: 'PostgresError' + code: string, + severity: string + message: string + detail: string | undefined + column: string | undefined + hint: string | undefined } -export interface Queryable { - readonly flavour: 'mysql' | 'postgres' +export interface Queryable { + readonly flavour: 'mysql' | 
'postgres' | 'sqlite' /** * Execute a query given as SQL, interpolating the given parameters, * and returning the type-aware result set of the query. - * + * * This is the preferred way of executing `SELECT` queries. */ queryRaw(params: Query): Promise> @@ -59,7 +60,7 @@ export interface Queryable { /** * Execute a query given as SQL, interpolating the given parameters, * and returning the number of affected rows. - * + * * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries, * as well as transactional queries. */ diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json index 6864220623d..1fc20228fe6 100644 --- a/query-engine/driver-adapters/js/package.json +++ b/query-engine/driver-adapters/js/package.json @@ -5,7 +5,7 @@ "description": "", "engines": { "node": ">=16.13", - "pnpm": ">=8.6.7 <9" + "pnpm": ">=8.6.6 <9" }, "license": "Apache-2.0", "scripts": { diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 0c4b01a68f6..ce7a442a538 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -20,7 +20,7 @@ importers: adapter-neon: dependencies: - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils devDependencies: @@ -30,7 +30,7 @@ importers: adapter-pg: dependencies: - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils devDependencies: @@ -43,7 +43,7 @@ importers: adapter-planetscale: dependencies: - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils devDependencies: @@ -51,6 +51,30 @@ importers: specifier: ^1.11.0 version: 1.11.0 + connector-test-kit-executor: + dependencies: + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@prisma/adapter-neon': + specifier: workspace:* + version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: workspace:* + version: link:../adapter-pg + '@prisma/driver-adapter-utils': + specifier: workspace:* + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.23.0 + version: 5.23.0 + driver-adapter-utils: dependencies: debug: @@ -63,27 +87,27 @@ importers: smoke-test-js: dependencies: - '@jkomyno/prisma-adapter-neon': + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: ^1.11.0 + version: 1.11.0 + '@prisma/adapter-neon': specifier: workspace:* version: link:../adapter-neon - '@jkomyno/prisma-adapter-pg': + '@prisma/adapter-pg': specifier: workspace:* version: link:../adapter-pg - '@jkomyno/prisma-adapter-planetscale': + '@prisma/adapter-planetscale': specifier: workspace:* version: link:../adapter-planetscale - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/client': + specifier: 5.4.0-dev.29 + version: 5.4.0-dev.29(prisma@5.4.0-dev.29) + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/client': - specifier: 5.3.0-integration-feat-driver-adapters-in-client.1 - version: 
5.3.0-integration-feat-driver-adapters-in-client.1(prisma@5.3.0-integration-feat-driver-adapters-in-client.1) pg: specifier: ^8.11.3 version: 8.11.3 @@ -104,8 +128,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.3.0-integration-feat-driver-adapters-in-client.1 - version: 5.3.0-integration-feat-driver-adapters-in-client.1 + specifier: 5.4.0-dev.29 + version: 5.4.0-dev.29 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -391,8 +415,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.3.0-integration-feat-driver-adapters-in-client.1(prisma@5.3.0-integration-feat-driver-adapters-in-client.1): - resolution: {integrity: sha512-izGFo8RFgmHibBzQGRx66xfh08LcGaOysNWvMRgqT018kZ8c98qqfI0/E+LFgxb3Ar0hqz2zX8M4Fa56KvI6cw==} + /@prisma/client@5.4.0-dev.29(prisma@5.4.0-dev.29): + resolution: {integrity: sha512-rpuBku3CFmX6FDq2SANcc9Ch6ZTqT6fyhvhe66bI/kzJjVY4NN7PwleJesB8/VfS5TkAYMmK5HcPQUwi5hZEVw==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -401,16 +425,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.3.0-28.3457e5de04da1741c969a80068702ad103e99553 - prisma: 5.3.0-integration-feat-driver-adapters-in-client.1 + '@prisma/engines-version': 5.4.0-18.32692fd2ce90d456c093eb8eae68511575243419 + prisma: 5.4.0-dev.29 dev: false - /@prisma/engines-version@5.3.0-28.3457e5de04da1741c969a80068702ad103e99553: - resolution: {integrity: sha512-eb+8hgURyTu1qAWmTxgZCgBjf0UV6REC525fa1XnPpL6hxMZ7cEtFCX0f9GDopa/piCM9pq5H2ttthGOKQyVLA==} + /@prisma/engines-version@5.4.0-18.32692fd2ce90d456c093eb8eae68511575243419: + resolution: {integrity: sha512-6qgjyvmru90p7sn+mWQlZDmX8WgYTZ/cB2kpDShjbg1ymF4dIszqUm6RZqESoZ39Mgp5d620AgDtZqfFQ8sWRQ==} dev: false - /@prisma/engines@5.3.0-integration-feat-driver-adapters-in-client.1: - resolution: {integrity: sha512-euFOT9Wq0dVVXZjcLP/6/XRPr04dm4t9DtKJXUCk5Kja87bAy+knLdcC6Pkmbbjhi0fTThiKQOOxKxWBfXrr4A==} + /@prisma/engines@5.4.0-dev.29: + resolution: {integrity: sha512-3uZ/rLbrJcVSv6js2haSGb2QqB4n26j3Gr4w7iySiQ5O83L11rtOjmkRGdX8yXxEBibuWtOn43SJb24AlQDj8g==} requiresBuild: true /@types/debug@4.1.8: @@ -436,7 +460,6 @@ packages: '@types/node': 20.5.9 pg-protocol: 1.6.0 pg-types: 4.0.1 - dev: true /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} @@ -843,7 +866,6 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - dev: true /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -891,7 +913,6 @@ packages: /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - dev: true /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -924,7 +945,6 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 - dev: true /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -983,7 +1003,6 @@ packages: /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} 
engines: {node: '>=12'} - dev: true /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} @@ -994,7 +1013,6 @@ packages: engines: {node: '>= 6'} dependencies: obuf: 1.1.2 - dev: true /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} @@ -1003,7 +1021,6 @@ packages: /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} - dev: true /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} @@ -1014,19 +1031,17 @@ packages: /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} - dev: true /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - dev: true - /prisma@5.3.0-integration-feat-driver-adapters-in-client.1: - resolution: {integrity: sha512-M5EjBFZ3P3mjgYOfRBLqg5wKKeXq/VTv2wF9Ft4YCMMsHlcIJJ9IMV1UkzZLmP1yTdMxougJcLeDA9QGmdpsMA==} + /prisma@5.4.0-dev.29: + resolution: {integrity: sha512-TlhIZLVZsDVIQBcVZ8bRi9CJrThkEhKMJ9sEBtsINYx4ju3k7lGl9Kdqlm7zOW4FVwSNPgKvgsdzRgsO6fbDug==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.3.0-integration-feat-driver-adapters-in-client.1 + '@prisma/engines': 5.4.0-dev.29 /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml index 6a17ebd231f..a7ffe6b09c5 100644 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ b/query-engine/driver-adapters/js/pnpm-workspace.yaml @@ -1,6 +1,7 @@ packages: - './adapter-neon' - - './adapter-planetscale' - './adapter-pg' + - './adapter-planetscale' + - './connector-test-kit-executor' - './driver-adapter-utils' - './smoke-test-js' diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md index 62ec1d0439e..f719a718928 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ b/query-engine/driver-adapters/js/smoke-test-js/README.md @@ -1,4 +1,4 @@ -# @prisma/smoke-test-js +# @prisma/driver-adapters-smoke-tests-js This is a playground for testing the `libquery` client with the experimental Node.js drivers. It contains a subset of `@prisma/client`, plus some handy executable smoke tests: @@ -7,34 +7,51 @@ It contains a subset of `@prisma/client`, plus some handy executable smoke tests ## How to setup -We assume Node.js `v20.5.1`+ is installed. If not, run `nvm use` in the current directory. +We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory. It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon requires either Node.js `v18`+ or a custom `fetch` function. 
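+
+The `.envrc` mentioned below boils down to a handful of connection strings. A minimal sketch with placeholder values (not real credentials; see `.envrc.example` for the authoritative template):
+
+```bash
+export JS_PG_DATABASE_URL="postgres://postgres:postgres@localhost:5432/test"
+export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@YOUR-NEON-HOST/DATABASE"
+export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@YOUR-PLANETSCALE-HOST/DATABASE"
+```
+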
+In the parent directory (`cd ..`): +- Build the driver adapters via `pnpm i && pnpm build` + +In the current directoy: - Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template - Install Node.js dependencies via ```bash pnpm i ``` + +Anywhere in the repository: - Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine ### PlanetScale -- Create a new database on [PlanetScale](https://planetscale.com/) -- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`. +If you don't have a connection string yet: + +- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/) +- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). +- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`. In the current directory: - Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data. - Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database. -- Run `pnpm planetscale:client` to run smoke tests using `@prisma/client` against the PlanetScale database. - -Note: you used to be able to run these Prisma commands without changing the provider name, but [#4074](https://github.com/prisma/prisma-engines/pull/4074) changed that (see https://github.com/prisma/prisma-engines/pull/4074#issuecomment-1649942475). + For more fine-grained control: + - Run `pnpm planetscale:libquery` to test using `libquery` + - Run `pnpm planetscale:client` to test using `@prisma/client` ### Neon -- Create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). +If you don't have a connection string yet: + +- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). - Paste the connection string to `JS_NEON_DATABASE_URL`. In the current directory: - Run `pnpm prisma:neon` to push the Prisma schema and insert the test data. -- Run `pnpm neon` to run smoke tests using `libquery` against the Neon database. -- Run `pnpm neon:client` to run smoke tests using `@prisma/client` against the Neon database. +- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection. + For more fine-grained control: + - Run `pnpm neon:ws:libquery` to test using `libquery` + - Run `pnpm neon:ws:client` to test using `@prisma/client` +- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. 
+ For more fine-grained control: + - Run `pnpm neon:ws:http` to test using `libquery` + - Run `pnpm neon:ws:http` to test using `@prisma/client` diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index b04840a3cae..95459d0237d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -1,5 +1,5 @@ { - "name": "@jkomyno/smoke-test-js", + "name": "@prisma/driver-adapters-smoke-tests-js", "private": true, "type": "module", "version": "0.0.0", @@ -7,32 +7,44 @@ "scripts": { "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", + "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", + "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", + "prisma:neon:ws": "pnpm prisma:neon", + "prisma:neon:http": "pnpm prisma:neon", "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "neon:ws": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"tsx ./src/libquery/neon.ws.ts\"", - "neon:http": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"tsx ./src/libquery/neon.http.ts\"", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.http.test.ts", + "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", + "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", + "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", + "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", + "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", + "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", + "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"tsx ./src/libquery/pg.ts\"", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/client/pg.test.ts", + "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", + "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", + "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", + "pg": "pnpm pg:libquery && pnpm pg:client", + "errors": 
"DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/errors.test.ts", "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"tsx ./src/libquery/planetscale.ts\"", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/client/planetscale.test.ts" + "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", + "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", + "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", + "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client" }, "keywords": [], "author": "Alberto Schiabel ", "license": "Apache-2.0", "sideEffects": true, "dependencies": { - "@jkomyno/prisma-adapter-neon": "workspace:*", - "@jkomyno/prisma-adapter-planetscale": "workspace:*", - "@jkomyno/prisma-adapter-pg": "workspace:*", - "@jkomyno/prisma-driver-adapter-utils": "workspace:*", "@neondatabase/serverless": "^0.6.0", "@planetscale/database": "^1.11.0", - "@prisma/client": "5.3.0-integration-feat-driver-adapters-in-client.1", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/client": "5.4.0-dev.29", + "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", "undici": "^5.23.0" @@ -41,7 +53,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.3.0-integration-feat-driver-adapters-in-client.1", + "prisma": "5.4.0-dev.29", "tsx": "^3.12.7" } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma index 6681f70e6c6..00418d57cc2 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma @@ -114,3 +114,7 @@ model Product { properties Json properties_null Json? } + +model Unique { + email String @id +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma index c2564af557e..74ffd428c72 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma @@ -98,3 +98,12 @@ model Product { properties Json properties_null Json? 
} + +model User { + id String @id @default(uuid()) + email String +} + +model Unique { + email String @id +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index 8367b43a7ac..35a7e8975f2 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -1,7 +1,9 @@ import { describe, it } from 'node:test' +import path from 'node:path' import assert from 'node:assert' import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { DriverAdapter } from '@prisma/driver-adapter-utils' +import { getLibQueryEnginePath } from '../libquery/util' export async function smokeTestClient(driverAdapter: DriverAdapter) { const provider = driverAdapter.flavour @@ -13,13 +15,15 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { } as const, ] + const dirname = path.dirname(new URL(import.meta.url).pathname) + process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) + for (const adapter of [driverAdapter, undefined]) { const isUsingDriverAdapters = adapter !== undefined describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { it('batch queries', async () => { const prisma = new PrismaClient({ - // @ts-ignore - jsConnector: adapter, + adapter, log, }) @@ -48,15 +52,6 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { '-- Implicit "COMMIT" query via underlying driver', ] - const postgresExpectedQueries = [ - 'BEGIN', - 'DEALLOCATE ALL', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - if (['mysql'].includes(provider)) { if (isUsingDriverAdapters) { assert.deepEqual(queries, driverAdapterExpectedQueries) @@ -64,18 +59,18 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { assert.deepEqual(queries, defaultExpectedQueries) } } else if (['postgres'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, defaultExpectedQueries) - } else { - assert.deepEqual(queries, postgresExpectedQueries) - } + // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. + assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) + assert.deepEqual( + queries.filter((q) => q !== 'DEALLOCATE ALL'), + defaultExpectedQueries + ) } }) it('applies isolation level when using batch $transaction', async () => { const prisma = new PrismaClient({ - // @ts-ignore - jsConnector: adapter, + adapter, log, }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts index e2de75384b3..53156ac5624 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts @@ -1,15 +1,12 @@ import { describe } from 'node:test' import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' +import { PrismaNeonHTTP } from '@prisma/adapter-neon' import { smokeTestClient } from './client' describe('neon with @prisma/client', async () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` + const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - const connection = neon(connectionString, { - arrayMode: false, - fullResults: true, - }) + const connection = neon(connectionString) const adapter = new PrismaNeonHTTP(connection) smokeTestClient(adapter) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts index fddc42eeade..37b0a9088bb 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts @@ -1,13 +1,13 @@ import { describe } from 'node:test' import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' +import { PrismaNeon } from '@prisma/adapter-neon' import { WebSocket } from 'undici' import { smokeTestClient } from './client' neonConfig.webSocketConstructor = WebSocket describe('neon with @prisma/client', async () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` + const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' const pool = new Pool({ connectionString }) const adapter = new PrismaNeon(pool) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts index a6652d71437..99048ad3d95 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts @@ -1,13 +1,13 @@ import { describe } from 'node:test' import pg from 'pg' -import { PrismaPg } from '@jkomyno/prisma-adapter-pg' +import { PrismaPg } from '@prisma/adapter-pg' import { smokeTestClient } from './client' describe('pg with @prisma/client', async () => { - const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` + const connectionString = process.env.JS_PG_DATABASE_URL ?? '' const pool = new pg.Pool({ connectionString }) const adapter = new PrismaPg(pool) - + smokeTestClient(adapter) }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts index 07a9809b8c0..3c22b7aa306 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts @@ -1,13 +1,13 @@ import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' import { describe } from 'node:test' import { smokeTestClient } from './client' describe('planetscale with @prisma/client', async () => { - const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` + const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? 
'' const connnection = connect({ url: connectionString }) const adapter = new PrismaPlanetScale(connnection) - + smokeTestClient(adapter) }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts index a8f1c28bb64..a25b3dd2672 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts @@ -1,4 +1,4 @@ -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineConfig } from './QueryEngine' export type QueryEngineInstance = { diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts new file mode 100644 index 00000000000..c917f35fd7b --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts @@ -0,0 +1,96 @@ +import { bindAdapter } from '@prisma/driver-adapter-utils' +import test, { after, before, describe } from 'node:test' +import { createQueryFn, initQueryEngine, throwAdapterError } from './util' +import assert from 'node:assert' + +const fakeAdapter = bindAdapter({ + flavour: 'postgres', + startTransaction() { + throw new Error('Error in startTransaction') + }, + + queryRaw() { + throw new Error('Error in queryRaw') + }, + + executeRaw() { + throw new Error('Error in executeRaw') + }, + close() { + return Promise.resolve({ ok: true, value: undefined }) + }, +}) + +const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma') +const doQuery = createQueryFn(engine, fakeAdapter) + +const startTransaction = async () => { + const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } + const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}')) + if (res['error_code']) { + throwAdapterError(res, fakeAdapter) + } +} + +describe('errors propagation', () => { + before(async () => { + await engine.connect('{}') + }) + after(async () => { + await engine.disconnect('{}') + }) + + test('works for queries', async () => { + await assert.rejects( + doQuery({ + modelName: 'Product', + action: 'findMany', + query: { + arguments: {}, + selection: { + $scalars: true, + }, + }, + }), + /Error in queryRaw/, + ) + }) + + test('works for executeRaw', async () => { + await assert.rejects( + doQuery({ + action: 'executeRaw', + query: { + arguments: { + query: 'SELECT 1', + parameters: '[]', + }, + selection: { + $scalars: true, + }, + }, + }), + /Error in executeRaw/, + ) + }) + + test('works with implicit transaction', async () => { + await assert.rejects( + doQuery({ + modelName: 'Product', + action: 'deleteMany', + query: { + arguments: {}, + selection: { + $scalars: true, + }, + }, + }), + /Error in startTransaction/, + ) + }) + + test('works with explicit transaction', async () => { + await assert.rejects(startTransaction(), /Error in startTransaction/) + }) +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index f9c7925c9be..3f659a6cb59 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -1,168 +1,376 @@ -import { setTimeout } from 
'node:timers/promises' -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { describe, it, before, after } from 'node:test' +import assert from 'node:assert' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' -import { initQueryEngine } from './util' +import { createQueryFn, initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' -export async function smokeTestLibquery(db: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { - const engine = initQueryEngine(db, prismaSchemaRelativePath) +export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { + const engine = initQueryEngine(adapter, prismaSchemaRelativePath) + const flavour = adapter.flavour - console.log('[nodejs] connecting...') - await engine.connect('trace') - console.log('[nodejs] connected') + const doQuery = createQueryFn(engine, adapter) - const test = new SmokeTest(engine, db) - - await test.testJSON() - await test.testTypeTest2() - await test.testFindManyTypeTest() - await test.createAutoIncrement() - await test.testCreateAndDeleteChildParent() - await test.testTransaction() - await test.testRawError() + describe('using libquery with Driver Adapters', () => { + before(async () => { + await engine.connect('trace') + }) - // Note: calling `engine.disconnect` won't actually close the database connection. - console.log('[nodejs] disconnecting...') - await engine.disconnect('trace') - console.log('[nodejs] disconnected') + after(async () => { + await engine.disconnect('trace') + await adapter.close() + }) - console.log('[nodejs] re-connecting...') - await engine.connect('trace') - console.log('[nodejs] re-connecting') + it('create JSON values', async () => { + const json = JSON.stringify({ + foo: 'bar', + baz: 1, + }) - await setTimeout(0) + const created = await doQuery({ + action: 'createOne', + modelName: 'Product', + query: { + arguments: { + data: { + properties: json, + properties_null: null, + }, + }, + selection: { + properties: true, + }, + }, + }) - console.log('[nodejs] re-disconnecting...') - await engine.disconnect('trace') - console.log('[nodejs] re-disconnected') + assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') + console.log('[nodejs] created', JSON.stringify(created, null, 2)) - // Close the database connection. This is required to prevent the process from hanging. 
- console.log('[nodejs] closing database connection...') - await db.close() - console.log('[nodejs] closed database connection') -} + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'Product', + query: { + selection: { + id: true, + properties: true, + properties_null: true, + }, + }, + }) + console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] + await doQuery({ + action: 'deleteMany', + modelName: 'Product', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) + }) - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } + it('create with autoincrement', async () => { + await doQuery({ + modelName: 'Author', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) - async testJSON() { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, + const author = await doQuery({ + modelName: 'Author', + action: 'createOne', + query: { + arguments: { + data: { + firstName: 'Firstname from autoincrement', + lastName: 'Lastname from autoincrement', + age: 99, + }, + }, + selection: { + id: true, + firstName: true, + lastName: true, + }, + }, + }) + console.log('[nodejs] author', JSON.stringify(author, null, 2)) }) - const created = await this.doQuery( - { - "action": "createOne", - "modelName": "Product", - "query": { - "arguments": { - "data": { - "properties": json, - "properties_null": null - } + it('create non scalar types', async () => { + const create = await doQuery({ + action: 'createOne', + modelName: 'type_test_2', + query: { + arguments: { + data: {}, }, - "selection": { - "properties": true - } - } + selection: { + id: true, + datetime_column: true, + datetime_column_null: true, + }, + }, }) - console.log('[nodejs] created', JSON.stringify(created, null, 2)) + console.log('[nodejs] create', JSON.stringify(create, null, 2)) - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "Product", - "query": { - "selection": { - "id": true, - "properties": true, - "properties_null": true - } - } - } - ) + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test_2', + query: { + selection: { + id: true, + datetime_column: true, + datetime_column_null: true, + }, + arguments: { + where: {}, + }, + }, + }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) + console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - await this.doQuery( - { - "action": "deleteMany", - "modelName": "Product", - "query": { - "arguments": { - "where": {} + await doQuery({ + action: 'deleteMany', + modelName: 'type_test_2', + query: { + arguments: { + where: {}, }, - "selection": { - "count": true - } - } - } - ) + selection: { + count: true, + }, + }, + }) + }) - return resultSet - } + it('create/delete parent and child', async () => { + /* Delete all child and parent records */ + + // Queries: [ + // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', + // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', + // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' + // ] + await doQuery({ + modelName: 'Child', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) - async testTypeTest2() { - const create = await this.doQuery( - { - "action": "createOne", - "modelName": "type_test_2", - "query": { - "arguments": { - "data": {} + // Queries: [ + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', + // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' + // ] + await doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: {}, }, - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true - } - } - } - ) + selection: { + count: true, + }, + }, + }) - console.log('[nodejs] create', JSON.stringify(create, null, 2)) + /* Create a parent with some new children, within a transaction */ + + // Queries: [ + // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', + // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', + // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', + // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' + // ] + await doQuery({ + modelName: 'Parent', + action: 'createOne', + query: { + arguments: { + data: { + p: 'p1', + p_1: '1', + p_2: '2', + childOpt: { + create: { + c: 'c1', + c_1: 'foo', + c_2: 'bar', + }, + }, + }, + }, + selection: { + p: true, + childOpt: { + selection: { + c: true, + }, + }, + }, + }, + }) - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test_2", - "query": { - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true + /* Delete the parent */ + + // Queries: [ + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', + // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', + // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', + // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' + // ] + await doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: { + p: 'p1', + }, }, - "arguments": { - "where": {} - } - } - } - ) + selection: { + count: true, + }, + }, + }) + }) - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) + it('create explicit transaction', async () => { + const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } + const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') + const tx_id = JSON.parse(startResponse).id + + console.log('[nodejs] transaction id', tx_id) + await doQuery( + { + action: 'findMany', + modelName: 'Author', + query: { + selection: { $scalars: true }, + }, + }, + tx_id, + ) - await this.doQuery( - { - "action": "deleteMany", - "modelName": "type_test_2", - "query": { - "arguments": { - "where": {} + const commitResponse = await engine.commitTransaction(tx_id, 'trace') + console.log('[nodejs] commited', commitResponse) + }) + + it('expected error', async () => { + const result = await doQuery({ + modelName: 'Unique', + action: 'createMany', + query: { + arguments: { + data: [{ email: 'duplicate@example.com' }, { email: 'duplicate@example.com' }], }, - "selection": { - "count": true - } - } + selection: { + $scalars: true, + }, + }, + }) + + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + }) + + describe('read scalar and non scalar types', () => { + if (['mysql'].includes(flavour)) { + it('mysql', async () => { + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + tinyint_column: true, + smallint_column: true, + mediumint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + binary_column: true, + varbinary_column: true, + blob_column: true, + }, + }, + }) + + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) + }) + } else if (['postgres'].includes(flavour)) { + it('postgres', async () => { + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + smallint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) + }) + } else { + throw new Error(`Missing test for flavour ${flavour}`) } - ) + }) + }) +} - return resultSet +class SmokeTest { + readonly flavour: ErrorCapturingDriverAdapter['flavour'] + + constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { + this.flavour = connector.flavour } async testFindManyTypeTest() { @@ -175,39 +383,38 @@ class SmokeTest { return } - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "tinyint_column": true, - "smallint_column": true, - "mediumint_column": true, - "int_column": true, - 
"bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true, - "binary_column": true, - "varbinary_column": true, - "blob_column": true - } - } - }) + const resultSet = await this.doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + tinyint_column: true, + smallint_column: true, + mediumint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + binary_column: true, + varbinary_column: true, + blob_column: true, + }, + }, + }) console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - + return resultSet } @@ -216,160 +423,148 @@ class SmokeTest { return } - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "smallint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) - + const resultSet = await this.doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + smallint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) + return resultSet } async createAutoIncrement() { - await this.doQuery( - { - "modelName": "Author", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await this.doQuery({ + modelName: 'Author', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) - const author = await this.doQuery( - { - "modelName": "Author", - "action": "createOne", - "query": { - "arguments": { - "data": { - "firstName": "Firstname from autoincrement", - "lastName": "Lastname from autoincrement", - "age": 99 - } + const author = await this.doQuery({ + modelName: 'Author', + action: 'createOne', + query: { + arguments: { + data: { + firstName: 'Firstname from autoincrement', + lastName: 'Lastname from autoincrement', + age: 99, }, - "selection": { - "id": true, - "firstName": true, - "lastName": true - } - } - } - ) + }, + selection: { + id: true, + firstName: true, + lastName: true, + }, + }, + }) console.log('[nodejs] author', JSON.stringify(author, null, 2)) } async testCreateAndDeleteChildParent() { /* Delete all child and parent records 
*/ - + // Queries: [ // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' // ] - await this.doQuery( - { - "modelName": "Child", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - + await this.doQuery({ + modelName: 'Child', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) + // Queries: [ // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' // ] - await this.doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - + await this.doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) + /* Create a parent with some new children, within a transaction */ - + // Queries: [ // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' // ] - await this.doQuery( - { - "modelName": "Parent", - "action": "createOne", - "query": { - "arguments": { - "data": { - "p": "p1", - "p_1": "1", - "p_2": "2", - "childOpt": { - "create": { - "c": "c1", - "c_1": "foo", - "c_2": "bar" - } - } - } + await this.doQuery({ + modelName: 'Parent', + action: 'createOne', + query: { + arguments: { + data: { + p: 'p1', + p_1: '1', + p_2: '2', + childOpt: { + create: { + c: 'c1', + c_1: 'foo', + c_2: 'bar', + }, + }, }, - "selection": { - "p": true, - "childOpt": { - "selection": { - "c": true - } - } - } - } - } - ) - + }, + selection: { + p: true, + childOpt: { + selection: { + c: true, + }, + }, + }, + }, + }) + /* Delete the parent */ - + // Queries: [ // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', @@ -377,74 +572,58 @@ class SmokeTest { // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' // ] - const resultDeleteMany = await this.doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": { - "p": "p1" - } + const resultDeleteMany = await this.doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: { + p: 'p1', }, - "selection": { - "count": true - } - } - } - ) + }, + selection: { + count: true, + }, + }, + }) console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) } async testTransaction() { - const startResponse = await this.engine.startTransaction(JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), 'trace') + const startResponse = await this.engine.startTransaction( + JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), + 'trace', + ) const tx_id = JSON.parse(startResponse).id console.log('[nodejs] transaction id', tx_id) await this.doQuery( { - "action": "findMany", - "modelName": "Author", - "query": { - "selection": { "$scalars": true } - } + action: 'findMany', + modelName: 'Author', + query: { + selection: { $scalars: true }, + }, }, - tx_id + tx_id, ) const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') console.log('[nodejs] commited', commitResponse) } - async testRawError() { - try { - await this.doQuery({ - action: 'queryRaw', - query: { - selection: { $scalars: true }, - arguments: { - query: 'NOT A VALID SQL, THIS WILL FAIL', - parameters: '[]' - } - } - }) - console.log(`[nodejs] expected exception, but query succeeded`) - } catch (error) { - console.log('[nodejs] caught expected error', error) - } - - } - private async doQuery(query: JsonQuery, tx_id?: string) { const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) const parsedResult = JSON.parse(result) if (parsedResult.errors) { const error = parsedResult.errors[0]?.user_facing_error if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) + const jsError = this.connector.errorRegistry.consumeError(error.meta.id) if (!jsError) { - throw new Error(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) + throw new Error( + `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`, + ) } throw jsError.error } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts new file mode 100644 index 00000000000..ac165d29f58 --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -0,0 +1,16 @@ +import { PrismaNeonHTTP } from '@prisma/adapter-neon' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { neon } from '@neondatabase/serverless' +import { describe } from 'node:test' +import { smokeTestLibquery } from './libquery' + +describe('neon (HTTP)', () => { + const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' + + const neonConnection = neon(connectionString) + + const adapter = new PrismaNeonHTTP(neonConnection) + const driverAdapter = bindAdapter(adapter) + + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts deleted file mode 100644 index 755289dcd42..00000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { smokeTestLibquery } from './libquery' - -async function main() { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - - const neonConnection = neon(connectionString, { - arrayMode: false, - fullResults: true, - }) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts new file mode 100644 index 00000000000..54765f5961b --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts @@ -0,0 +1,18 @@ +import { PrismaNeon } from '@prisma/adapter-neon' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { WebSocket } from 'undici' +import { Pool, neonConfig } from '@neondatabase/serverless' +import { describe } from 'node:test' +import { smokeTestLibquery } from './libquery' + +neonConfig.webSocketConstructor = WebSocket + +describe('neon (WebSocket)', () => { + const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' + + const pool = new Pool({ connectionString }) + const adapter = new PrismaNeon(pool) + const driverAdapter = bindAdapter(adapter) + + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts deleted file mode 100644 index 888f29d35e2..00000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -async function main() { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts new file mode 100644 index 00000000000..9b79e7284be --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts @@ -0,0 +1,15 @@ +import pg from 'pg' +import { PrismaPg } from '@prisma/adapter-pg' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { describe } from 'node:test' +import { smokeTestLibquery } from './libquery' + +describe('pg', () => { + const connectionString = process.env.JS_PG_DATABASE_URL ?? 
'' + + const pool = new pg.Pool({ connectionString }) + const adapter = new PrismaPg(pool) + const driverAdapter = bindAdapter(adapter) + + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts deleted file mode 100644 index cc657ddbca3..00000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts +++ /dev/null @@ -1,19 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@jkomyno/prisma-adapter-pg' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { smokeTestLibquery } from './libquery' - -async function main() { - const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts new file mode 100644 index 00000000000..bb7c81805ad --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts @@ -0,0 +1,15 @@ +import { connect } from '@planetscale/database' +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { describe } from 'node:test' +import { smokeTestLibquery } from './libquery' + +describe('planetscale', () => { + const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? 
'' + + const connnection = connect({ url: connectionString }) + const adapter = new PrismaPlanetScale(connnection) + const driverAdapter = bindAdapter(adapter) + + smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts deleted file mode 100644 index 971c3fa0fb8..00000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { smokeTestLibquery } from './libquery' - -async function main() { - const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` - - const planetscale = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(planetscale) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts index 187d8b86c7f..783eb76759d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts @@ -1,15 +1,17 @@ import path from 'node:path' import os from 'node:os' import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import { Library, QueryEngineInstance } from '../engines/types/Library' +import { JsonQuery } from '../engines/types/JsonProtocol' -export function initQueryEngine(driver: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string): QueryEngineInstance { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' +export function initQueryEngine( + driver: ErrorCapturingDriverAdapter, + prismaSchemaRelativePath: string, +): QueryEngineInstance { const dirname = path.dirname(new URL(import.meta.url).pathname) + const libQueryEnginePath = getLibQueryEnginePath(dirname) - const libQueryEnginePath = path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) const schemaPath = path.join(dirname, prismaSchemaRelativePath) console.log('[nodejs] read Prisma schema from', schemaPath) @@ -38,3 +40,32 @@ export function initQueryEngine(driver: ErrorCapturingDriverAdapter, prismaSchem return engine } + +export function getLibQueryEnginePath(dirname: String) { + // I assume nobody will run this on Windows ¯\_(ツ)_/¯ + const libExt = os.platform() === 'darwin' ? 
'dylib' : 'so'
+  return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`)
+}
+
+export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) {
+  return async function doQuery(query: JsonQuery, tx_id?: string) {
+    const result = await engine.query(JSON.stringify(query), 'trace', tx_id)
+    const parsedResult = JSON.parse(result)
+    if (parsedResult.errors) {
+      throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter)
+    }
+    return parsedResult
+  }
+}
+
+export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) {
+  if (error.error_code === 'P2036') {
+    const jsError = adapter.errorRegistry.consumeError(error.meta.id)
+    if (!jsError) {
+      throw new Error(
+        `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`,
+      )
+    }
+    throw jsError.error
+  }
+}
diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs
index 2a1d6755f68..5dbb549e677 100644
--- a/query-engine/driver-adapters/src/queryable.rs
+++ b/query-engine/driver-adapters/src/queryable.rs
@@ -46,6 +46,7 @@ impl JsBaseQueryable {
         match self.flavour {
             Flavour::Mysql => visitor::Mysql::build(q),
             Flavour::Postgres => visitor::Postgres::build(q),
+            Flavour::Sqlite => visitor::Sqlite::build(q),
             _ => unimplemented!("Unsupported flavour for JS connector {:?}", self.flavour),
         }
     }
diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs
index a5965509ef8..fc6f52bd274 100644
--- a/query-engine/driver-adapters/src/result.rs
+++ b/query-engine/driver-adapters/src/result.rs
@@ -1,14 +1,29 @@
 use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue};
-use quaint::error::Error as QuaintError;
+use quaint::error::{Error as QuaintError, PostgresError};
 use serde::Deserialize;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
+#[serde(remote = "PostgresError")]
+pub struct PostgresErrorDef {
+    code: String,
+    message: String,
+    severity: String,
+    detail: Option<String>,
+    column: Option<String>,
+    hint: Option<String>,
+}
+
+#[derive(Deserialize)]
+#[serde(tag = "kind")]
 /// Wrapper for JS-side errors
 /// See driver-adapters/js/adapter-utils/src/types.ts file for example
 pub(crate) enum DriverAdapterError {
     /// Unexpected JS exception
-    GenericJsError { id: i32 },
+    GenericJsError {
+        id: i32,
+    },
+
+    PostgresError(#[serde(with = "PostgresErrorDef")] PostgresError),
     // in the future, expected errors that map to known user errors with PXXX codes will also go here
 }
 
@@ -24,6 +39,7 @@ impl From<DriverAdapterError> for QuaintError {
     fn from(value: DriverAdapterError) -> Self {
         match value {
             DriverAdapterError::GenericJsError { id } => QuaintError::external_error(id),
+            DriverAdapterError::PostgresError(e) => e.into(),
             // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here
         }
     }
diff --git a/query-engine/metrics/src/common.rs b/query-engine/metrics/src/common.rs
index 76549a4bb2d..c859e142b53 100644
--- a/query-engine/metrics/src/common.rs
+++ b/query-engine/metrics/src/common.rs
@@ -52,7 +52,30 @@ pub(crate) struct Metric {
 }
 
 impl Metric {
-    pub fn new(key: Key, description: String, value: MetricValue, global_labels: HashMap<String, String>) -> Self {
+    pub(crate) fn renamed(
+        key: Key,
+        descriptions: &HashMap<String, String>,
+        value: MetricValue,
+        global_labels: &HashMap<String, String>,
+    ) -> Self {
+        match crate::METRIC_RENAMES.get(key.name()) {
+            Some((new_key, new_description)) => Self::new(
+                Key::from_parts(new_key.to_string(),
key.labels()), + new_description.to_string(), + value, + global_labels.clone(), + ), + None => { + let description = descriptions + .get(key.name()) + .map(|s| s.to_string()) + .unwrap_or(String::new()); + Self::new(key, description, value, global_labels.clone()) + } + } + } + + fn new(key: Key, description: String, value: MetricValue, global_labels: HashMap) -> Self { let (name, labels) = key.into_parts(); let mut labels_map: HashMap = labels @@ -62,13 +85,8 @@ impl Metric { labels_map.extend(global_labels); - let mut key = name.as_str(); - if let Some(rename) = crate::METRIC_RENAMES.get(key) { - key = rename; - } - Self { - key: key.to_string(), + key: name.as_str().to_string(), value, description, labels: labels_map, diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs index 4fb3be90ad4..7f34f84a861 100644 --- a/query-engine/metrics/src/lib.rs +++ b/query-engine/metrics/src/lib.rs @@ -34,7 +34,8 @@ use once_cell::sync::Lazy; use recorder::*; pub use registry::MetricRegistry; use serde::Deserialize; -use std::{collections::HashMap, sync::Once}; +use std::collections::HashMap; +use std::sync::Once; pub extern crate metrics; pub use metrics::{ @@ -42,36 +43,31 @@ pub use metrics::{ increment_counter, increment_gauge, }; -// Dependency metrics names emitted by the connector pool implementation (mobc) that will be renamed -// using the `METRIC_RENAMES` map. -pub const MOBC_POOL_CONNECTIONS_OPENED_TOTAL: &str = "mobc_pool_connections_opened_total"; -pub const MOBC_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "mobc_pool_connections_closed_total"; -pub const MOBC_POOL_CONNECTIONS_OPEN: &str = "mobc_pool_connections_open"; -pub const MOBC_POOL_CONNECTIONS_BUSY: &str = "mobc_pool_connections_busy"; -pub const MOBC_POOL_CONNECTIONS_IDLE: &str = "mobc_pool_connections_idle"; -pub const MOBC_POOL_WAIT_COUNT: &str = "mobc_client_queries_wait"; -pub const MOBC_POOL_WAIT_DURATION: &str = "mobc_client_queries_wait_histogram_ms"; - -// External metrics names that we expose. -// counters -pub const PRISMA_CLIENT_QUERIES_TOTAL: &str = "prisma_client_queries_total"; -pub const PRISMA_DATASOURCE_QUERIES_TOTAL: &str = "prisma_datasource_queries_total"; -pub const PRISMA_POOL_CONNECTIONS_OPENED_TOTAL: &str = "prisma_pool_connections_opened_total"; -pub const PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "prisma_pool_connections_closed_total"; -// gauges -pub const PRISMA_POOL_CONNECTIONS_OPEN: &str = "prisma_pool_connections_open"; -pub const PRISMA_POOL_CONNECTIONS_BUSY: &str = "prisma_pool_connections_busy"; -pub const PRISMA_POOL_CONNECTIONS_IDLE: &str = "prisma_pool_connections_idle"; -pub const PRISMA_CLIENT_QUERIES_WAIT: &str = "prisma_client_queries_wait"; -pub const PRISMA_CLIENT_QUERIES_ACTIVE: &str = "prisma_client_queries_active"; -// histograms -pub const PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_client_queries_duration_histogram_ms"; -pub const PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS: &str = "prisma_client_queries_wait_histogram_ms"; -pub const PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_datasource_queries_duration_histogram_ms"; - -// We need a list of acceptable metrics, we don't want to accidentally process metrics emitted by a -// third party library -const ACCEPT_LIST: &[&str] = &[ +// Metrics that we emit from the engines, third party metrics emitted by libraries and that we rename are omitted. 
+pub const PRISMA_CLIENT_QUERIES_TOTAL: &str = "prisma_client_queries_total"; // counter +pub const PRISMA_DATASOURCE_QUERIES_TOTAL: &str = "prisma_datasource_queries_total"; // counter +pub const PRISMA_CLIENT_QUERIES_ACTIVE: &str = "prisma_client_queries_active"; // gauge +pub const PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_client_queries_duration_histogram_ms"; // histogram +pub const PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_datasource_queries_duration_histogram_ms"; // histogram + +// metrics emitted by the connector pool implementation (mobc) that will be renamed using the `METRIC_RENAMES` map. +const MOBC_POOL_CONNECTIONS_OPENED_TOTAL: &str = "mobc_pool_connections_opened_total"; // counter +const MOBC_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "mobc_pool_connections_closed_total"; // counter +const MOBC_POOL_CONNECTIONS_OPEN: &str = "mobc_pool_connections_open"; // gauge +const MOBC_POOL_CONNECTIONS_BUSY: &str = "mobc_pool_connections_busy"; // gauge +const MOBC_POOL_CONNECTIONS_IDLE: &str = "mobc_pool_connections_idle"; // gauge +const MOBC_POOL_WAIT_COUNT: &str = "mobc_client_queries_wait"; // gauge +const MOBC_POOL_WAIT_DURATION: &str = "mobc_client_queries_wait_histogram_ms"; // histogram + +/// Accept list: both first-party (emitted by the query engine) and third-party (emitted) metrics +pub const ACCEPT_LIST: &[&str] = &[ + // first-party + PRISMA_CLIENT_QUERIES_TOTAL, + PRISMA_DATASOURCE_QUERIES_TOTAL, + PRISMA_CLIENT_QUERIES_ACTIVE, + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, + // third-party, emitted by mobc MOBC_POOL_CONNECTIONS_OPENED_TOTAL, MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, MOBC_POOL_CONNECTIONS_OPEN, @@ -79,127 +75,97 @@ const ACCEPT_LIST: &[&str] = &[ MOBC_POOL_CONNECTIONS_IDLE, MOBC_POOL_WAIT_COUNT, MOBC_POOL_WAIT_DURATION, - PRISMA_CLIENT_QUERIES_TOTAL, - PRISMA_DATASOURCE_QUERIES_TOTAL, - PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, - PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, - PRISMA_POOL_CONNECTIONS_OPEN, - PRISMA_POOL_CONNECTIONS_BUSY, - PRISMA_POOL_CONNECTIONS_IDLE, - PRISMA_CLIENT_QUERIES_WAIT, - PRISMA_CLIENT_QUERIES_ACTIVE, - PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, - PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS, - PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, ]; -// Some of the metrics we receive have their internal names, and we need to expose them under a different -// name, this map translates from the internal names used by mobc to the external names we want to expose -static METRIC_RENAMES: Lazy> = Lazy::new(|| { +/// Map that for any given accepted metric that is emitted by a third-party, in this case only the +/// connection pool library mobc, it points to an internal, accepted metrics name and its description +/// as displayed to users. This is used to rebrand the third-party metrics to accepted, prisma-specific +/// ones. 
+#[rustfmt::skip]
+static METRIC_RENAMES: Lazy<HashMap<&'static str, (&'static str, &'static str)>> = Lazy::new(|| {
     HashMap::from([
-        (MOBC_POOL_CONNECTIONS_OPENED_TOTAL, PRISMA_POOL_CONNECTIONS_OPENED_TOTAL),
-        (MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL),
-        (MOBC_POOL_CONNECTIONS_OPEN, PRISMA_POOL_CONNECTIONS_OPEN),
-        (MOBC_POOL_CONNECTIONS_BUSY, PRISMA_POOL_CONNECTIONS_BUSY),
-        (MOBC_POOL_CONNECTIONS_IDLE, PRISMA_POOL_CONNECTIONS_IDLE),
-        (MOBC_POOL_WAIT_COUNT, PRISMA_CLIENT_QUERIES_WAIT),
-        (MOBC_POOL_WAIT_DURATION, PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS),
+        (MOBC_POOL_CONNECTIONS_OPENED_TOTAL, ("prisma_pool_connections_opened_total", "The total number of pool connections opened")),
+        (MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, ("prisma_pool_connections_closed_total", "The total number of pool connections closed")),
+        (MOBC_POOL_CONNECTIONS_OPEN, ("prisma_pool_connections_open", "The number of pool connections currently open")),
+        (MOBC_POOL_CONNECTIONS_BUSY, ("prisma_pool_connections_busy", "The number of pool connections currently executing datasource queries")),
+        (MOBC_POOL_CONNECTIONS_IDLE, ("prisma_pool_connections_idle", "The number of pool connections that are not busy running a query")),
+        (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for a free connection")),
+        (MOBC_POOL_WAIT_DURATION, ("prisma_client_queries_wait_histogram_ms", "The distribution of the time all datasource queries spent waiting for a free connection")),
     ])
 });
 
-// At the moment the histogram is only used for timings. So the bounds are hard coded here
-// The buckets are for ms
-pub(crate) const HISTOGRAM_BOUNDS: [f64; 10] = [0.0, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, 5000.0, 50000.0];
+pub fn setup() {
+    set_recorder();
+    initialize_metrics();
+}
 
-#[derive(PartialEq, Eq, Debug, Deserialize)]
-pub enum MetricFormat {
-    #[serde(alias = "json")]
-    Json,
-    #[serde(alias = "prometheus")]
-    Prometheus,
+static METRIC_RECORDER: Once = Once::new();
+
+fn set_recorder() {
+    METRIC_RECORDER.call_once(|| {
+        metrics::set_boxed_recorder(Box::new(MetricRecorder)).unwrap();
+    });
 }
 
-pub fn setup() {
-    set_recorder();
-    describe_metrics();
+/// Initialize metrics descriptions and values
+pub fn initialize_metrics() {
+    initialize_metrics_descriptions();
+    initialize_metrics_values();
 }
 
-// Describe all metric here so that every time for create
-// a new metric registry for a Query Instance the descriptions
-// will be in place
-pub fn describe_metrics() {
-    // counters
+/// Describe all first-party metrics that we record in prisma-engines. Metrics recorded by third parties
+/// --like mobc-- are described by those third parties but ignored, and replaced by the descriptions in the
+/// METRIC_RENAMES map.
+fn initialize_metrics_descriptions() { describe_counter!( PRISMA_CLIENT_QUERIES_TOTAL, - "Total number of Prisma Client queries executed" + "The total number of Prisma Client queries executed" ); describe_counter!( PRISMA_DATASOURCE_QUERIES_TOTAL, - "Total number of Datasource Queries executed" - ); - describe_counter!( - PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, - "Total number of Pool Connections opened" - ); - describe_counter!( - PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, - "Total number of Pool Connections closed" - ); - - absolute_counter!(PRISMA_CLIENT_QUERIES_TOTAL, 0); - absolute_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL, 0); - absolute_counter!(PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, 0); - absolute_counter!(PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, 0); - - // gauges - describe_gauge!( - PRISMA_POOL_CONNECTIONS_OPEN, - "Number of currently open Pool Connections (able to execute a datasource query)" - ); - describe_gauge!( - PRISMA_POOL_CONNECTIONS_BUSY, - "Number of currently busy Pool Connections (executing a datasource query)" - ); - describe_gauge!( - PRISMA_POOL_CONNECTIONS_IDLE, - "Number of currently unused Pool Connections (waiting for the next datasource query to run)" - ); - describe_gauge!( - PRISMA_CLIENT_QUERIES_WAIT, - "Number of Prisma Client queries currently waiting for a connection" + "The total number of datasource queries executed" ); describe_gauge!( PRISMA_CLIENT_QUERIES_ACTIVE, - "Number of currently active Prisma Client queries" + "The number of currently active Prisma Client queries" ); - - gauge!(PRISMA_POOL_CONNECTIONS_OPEN, 0.0); - gauge!(PRISMA_POOL_CONNECTIONS_BUSY, 0.0); - gauge!(PRISMA_POOL_CONNECTIONS_IDLE, 0.0); - gauge!(PRISMA_CLIENT_QUERIES_WAIT, 0.0); - gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); - - // histograms describe_histogram!( - PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS, - "Histogram of the wait time of all Prisma Client Queries in ms" + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + "The distribution of the time Prisma Client queries took to run end to end" ); describe_histogram!( PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, - "Histogram of the duration of all executed Datasource Queries in ms" - ); - describe_histogram!( - PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, - "Histogram of the duration of all executed Prisma Client queries in ms" + "The distribution of the time datasource queries took to run" ); } -static METRIC_RECORDER: Once = Once::new(); +/// Initialize all metrics values (first and third-party) +/// +/// FIXME: https://github.com/prisma/prisma/issues/21070 +/// Histograms are excluded, as their initialization will alter the histogram values. +/// (i.e. histograms don't have a neutral value, like counters or gauges) +fn initialize_metrics_values() { + absolute_counter!(PRISMA_CLIENT_QUERIES_TOTAL, 0); + absolute_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL, 0); + gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); + absolute_counter!(MOBC_POOL_CONNECTIONS_OPENED_TOTAL, 0); + absolute_counter!(MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, 0); + gauge!(MOBC_POOL_CONNECTIONS_OPEN, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_BUSY, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_IDLE, 0.0); + gauge!(MOBC_POOL_WAIT_COUNT, 0.0); +} -fn set_recorder() { - METRIC_RECORDER.call_once(|| { - metrics::set_boxed_recorder(Box::new(MetricRecorder)).unwrap(); - }); +// At the moment the histogram is only used for timings. 
So the bounds are hard coded here
+// The buckets are for ms
+pub(crate) const HISTOGRAM_BOUNDS: [f64; 10] = [0.0, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, 5000.0, 50000.0];
+
+#[derive(PartialEq, Eq, Debug, Deserialize)]
+pub enum MetricFormat {
+    #[serde(alias = "json")]
+    Json,
+    #[serde(alias = "prometheus")]
+    Prometheus,
 }
 
 #[cfg(test)]
diff --git a/query-engine/metrics/src/registry.rs b/query-engine/metrics/src/registry.rs
index 3f4a892b708..6530edbe876 100644
--- a/query-engine/metrics/src/registry.rs
+++ b/query-engine/metrics/src/registry.rs
@@ -160,20 +160,16 @@ impl MetricRegistry {
         let mut counters: Vec<Metric> = counter_handles
             .into_iter()
             .map(|(key, counter)| {
-                let key_name = key.name();
                 let value = counter.get_inner().load(Ordering::Acquire);
-                let description = descriptions.get(key_name).cloned().unwrap_or_default();
-                Metric::new(key, description, MetricValue::Counter(value), global_labels.clone())
+                Metric::renamed(key, &descriptions, MetricValue::Counter(value), &global_labels)
             })
             .collect();
 
         let mut gauges: Vec<Metric> = gauge_handles
             .into_iter()
             .map(|(key, gauge)| {
-                let key_name = key.name();
-                let description = descriptions.get(key_name).cloned().unwrap_or_default();
                 let value = f64::from_bits(gauge.get_inner().load(Ordering::Acquire));
-                Metric::new(key, description, MetricValue::Gauge(value), global_labels.clone())
+                Metric::renamed(key, &descriptions, MetricValue::Gauge(value), &global_labels)
             })
             .collect();
 
@@ -185,13 +181,11 @@ impl MetricRegistry {
                     histogram.record_many(s);
                 });
 
-                let key_name = key.name();
-                let description = descriptions.get(key_name).cloned().unwrap_or_default();
-                Metric::new(
+                Metric::renamed(
                     key,
-                    description,
+                    &descriptions,
                     MetricValue::Histogram(histogram.into()),
-                    global_labels.clone(),
+                    &global_labels,
                 )
             })
             .collect();
diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs
index 37baeaee2c6..e9e7ad681cd 100644
--- a/query-engine/query-engine-node-api/src/engine.rs
+++ b/query-engine/query-engine-node-api/src/engine.rs
@@ -147,7 +147,7 @@ impl QueryEngine {
         napi_env: Env,
         options: JsUnknown,
         callback: JsFunction,
-        maybe_driver: Option<JsObject>,
+        maybe_adapter: Option<JsObject>,
     ) -> napi::Result<Self> {
         let mut log_callback = callback.create_threadsafe_function(0usize, |ctx: ThreadSafeCallContext<String>| {
             Ok(vec![ctx.env.create_string(&ctx.value)?])
@@ -163,7 +163,17 @@ impl QueryEngine {
             config_dir,
             ignore_env_var_errors,
             engine_protocol,
-        } = napi_env.from_js_value(options)?;
+        } = napi_env.from_js_value(options).expect(
+            r###"
+            Failed to deserialize constructor options.
+
+            This usually happens when the JavaScript object passed to the constructor is missing
+            properties for the ConstructorOptions fields that must have some value.
+
+            If you set some of these in JavaScript through environment variables, make sure there are
+            values for data_model, log_level, and any field that is not Option
+            "###,
+        );
 
         let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env
         let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect();
@@ -181,17 +191,14 @@ impl QueryEngine {
             );
         } else {
             #[cfg(feature = "driver-adapters")]
-            if let Some(driver) = maybe_driver {
-                let js_queryable = driver_adapters::from_napi(driver);
-                let provider_name = schema.connector.provider_name();
+            if let Some(adapter) = maybe_adapter {
+                let js_queryable = driver_adapters::from_napi(adapter);
 
-                match sql_connector::register_driver_adapter(provider_name, Arc::new(js_queryable)) {
-                    Ok(_) => {
-                        connector_mode = ConnectorMode::Js;
-                        tracing::info!("Registered driver adapter for {provider_name}.")
-                    }
-                    Err(err) => tracing::error!("Failed to register driver adapter for {provider_name}. {err}"),
-                }
+                sql_connector::activate_driver_adapter(Arc::new(js_queryable));
+                connector_mode = ConnectorMode::Js;
+
+                let provider_name = schema.connector.provider_name();
+                tracing::info!("Registered driver adapter for {provider_name}.");
             }
         }
 
@@ -235,7 +242,7 @@ impl QueryEngine {
         if enable_metrics {
             napi_env.execute_tokio_future(
                 async {
-                    query_engine_metrics::describe_metrics();
+                    query_engine_metrics::initialize_metrics();
 
                     Ok(())
                 }
                 .with_subscriber(logger.dispatcher()),
diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs
index 6d4dec48228..652ad3108f0 100644
--- a/query-engine/request-handlers/src/load_executor.rs
+++ b/query-engine/request-handlers/src/load_executor.rs
@@ -2,6 +2,7 @@ use psl::{builtin_connectors::*, Datasource, PreviewFeatures};
 use query_core::{executor::InterpretingExecutor, Connector, QueryExecutor};
 use sql_query_connector::*;
 use std::collections::HashMap;
+use std::env;
 use tracing::trace;
 use url::Url;
 
@@ -17,24 +18,38 @@ pub async fn load(
     features: PreviewFeatures,
     url: &str,
 ) -> query_core::Result<Box<dyn QueryExecutor + Send + Sync>> {
-    if connector_mode == ConnectorMode::Js {
-        #[cfg(feature = "driver-adapters")]
-        return driver_adapter(source, url, features).await;
-    }
+    match connector_mode {
+        ConnectorMode::Js => {
+            #[cfg(not(feature = "driver-adapters"))]
+            panic!("Driver adapters are not enabled, but connector mode is set to JS");
+
+            #[cfg(feature = "driver-adapters")]
+            driver_adapter(source, url, features).await
+        }
+
+        ConnectorMode::Rust => {
+            if let Ok(value) = env::var("PRISMA_DISABLE_QUAINT_EXECUTORS") {
+                let disable = value.to_uppercase();
+                if disable == "TRUE" || disable == "1" {
+                    panic!("Quaint executors are disabled, as per env var PRISMA_DISABLE_QUAINT_EXECUTORS.");
+                }
+            }
 
-    match source.active_provider {
-        p if SQLITE.is_provider(p) => sqlite(source, url, features).await,
-        p if MYSQL.is_provider(p) => mysql(source, url, features).await,
-        p if POSTGRES.is_provider(p) => postgres(source, url, features).await,
-        p if MSSQL.is_provider(p) => mssql(source, url, features).await,
-        p if COCKROACH.is_provider(p) => postgres(source, url, features).await,
+            match source.active_provider {
+                p if SQLITE.is_provider(p) => sqlite(source, url, features).await,
+                p if MYSQL.is_provider(p) => mysql(source, url, features).await,
+                p if POSTGRES.is_provider(p) => postgres(source, url, features).await,
+                p if MSSQL.is_provider(p) => mssql(source, url, features).await,
+                p if COCKROACH.is_provider(p) => postgres(source, url, features).await,
 
-        #[cfg(feature = "mongodb")]
-        p if MONGODB.is_provider(p) => mongodb(source, url, features).await,
+                #[cfg(feature = "mongodb")]
+                p if MONGODB.is_provider(p) => mongodb(source, url, features).await,
 
-        x => Err(query_core::CoreError::ConfigurationError(format!(
-            "Unsupported connector type: {x}"
-        ))),
+                x => Err(query_core::CoreError::ConfigurationError(format!(
+                    "Unsupported connector type: {x}"
+                ))),
+            }
+        }
     }
 }
diff --git a/query-engine/request-handlers/src/response.rs b/query-engine/request-handlers/src/response.rs
index af99835813e..a196daade4b 100644
--- a/query-engine/request-handlers/src/response.rs
+++ b/query-engine/request-handlers/src/response.rs
@@ -9,26 +9,26 @@ use crate::HandlerError;
 #[derive(Debug, serde::Serialize, Default, PartialEq)]
 pub struct GQLResponse {
     #[serde(skip_serializing_if = "IndexMap::is_empty")]
-    data: Map,
+    pub data: Map,
 
     #[serde(skip_serializing_if = "Vec::is_empty")]
-    errors: Vec<GQLError>,
+    pub errors: Vec<GQLError>,
 
     #[serde(skip_serializing_if = "IndexMap::is_empty")]
-    extensions: Map,
+    pub extensions: Map,
 }
 
 #[derive(Debug, serde::Serialize, Default, PartialEq)]
 #[serde(rename_all = "camelCase")]
 pub struct GQLBatchResponse {
     #[serde(skip_serializing_if = "Vec::is_empty")]
-    batch_result: Vec<GQLResponse>,
+    pub batch_result: Vec<GQLResponse>,
 
     #[serde(skip_serializing_if = "Vec::is_empty")]
-    errors: Vec<GQLError>,
+    pub errors: Vec<GQLError>,
 
     #[serde(skip_serializing_if = "IndexMap::is_empty")]
-    extensions: Map,
+    pub extensions: Map,
 }
 
 #[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq)]
diff --git a/query-engine/schema/src/build/input_types/objects/filter_objects.rs b/query-engine/schema/src/build/input_types/objects/filter_objects.rs
index b8af982182a..0ea555f7772 100644
--- a/query-engine/schema/src/build/input_types/objects/filter_objects.rs
+++ b/query-engine/schema/src/build/input_types/objects/filter_objects.rs
@@ -113,6 +113,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In
         .indexes()
         .filter(|idx| idx.is_unique())
         .filter(|index| index.fields().len() > 1)
+        .filter(|index| !index.fields().any(|f| f.is_unsupported()))
         .map(|index| {
             let fields = index
                 .fields()
@@ -130,6 +131,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In
         .walk(model.id)
         .primary_key()
         .filter(|pk| pk.fields().len() > 1)
+        .filter(|pk| !pk.fields().any(|f| f.is_unsupported()))
         .map(|pk| {
             let name = compound_id_field_name(pk);
             let fields = model.fields().id_fields().unwrap().collect();
diff --git a/schema-engine/sql-schema-describer/src/postgres.rs b/schema-engine/sql-schema-describer/src/postgres.rs
index df7e79ccea7..0263b3e044c 100644
--- a/schema-engine/sql-schema-describer/src/postgres.rs
+++ b/schema-engine/sql-schema-describer/src/postgres.rs
@@ -888,6 +888,7 @@ impl<'a> SqlSchemaDescriber<'a> {
                 FROM pg_class
                 JOIN pg_namespace on pg_namespace.oid = pg_class.relnamespace
                 AND pg_namespace.nspname = ANY ( $1 )
+                WHERE reltype > 0
             ) as oid on oid.oid = att.attrelid
             AND relname = info.table_name
             AND namespace = info.table_schema
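For reference, a minimal sketch of how the helpers added in query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts can be wired up with the pg driver adapter. The engine.connect()/engine.disconnect() calls and the standalone main() wrapper are assumptions (they mirror how the smoke tests drive the engine, but are not shown in this diff); everything else uses only the APIs visible above.

// Illustrative sketch only: wiring initQueryEngine/createQueryFn to a bound pg adapter.
import pg from 'pg'
import { PrismaPg } from '@prisma/adapter-pg'
import { bindAdapter } from '@prisma/driver-adapter-utils'
import { initQueryEngine, createQueryFn } from './util'

async function main() {
  // Same setup as pg.test.ts: a pg Pool wrapped by the Prisma pg adapter, with error capturing.
  const pool = new pg.Pool({ connectionString: process.env.JS_PG_DATABASE_URL ?? '' })
  const adapter = bindAdapter(new PrismaPg(pool))

  // initQueryEngine loads libquery_engine from target/debug and binds the adapter to it.
  const engine = initQueryEngine(adapter, '../../prisma/postgres/schema.prisma')
  const doQuery = createQueryFn(engine, adapter)

  // Assumption: the engine is connected before issuing queries, as in the smoke tests.
  await engine.connect('trace')

  // JSON-protocol query, run inside an interactive transaction (mirrors testTransaction above).
  const startResponse = await engine.startTransaction(
    JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }),
    'trace',
  )
  const txId = JSON.parse(startResponse).id

  const authors = await doQuery(
    { modelName: 'Author', action: 'findMany', query: { selection: { $scalars: true } } },
    txId,
  )
  console.log('[nodejs] authors', JSON.stringify(authors, null, 2))

  await engine.commitTransaction(txId, 'trace')
  await engine.disconnect('trace') // assumed counterpart to connect
}

main().catch((e) => {
  console.error(e)
  process.exit(1)
})

Errors with code P2036 raised by the driver adapter are rethrown as the original JS error via throwAdapterError, which createQueryFn already applies to every engine response.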