diff --git a/.buildkite/engineer b/.buildkite/engineer
index 0b1adc2d8011..71c1211a83b4 100755
--- a/.buildkite/engineer
+++ b/.buildkite/engineer
@@ -1,5 +1,47 @@
 #!/usr/bin/env bash
+set -e
+
+if [[ -z "$2" ]]; then
+  printf "Error: the name of the pipeline must be provided.\nExample: './engineer pipeline test'\n" 1>&2
+  exit 1
+else
+  echo "We are in the $2 pipeline."
+fi
+
+# Check the diff against the previous commit.
+# This is used to detect whether the previous commit was empty.
+GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .)
+
+# Check the diff against the previous commit again, this time excluding
+# paths that do not need a run because they do not affect tests running in Buildkite.
+GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!query-engine/query-engine-wasm' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';)
+
+# $2 is either "test" or "build", depending on the pipeline.
+# Example: ./.buildkite/engineer pipeline test
+# We only want to check for changes, and potentially skip, in the test pipeline.
+if [[ "$2" == "test" ]]; then
+  # If GIT_DIFF is empty, the previous commit was empty.
+  # We assume that is intended and continue with the run.
+  # Example use: getting a new engine hash built from identical code.
+  if [ -z "${GIT_DIFF}" ]; then
+    echo "The previous commit is empty, this run will continue..."
+  else
+    # Check whether GIT_DIFF_WITH_IGNORED_PATHS is empty.
+    # If it is, there are changes, but only in ignored paths,
+    # so we do not start Buildkite.
+    if [ -z "${GIT_DIFF_WITH_IGNORED_PATHS}" ]; then
+      echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped."
+      exit 0
+    else
+      # Note that printf works better than echo for displaying line returns in CI.
+      printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF_WITH_IGNORED_PATHS}"
+    fi
+  fi
+fi
+
+# Check OS
 if [[ "$OSTYPE" == "linux-gnu" ]]; then
   OS=linux-amzn
 elif [[ "$OSTYPE" == "darwin"* ]]; then
@@ -12,8 +54,7 @@ fi
 # Check if the system has engineer installed, if not, use a local copy.
 if ! type "engineer" &> /dev/null; then
   # Setup Prisma engine build & test tool (engineer).
-  set -e
-  curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.59/latest/$OS/engineer.gz" --output engineer.gz
+  curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.63/latest/$OS/engineer.gz" --output engineer.gz
   gzip -d engineer.gz
   chmod +x engineer

@@ -22,6 +63,5 @@ if ! 
type "engineer" &> /dev/null; then rm -rf ./engineer else # Already installed on the system - set -e engineer "$@" fi diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml deleted file mode 100644 index aa653ea7a57d..000000000000 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: Driver Adapters, Smoke Tests -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/driver-adapter-smoke-tests.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -jobs: - driver-adapter-smoke-tests: - name: ${{ matrix.adapter }} - - strategy: - fail-fast: false - matrix: - adapter: ['neon:ws', 'neon:http', planetscale, pg, libsql] - - runs-on: ubuntu-latest - - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - env: - JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} - JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} - # TODO: test sqld and embedded replicas - JS_LIBSQL_DATABASE_URL: file:/tmp/libsql.db - # TODO: test all three of ("number", "bigint", "string") and conditionally skip some tests as appropriate - JS_LIBSQL_INT_MODE: bigint - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - run: pnpm prisma:${{ matrix.adapter }} - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - run: pnpm ${{ matrix.adapter }}:libquery - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine! 
- from Npm) - run: pnpm ${{ matrix.adapter }}:client - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - - name: pnpm errors - run: pnpm errors - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml deleted file mode 100644 index 7da972c35e1b..000000000000 --- a/.github/workflows/publish-driver-adapters.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Build and publish Prisma Driver Adapters -run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} - -concurrency: publish-prisma-driver-adapters - -on: - # usually triggered via GH Actions Workflow in prisma/prisma repo - workflow_dispatch: - inputs: - enginesHash: - description: Engine commit hash to checkout for publishing - required: true - prismaVersion: - description: Prisma version to use for publishing - required: true - npmDistTag: - description: npm dist-tag to use for publishing - required: true - default: 'latest' - dryRun: - description: 'Check to do a dry run (does not publish packages)' - type: boolean - -jobs: - build: - name: Build and publish Prisma Driver Adapters - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - - uses: pnpm/action-setup@v2.4.0 - with: - version: 8 - - - uses: actions/setup-node@v3 - with: - node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' - - - name: Install dependencies - run: pnpm i - working-directory: query-engine/driver-adapters/js - - - name: Build - run: pnpm -r build - working-directory: query-engine/driver-adapters/js - - - name: Update version in package.json - run: | - # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result - find . 
-name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - working-directory: query-engine/driver-adapters/js - - - name: Publish Prisma Driver Adapters packages - run: | - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} - working-directory: query-engine/driver-adapters/js - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} - - # - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma driver adapters publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index e166c05e5841..3d4951da0427 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -1,6 +1,7 @@ name: Build and publish @prisma/prisma-schema-wasm +run-name: npm - release @prisma/prisma-schema-wasm@${{ github.event.inputs.enginesWrapperVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} -concurrency: build-prisma-schema-wasm +concurrency: publish-prisma-schema-wasm on: # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo @@ -12,7 +13,7 @@ on: required: true npmDistTag: required: true - default: "latest" + default: 'latest' jobs: build: @@ -21,7 +22,7 @@ jobs: steps: - name: Print input env: - THE_INPUT: "${{ toJson(github.event.inputs) }}" + THE_INPUT: '${{ toJson(github.event.inputs) }}' run: | echo $THE_INPUT @@ -30,29 +31,22 @@ jobs: ref: ${{ github.event.inputs.enginesHash }} - uses: cachix/install-nix-action@v23 - # - # Build - # - - - run: nix build .#prisma-schema-wasm - - # - # Publish - # + - name: Build + run: nix build .#prisma-schema-wasm - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: - node-version: "14.x" + node-version: '20.x' - - name: Set up NPM token + # This is needed to be done manually because of `PACKAGE_DIR` used later + - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - - run: | + - name: Update version in package.json & Publish @prisma/prisma-schema-wasm + run: | + # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - # # Failure handlers # @@ -60,11 +54,10 @@ jobs: - name: Set current job url in SLACK_FOOTER env var if: ${{ failure() }} run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 env: - SLACK_TITLE: "prisma-schema-wasm publishing failed :x:" - SLACK_COLOR: "#FF0000" + SLACK_TITLE: 'prisma-schema-wasm publishing failed 
:x:' + SLACK_COLOR: '#FF0000' SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml index 78e60178d7f7..5ebcd79cec4c 100644 --- a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -50,6 +50,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index dea1726c56c9..3de0238aa0e7 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -44,12 +44,14 @@ jobs: QUERY_BATCH_SIZE: '10' WORKSPACE_ROOT: ${{ github.workspace }} - runs-on: buildjet-16vcpu-ubuntu-2004 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: 'Setup Node.js' - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: ${{ matrix.node_version }} @@ -66,10 +68,23 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract Branch Name + id: extract-branch + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n "$branch" ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi + - run: make ${{ matrix.adapter.setup_task }} - uses: dtolnay/rust-toolchain@stable diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 9c242217662d..762c3da4a50a 100644 --- a/.github/workflows/query-engine.yml +++ b/.github/workflows/query-engine.yml @@ -80,6 +80,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml index 5bdf25a2bd35..03d23317bbd0 100644 --- a/.github/workflows/schema-engine.yml +++ b/.github/workflows/schema-engine.yml @@ -113,6 +113,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 43e03e31867d..75c06e9ce68b 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,7 @@ dmmf.json graph.dot prisma-schema-wasm/nodejs + +# Ignore pnpm-lock.yaml 
+query-engine/driver-adapters/pnpm-lock.yaml +package-lock.json diff --git a/CODEOWNERS b/CODEOWNERS index c1a996de1f21..cb8fc144133d 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @prisma/team-orm-rust +* @prisma/ORM-Rust diff --git a/Cargo.lock b/Cargo.lock index 3002a1404210..167ae495efbb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -326,6 +326,7 @@ dependencies = [ "query-engine-metrics", "query-engine-tests", "query-tests-setup", + "regex", "reqwest", "serde_json", "tokio", @@ -672,6 +673,16 @@ dependencies = [ "windows-sys 0.45.0", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -860,29 +871,26 @@ dependencies = [ [[package]] name = "cuid" version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51294db11d38eb763c92936c5c88425d0090e27dce21dd15748134af9e53e739" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "base36", "cuid-util", "cuid2", - "hostname", "num", "once_cell", "rand 0.8.5", + "sha3", ] [[package]] name = "cuid-util" version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea2bfe0336ff1b7ca74819b2df8dfae9afea358aff6b1688baa5c181d8c3713" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" [[package]] name = "cuid2" version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47d99cacd52fd67db7490ad051c8c1973fb75520174d69aabbae08c534c9d0e8" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "cuid-util", "num", @@ -1074,8 +1082,8 @@ dependencies = [ "indoc 2.0.3", "itertools", "pretty_assertions", - "prisma-models", "psl", + "query-structure", "schema", "serde", "serde_json", @@ -1547,8 +1555,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -1563,6 +1573,19 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "graphql-parser" version = "0.3.0" @@ -2374,9 +2397,9 @@ dependencies = [ [[package]] name = "mobc" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bdeff49b387edef305eccfe166af3e1483bb57902dbf369dddc42dc824df23b" +checksum = "90eb49dc5d193287ff80e72a86f34cfb27aae562299d22fea215e06ea1059dd3" dependencies = [ "async-trait", "futures-channel", @@ -2463,11 +2486,11 @@ dependencies = [ "itertools", "mongodb", "mongodb-client", - "prisma-models", "prisma-value", "psl", "query-connector", "query-engine-metrics", + "query-structure", "rand 0.7.3", "regex", "serde", @@ 
-3323,21 +3346,6 @@ dependencies = [ "structopt", ] -[[package]] -name = "prisma-models" -version = "0.0.0" -dependencies = [ - "bigdecimal", - "chrono", - "cuid", - "itertools", - "nanoid", - "prisma-value", - "psl", - "thiserror", - "uuid", -] - [[package]] name = "prisma-schema-build" version = "0.1.0" @@ -3628,8 +3636,8 @@ dependencies = [ "futures", "indexmap 1.9.3", "itertools", - "prisma-models", "prisma-value", + "query-structure", "serde", "serde_json", "thiserror", @@ -3655,10 +3663,10 @@ dependencies = [ "once_cell", "opentelemetry", "petgraph 0.4.13", - "prisma-models", "psl", "query-connector", "query-engine-metrics", + "query-structure", "schema", "serde", "serde_json", @@ -3738,12 +3746,12 @@ dependencies = [ "napi-build", "napi-derive", "opentelemetry", - "prisma-models", "psl", "quaint", "query-connector", "query-core", "query-engine-metrics", + "query-structure", "request-handlers", "serde", "serde_json", @@ -3784,6 +3792,51 @@ dependencies = [ "uuid", ] +[[package]] +name = "query-engine-wasm" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "connection-string", + "console_error_panic_hook", + "futures", + "js-sys", + "log", + "psl", + "query-structure", + "serde", + "serde-wasm-bindgen", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "tracing-subscriber", + "tsify", + "url", + "user-facing-errors", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-logger", +] + +[[package]] +name = "query-structure" +version = "0.0.0" +dependencies = [ + "bigdecimal", + "chrono", + "cuid", + "getrandom 0.2.10", + "itertools", + "nanoid", + "prisma-value", + "psl", + "thiserror", + "uuid", +] + [[package]] name = "query-test-macros" version = "0.1.0" @@ -3809,13 +3862,13 @@ dependencies = [ "nom", "once_cell", "parse-hyperlinks", - "prisma-models", "psl", "qe-setup", "quaint", "query-core", "query-engine", "query-engine-metrics", + "query-structure", "regex", "request-handlers", "serde", @@ -4097,10 +4150,10 @@ dependencies = [ "itertools", "mongodb-query-connector", "once_cell", - "prisma-models", "psl", "quaint", "query-core", + "query-structure", "schema", "serde", "serde_json", @@ -4368,8 +4421,8 @@ version = "0.1.0" dependencies = [ "codspeed-criterion-compat", "once_cell", - "prisma-models", "psl", + "query-structure", "rustc-hash", ] @@ -4533,6 +4586,17 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3b143e2833c57ab9ad3ea280d21fd34e285a42837aeb0ee301f4f41890fa00e" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + [[package]] name = "serde_bytes" version = "0.11.12" @@ -4553,6 +4617,17 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "serde_derive_internals" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + [[package]] name = "serde_json" version = "1.0.104" @@ -4859,11 +4934,11 @@ dependencies = [ "itertools", "once_cell", "opentelemetry", - "prisma-models", "prisma-value", "psl", "quaint", "query-connector", + "query-structure", "rand 0.7.3", "serde", "serde_json", @@ -5663,6 +5738,31 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +[[package]] +name = "tsify" +version = "0.4.5" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b26cf145f2f3b9ff84e182c448eaf05468e247f148cf3d2a7d67d78ff023a0" +dependencies = [ + "gloo-utils", + "serde", + "serde_json", + "tsify-macros", + "wasm-bindgen", +] + +[[package]] +name = "tsify-macros" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a94b0f0954b3e59bfc2c246b4c8574390d94a4ad4ad246aaf2fb07d7dfd3b47" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.28", +] + [[package]] name = "twox-hash" version = "1.6.3" diff --git a/Cargo.toml b/Cargo.toml index 4499033a624b..4892eba3e497 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,9 +21,10 @@ members = [ "query-engine/dmmf", "query-engine/driver-adapters", "query-engine/metrics", - "query-engine/prisma-models", + "query-engine/query-structure", "query-engine/query-engine", "query-engine/query-engine-node-api", + "query-engine/query-engine-wasm", "query-engine/request-handlers", "query-engine/schema", "libs/*", diff --git a/Makefile b/Makefile index 3a683b824e3b..e00c122e2713 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -44,7 +45,13 @@ release: ################# test-qe: +ifndef DRIVER_ADAPTER cargo test --package query-engine-tests +else + @echo "Executing query engine tests with $(DRIVER_ADAPTER) driver adapter"; \ + # Add your actual command for the "test-driver-adapter" task here + $(MAKE) test-driver-adapter-$(DRIVER_ADAPTER); +endif test-qe-verbose: cargo test --package query-engine-tests -- --nocapture @@ -67,7 +74,7 @@ test-qe-black-box: build-qe ########################### all-dbs-up: - docker compose -f docker-compose.yml up -d --remove-orphans + docker compose -f docker-compose.yml up --wait -d --remove-orphans all-dbs-down: docker compose -f docker-compose.yml down -v --remove-orphans @@ -80,32 +87,36 @@ dev-sqlite: dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) +test-libsql-sqlite: dev-libsql-sqlite test-qe-st + +test-driver-adapter-libsql: test-libsql-sqlite + start-postgres9: - docker compose -f docker-compose.yml up -d --remove-orphans postgres9 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 dev-postgres9: start-postgres9 cp $(CONFIG_PATH)/postgres9 $(CONFIG_FILE) start-postgres10: - docker compose -f docker-compose.yml up -d --remove-orphans postgres10 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres10 dev-postgres10: start-postgres10 cp $(CONFIG_PATH)/postgres10 $(CONFIG_FILE) start-postgres11: - docker compose -f docker-compose.yml up -d --remove-orphans postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres11 dev-postgres11: start-postgres11 cp $(CONFIG_PATH)/postgres11 $(CONFIG_FILE) start-postgres12: - docker compose -f docker-compose.yml up -d --remove-orphans postgres12 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres12 dev-postgres12: start-postgres12 cp $(CONFIG_PATH)/postgres12 $(CONFIG_FILE) start-postgres13: - docker compose -f docker-compose.yml up -d --remove-orphans postgres13 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres13 dev-postgres13: start-postgres13 cp 
$(CONFIG_PATH)/postgres13 $(CONFIG_FILE) @@ -115,121 +126,129 @@ start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13 dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) -start-neon-postgres13: build-qe-napi build-connector-kit-js - docker compose -f docker-compose.yml up -d --remove-orphans neon-postgres13 +test-pg-postgres13: dev-pg-postgres13 test-qe-st + +test-driver-adapter-pg: test-pg-postgres13 -dev-neon-ws-postgres13: start-neon-postgres13 +start-neon-postgres13: + docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 + +dev-neon-ws-postgres13: start-neon-postgres13 build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) +test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st + +test-driver-adapter-neon: test-neon-ws-postgres13 + start-postgres14: - docker compose -f docker-compose.yml up -d --remove-orphans postgres14 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 dev-postgres14: start-postgres14 cp $(CONFIG_PATH)/postgres14 $(CONFIG_FILE) start-postgres15: - docker compose -f docker-compose.yml up -d --remove-orphans postgres15 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres15 dev-postgres15: start-postgres15 cp $(CONFIG_PATH)/postgres15 $(CONFIG_FILE) start-cockroach_23_1: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_23_1 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_23_1 dev-cockroach_23_1: start-cockroach_23_1 cp $(CONFIG_PATH)/cockroach_23_1 $(CONFIG_FILE) start-cockroach_22_2: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_2 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_2 dev-cockroach_22_2: start-cockroach_22_2 cp $(CONFIG_PATH)/cockroach_22_2 $(CONFIG_FILE) start-cockroach_22_1_0: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_1_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_1_0 dev-cockroach_22_1_0: start-cockroach_22_1_0 cp $(CONFIG_PATH)/cockroach_22_1 $(CONFIG_FILE) start-cockroach_21_2_0_patched: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_21_2_0_patched + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_21_2_0_patched dev-cockroach_21_2_0_patched: start-cockroach_21_2_0_patched cp $(CONFIG_PATH)/cockroach_21_2_0_patched $(CONFIG_FILE) dev-pgbouncer: - docker compose -f docker-compose.yml up -d --remove-orphans pgbouncer postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans pgbouncer postgres11 start-mysql_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-7 dev-mysql: start-mysql_5_7 cp $(CONFIG_PATH)/mysql57 $(CONFIG_FILE) start-mysql_5_6: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-6 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-6 dev-mysql_5_6: start-mysql_5_6 cp $(CONFIG_PATH)/mysql56 $(CONFIG_FILE) start-mysql_8: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-8-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-8-0 dev-mysql8: start-mysql_8 cp $(CONFIG_PATH)/mysql8 $(CONFIG_FILE) start-mysql_mariadb: - docker compose -f docker-compose.yml up -d --remove-orphans mariadb-10-0 + docker compose -f docker-compose.yml 
up --wait -d --remove-orphans mariadb-10-0 dev-mariadb: start-mysql_mariadb cp $(CONFIG_PATH)/mariadb $(CONFIG_FILE) start-mssql_2019: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2019 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2019 dev-mssql2019: start-mssql_2019 cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2022: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2022 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2022 dev-mssql2022: start-mssql_2022 cp $(CONFIG_PATH)/sqlserver2022 $(CONFIG_FILE) start-mssql_edge: - docker compose -f docker-compose.yml up -d --remove-orphans azure-edge + docker compose -f docker-compose.yml up --wait -d --remove-orphans azure-edge dev-mssql_edge: start-mssql_edge cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2017: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2017 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2017 dev-mssql2017: start-mssql_2017 cp $(CONFIG_PATH)/sqlserver2017 $(CONFIG_FILE) start-mongodb42-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42-single start-mongodb44-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44-single start-mongodb4-single: start-mongodb44-single start-mongodb5-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5-single start-mongodb_4_2: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42 start-mongodb_4_4: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44 dev-mongodb_4_4: start-mongodb_4_4 cp $(CONFIG_PATH)/mongodb44 $(CONFIG_FILE) start-mongodb_5: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5 dev-mongodb_5: start-mongodb_5 cp $(CONFIG_PATH)/mongodb5 $(CONFIG_FILE) @@ -238,17 +257,27 @@ dev-mongodb_4_2: start-mongodb_4_2 cp $(CONFIG_PATH)/mongodb42 $(CONFIG_FILE) start-vitess_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 dev-vitess_5_7: start-vitess_5_7 cp $(CONFIG_PATH)/vitess_5_7 $(CONFIG_FILE) start-vitess_8_0: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) +start-planetscale-vitess8: + docker compose -f docker-compose.yml up -d --remove-orphans planetscale-vitess8 + +dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi build-connector-kit-js + cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) + +test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st + +test-driver-adapter-planetscale: test-planetscale-vitess8 + ###################### # Local dev commands # ###################### @@ -256,8 +285,25 @@ dev-vitess_8_0: start-vitess_8_0 
build-qe-napi:
 	cargo build --package query-engine-node-api

-build-connector-kit-js:
-	cd query-engine/driver-adapters/js && pnpm i && pnpm build
+build-connector-kit-js: build-driver-adapters
+	cd query-engine/driver-adapters && pnpm i && pnpm build
+
+build-driver-adapters: ensure-prisma-present
+	@echo "Building driver adapters..."
+	@cd ../prisma && pnpm --filter "*adapter*" i
+	@echo "Driver adapters build completed.";
+
+ensure-prisma-present:
+	@if [ -d ../prisma ]; then \
+		cd "$(realpath ../prisma)" && git fetch origin main; \
+		LOCAL_CHANGES=$$(git diff --name-only HEAD origin/main -- 'packages/*adapter*'); \
+		if [ -n "$$LOCAL_CHANGES" ]; then \
+			echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \
+		fi \
+	else \
+		echo "git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) ../prisma"; \
+		git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \
+	fi;

 # Quick schema validation of whatever you have in the dev_datamodel.prisma file.
 validate:
@@ -293,7 +339,7 @@ use-local-query-engine:
 	cp target/release/query-engine $(PRISMA2_BINARY_PATH)/query-engine-darwin

 show-metrics:
-	docker compose -f docker-compose.yml up -d --remove-orphans grafana prometheus
+	docker compose -f docker-compose.yml up --wait -d --remove-orphans grafana prometheus

 ## OpenTelemetry
 otel:
diff --git a/README.md b/README.md
index 6fd072072757..c28a53a6d657 100644
--- a/README.md
+++ b/README.md
@@ -203,6 +203,7 @@ integration tests.
 - Alternatively: Load the defined environment in `./.envrc` manually in your shell.

 **Setup:**
+
 There are helper `make` commands to set up a test environment for a specific
 database connector you want to test. The commands set up a container (if needed)
 and write the `.test_config` file, which is picked up by the integration

@@ -234,31 +235,82 @@ Other variables may or may not be useful.

 Run `cargo test` in the repository root.

-## Parallel rust-analyzer builds
+### Testing driver adapters

-When rust-analzyer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer. It is possible to avoid this by setting a different build path for
-rust-analyzer. To avoid this. Open VSCode settings and search for `Check on Save: Extra Args`. Look for the `Rust-analyzer › Check On Save: Extra Args` settings and add a new directory for rust-analyzer. Something like:
+Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README.
+
+**ℹ️ Important note on developing features that require changes to both the query engine and the driver adapters code**
+
+As explained in [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters), running `DRIVER_ADAPTER=$adapter make test-qe`
+will ensure you have the prisma/prisma repository checked out in your filesystem next to the prisma-engines directory. This is needed because the driver adapters code is symlinked in prisma-engines.
+
+When working on a feature or bugfix spanning the adapters code and the query-engine code, you will need to open sibling PRs in `prisma/prisma` and `prisma/prisma-engines` respectively. A typical local run during such work is shown in the sketch below.
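+For example, a minimal local loop might look like this (a sketch assuming the `pg` adapter; the targets are the ones defined in the Makefile above):
+
+```
+# Clones prisma/prisma to ../prisma if it is missing, builds the driver adapters,
+# then runs the query engine test suite against the adapter.
+DRIVER_ADAPTER=pg make test-qe
+```
+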
+Locally, each time you run `DRIVER_ADAPTER=$adapter make test-qe`, tests run using the driver adapters built from the source code in the working copy of prisma/prisma. All good.
+
+In CI, though, we need to specify which branch of prisma/prisma to use for tests, since in CI there is no working copy of prisma/prisma before the tests run.
+The CI job clones the `main` branch of prisma/prisma by default, which doesn't include your local changes. To test the integration, we can tell CI to use the branch of prisma/prisma that contains
+the adapter changes, using a simple convention in commit messages:

 ```
--target-dir:/tmp/rust-analyzer-check
+git commit -m "DRIVER_ADAPTERS_BRANCH=prisma-branch-with-changes-in-adapters [...]"
 ```

-### Automated integration releases from this repository to npm
+GitHub Actions will then pick up the branch name and use it to clone that branch of prisma/prisma and build the driver adapters code from there.

-(Since July 2022). Any branch name starting with `integration/` will, first, run the full test suite and, second, if passing, run the publish pipeline (build and upload engines to S3)
+When it's time to merge the sibling PRs, you'll need to merge the prisma/prisma PR first, so that when you merge the engines PR, the adapters code is already in the prisma/prisma `main` branch.
+
+### Testing engines in `prisma/prisma`
+
+You can trigger releases from this repository to npm that can be used for testing the engines in `prisma/prisma`, either automatically or manually:
+
+#### Automated integration releases from this repository to npm
+
+(Since July 2022.) Any branch name starting with `integration/` will, first, run the full test suite in the Buildkite `[Test] Prisma Engines` pipeline and, second, if it passes, run the publish pipeline (build and upload engines to S3 & R2).

 The journey through the pipeline is the same as a commit on the `main` branch.

-- It will trigger [prisma/engines-wrapper](https://github.com/prisma/engines-wrapper) and publish a new [`@prisma/engines-version`](https://www.npmjs.com/package/@prisma/engines-version) npm package but on the `integration` tag.
-- Which triggers [prisma/prisma](https://github.com/prisma/prisma) to create a `chore(Automated Integration PR): [...]` PR with a branch name also starting with `integration/`
-- Since in prisma/prisma we also trigger the publish pipeline when a branch name starts with `integration/`, this will publish all prisma/prisma monorepo packages to npm on the `integration` tag.
+- It will trigger [`prisma/engines-wrapper`](https://github.com/prisma/engines-wrapper) and publish a new [`@prisma/engines-version`](https://www.npmjs.com/package/@prisma/engines-version) npm package, but on the `integration` tag.
+- This triggers [`prisma/prisma`](https://github.com/prisma/prisma) to create a `chore(Automated Integration PR): [...]` PR with a branch name also starting with `integration/`.
+- Since in `prisma/prisma` we also trigger the publish pipeline when a branch name starts with `integration/`, this will publish all `prisma/prisma` monorepo packages to npm on the `integration` tag.
- Our [ecosystem-tests](https://github.com/prisma/ecosystem-tests/) tests will automatically pick up this new version and run tests, results will show in [GitHub Actions](https://github.com/prisma/ecosystem-tests/actions?query=branch%3Aintegration)

 This end to end will take minimum ~1h20 to complete, but is completely automated :robot:

 Notes:

-- in prisma/prisma repository, we do not run tests for `integration/` branches, it is much faster and also means that there is no risk of test failing (e.g. flaky tests, snapshots) that would stop the publishing process.
-- in prisma/prisma-engines tests must first pass, before publishing starts. So better keep an eye on them and restart them as needed.
+- in the `prisma/prisma` repository, we do not run tests for `integration/` branches; this is much faster and also means that there is no risk of tests failing (e.g. flaky tests, snapshots) that would stop the publishing process.
+- in `prisma/prisma-engines`, the Buildkite test pipeline must first pass; then the engines will be built and uploaded to our storage via the Buildkite release pipeline. These two pipelines can fail for different reasons; it's recommended to keep an eye on them (check the notifications in Slack) and restart jobs as needed. Finally, it will trigger [`prisma/engines-wrapper`](https://github.com/prisma/engines-wrapper).
+
+#### Manual integration releases from this repository to npm
+
+In addition to the automated integration releases for `integration/` branches, you can also trigger a publish **manually** in the Buildkite `[Test] Prisma Engines` job if it succeeds for _any_ branch name. Click "🚀 Publish binaries" at the bottom of the test list to unlock the publishing step. When all the jobs in `[Release] Prisma Engines` succeed, you also have to unlock the next step by clicking "🚀 Publish client". This will then trigger the same journey as described above.
+
+## Parallel rust-analyzer builds
+
+When rust-analyzer runs `cargo check` it locks the build directory and stops any cargo commands from running until it has completed, which makes the build process feel a lot longer. You can avoid this by setting a different build path for rust-analyzer: open the VSCode settings, search for `Check on Save: Extra Args`, and add a separate target directory for rust-analyzer to the `Rust-analyzer › Check On Save: Extra Args` setting. Something like:
+
+```
+--target-dir=/tmp/rust-analyzer-check
+```
+
+## Community PRs: create a local branch for a branch coming from a fork
+
+To trigger an [automated integration release](#automated-integration-releases-from-this-repository-to-npm) or a [manual integration release](#manual-integration-releases-from-this-repository-to-npm), branches of forks need to be pulled into this repository so the Buildkite job is triggered. You can use these GitHub and git CLI commands to achieve that easily:
+
+```
+gh pr checkout 4375
+git checkout -b integration/sql-nested-transactions
+git push --set-upstream origin integration/sql-nested-transactions
+```
+
+If the branch needs to be re-created because the PR has been updated, deleting and re-creating it makes sure the content is identical and avoids any conflicts.
+ +``` +git branch --delete integration/sql-nested-transactions +gh pr checkout 4375 +git checkout -b integration/sql-nested-transactions +git push --set-upstream origin integration/sql-nested-transactions --force +``` ## Security diff --git a/docker-compose.yml b/docker-compose.yml index 1988f864d304..a8b48748abc4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,113 +1,115 @@ -version: "3" +version: '3' services: cockroach_23_1: image: prismagraphql/cockroachdb-custom:23.1 + restart: unless-stopped command: | start-single-node --insecure ports: - - "26260:26257" + - '26260:26257' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases cockroach_22_1_0: image: prismagraphql/cockroachdb-custom:22.1.0 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26257:26257" + - '26257:26257' networks: - databases cockroach_21_2_0_patched: image: prismagraphql/cockroachdb-custom:21.2.0-patched - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26258:26257" + - '26258:26257' networks: - databases pgbouncer: image: brainsam/pgbouncer:latest - restart: always + restart: unless-stopped environment: - DB_HOST: "postgres11" - DB_PORT: "5432" - DB_USER: "postgres" - DB_PASSWORD: "prisma" - POOL_MODE: "transaction" - MAX_CLIENT_CONN: "1000" + DB_HOST: 'postgres11' + DB_PORT: '5432' + DB_USER: 'postgres' + DB_PASSWORD: 'prisma' + POOL_MODE: 'transaction' + MAX_CLIENT_CONN: '1000' networks: - databases ports: - - "6432:6432" + - '6432:6432' postgres9: image: postgres:9.6 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5431:5432" + - '5431:5432' networks: - databases postgres10: image: postgres:10 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5432:5432" + - '5432:5432' networks: - databases postgres11: image: postgres:11 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5433:5432" + - '5433:5432' networks: - databases postgres12: image: postgres:12 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5434:5432" + - '5434:5432' networks: - databases postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5435:5432" + - '5435:5432' networks: - databases neon-postgres13: image: ghcr.io/neondatabase/wsproxy:latest + restart: unless-stopped environment: # the port of the postgres13 within the databases network APPEND_PORT: 'postgres13:5432' @@ -121,58 +123,74 @@ services: networks: - databases + planetscale-vitess8: + build: ./docker/planetscale_proxy + environment: + MYSQL_HOST: 'vitess-test-8_0' + MYSQL_PORT: 33807 + MYSQL_DATABASE: 'test' + ports: + - '8085:8085' + depends_on: + - vitess-test-8_0 + restart: unless-stopped + healthcheck: + test: ['CMD', 'nc', '-z', '127.0.0.1', '8085'] + interval: 5s + timeout: 2s + 
retries: 20 + postgres14: image: postgres:14 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5437:5432" + - '5437:5432' networks: - databases postgres15: image: postgres:15 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5438:5432" + - '5438:5432' networks: - databases mysql-5-6: image: mysql:5.6.50 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3309:3306" + - '3309:3306' networks: - databases tmpfs: /var/lib/mysql mysql-5-7: - image: mysql:5.7.32 + image: mysql:5.7.44 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: - MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql @@ -180,219 +198,246 @@ services: mysql-8-0: image: mysql:8.0.28 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb-10-0: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb vitess-test-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33577:33577 environment: PORT: 33574 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-test-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33807:33807 environment: PORT: 33804 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33578:33577 environment: PORT: 33574 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + 
KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33808:33807 environment: PORT: 33804 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] + interval: 5s + timeout: 2s + retries: 20 mssql-2017: image: mcr.microsoft.com/mssql/server:2017-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1434:1433" + - '1434:1433' networks: - databases - + mssql-2019: image: mcr.microsoft.com/mssql/server:2019-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mssql-2022: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1435:1433" + - '1435:1433' networks: - databases azure-edge: image: mcr.microsoft.com/azure-sql-edge - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - MSSQL_SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + MSSQL_SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mongo42: image: prismagraphql/mongo-single-replica:4.2.17-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27016 INIT_WAIT_SEC: $INIT_WAIT_SEC networks: - databases ports: - - "27016:27016" + - '27016:27016' mongo44: image: prismagraphql/mongo-single-replica:4.4.3-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo42-single: image: mongo:4.2 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27016:27017" + - '27016:27017' networks: - databases mongo44-single: image: mongo:4.4 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo5: image: prismagraphql/mongo-single-replica:5.0.3 - restart: always + restart: unless-stopped 
environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27018 INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27018" + - '27018:27018' networks: - databases mongo5-single: image: mongo:5 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27017" + - '27018:27017' networks: - databases mongo-express: image: mongo-express - restart: always + restart: unless-stopped ports: - 8081:8081 environment: - ME_CONFIG_MONGODB_ADMINUSERNAME: "prisma" - ME_CONFIG_MONGODB_ADMINPASSWORD: "prisma" + ME_CONFIG_MONGODB_ADMINUSERNAME: 'prisma' + ME_CONFIG_MONGODB_ADMINPASSWORD: 'prisma' ME_CONFIG_MONGODB_URL: mongodb://prisma:prisma@mongo4-single:27017/ networks: - databases otel: image: jaegertracing/all-in-one:1.35 + restart: unless-stopped environment: - COLLECTOR_OTLP_ENABLED: "true" - COLLECTOR_ZIPKIN_HOST_PORT: ":9411" + COLLECTOR_OTLP_ENABLED: 'true' + COLLECTOR_ZIPKIN_HOST_PORT: ':9411' ports: - 6831:6831/udp - 6832:6832/udp @@ -407,6 +452,7 @@ services: prometheus: image: prom/prometheus + restart: unless-stopped volumes: - ${PWD}/metrics/prometheus:/prometheus-data command: --config.file=/prometheus-data/prometheus.yml diff --git a/docker/planetscale_proxy/Dockerfile b/docker/planetscale_proxy/Dockerfile new file mode 100644 index 000000000000..2411894d88f0 --- /dev/null +++ b/docker/planetscale_proxy/Dockerfile @@ -0,0 +1,15 @@ +FROM golang:1 + +RUN apt update && apt install netcat-openbsd -y +RUN cd /go/src && git clone https://github.com/prisma/planetscale-proxy.git +RUN cd /go/src/planetscale-proxy && go install . 
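+
+# Note: MYSQL_HOST, MYSQL_PORT and MYSQL_DATABASE referenced in the ENTRYPOINT below are
+# assumed to be supplied by the container environment, e.g. the planetscale-vitess8
+# service definition in docker-compose.yml.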
+
+ENTRYPOINT /go/bin/ps-http-sim \
+  -http-addr=0.0.0.0 \
+  -http-port=8085 \
+  -mysql-addr=$MYSQL_HOST \
+  -mysql-port=$MYSQL_PORT \
+  -mysql-idle-timeout=1200s \
+  -mysql-no-pass \
+  -mysql-max-rows=1000 \
+  -mysql-dbname=$MYSQL_DATABASE
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations.rs b/psl/psl-core/src/validate/validation_pipeline/validations.rs
index 4040844bb767..90f8ec9fe79e 100644
--- a/psl/psl-core/src/validate/validation_pipeline/validations.rs
+++ b/psl/psl-core/src/validate/validation_pipeline/validations.rs
@@ -123,7 +123,7 @@ pub(super) fn validate(ctx: &mut Context<'_>) {
         indexes::supports_clustering_setting(index, ctx);
         indexes::clustering_can_be_defined_only_once(index, ctx);
         indexes::opclasses_are_not_allowed_with_other_than_normal_indices(index, ctx);
-        indexes::composite_types_are_not_allowed_in_index(index, ctx);
+        indexes::composite_type_in_compound_unique_index(index, ctx);

         for field_attribute in index.scalar_field_attributes() {
             let span = index.ast_attribute().span;
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs
index 5f3288264016..7a7d0e1d105e 100644
--- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs
+++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs
@@ -386,20 +386,25 @@ pub(crate) fn opclasses_are_not_allowed_with_other_than_normal_indices(index: In
     }
 }

-pub(crate) fn composite_types_are_not_allowed_in_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) {
-    for field in index.fields() {
-        if field.scalar_field_type().as_composite_type().is_some() {
-            let message = format!(
-                "Indexes can only contain scalar attributes. Please remove {:?} from the argument list of the indexes.",
-                field.name()
-            );
-            ctx.push_error(DatamodelError::new_attribute_validation_error(
-                &message,
-                index.attribute_name(),
-                index.ast_attribute().span,
-            ));
-            return;
-        }
+pub(crate) fn composite_type_in_compound_unique_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) {
+    if !index.is_unique() {
+        return;
+    }
+
+    let composite_type = index
+        .fields()
+        .find(|f| f.scalar_field_type().as_composite_type().is_some());
+
+    if index.fields().len() > 1 && composite_type.is_some() {
+        let message = format!(
+            "Prisma does not currently support composite types in compound unique indices, please remove {:?} from the index. See https://pris.ly/d/mongodb-composite-compound-indices for more details",
+            composite_type.unwrap().name()
+        );
+        ctx.push_error(DatamodelError::new_attribute_validation_error(
+            &message,
+            index.attribute_name(),
+            index.ast_attribute().span,
+        ));
     }
 }
diff --git a/quaint/README.md b/quaint/README.md
index e27c56972390..92033db269b1 100644
--- a/quaint/README.md
+++ b/quaint/README.md
@@ -41,7 +41,7 @@ choice.

 ```sh
 > cargo build --features all
- ```
+```

 ### Testing

@@ -70,8 +70,8 @@ This requires the rust nightly channel:
 > cargo rustdoc --all-features
 ```

-Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html`
+Documentation index will be created at `$CARGO_TARGET_DIR/doc/quaint/index.html`.

 ## Security

-If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint)
+If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint).
diff --git a/quaint/docker-compose.yml b/quaint/docker-compose.yml index ec3c06faa289..47f1a3456a6e 100644 --- a/quaint/docker-compose.yml +++ b/quaint/docker-compose.yml @@ -1,14 +1,14 @@ -version: "3" +version: '3' services: postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - PGDATA: "/pgtmpfs13" + POSTGRES_PASSWORD: 'prisma' + PGDATA: '/pgtmpfs13' ports: - - "5432:5432" + - '5432:5432' networks: - databases tmpfs: /pgtmpfs12 @@ -16,13 +16,13 @@ services: mysql57: image: mysql:5.7 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql5.7 @@ -30,48 +30,48 @@ services: mysql8: image: mysql:8.0.22 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb mssql: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "<YourStrong@Passw0rd>" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '<YourStrong@Passw0rd>' ports: - - "1433:1433" + - '1433:1433' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 081405374340..a1bf4f41a26d 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -33,13 +33,43 @@ where } } +/// A native-column type, i.e. the connector-specific type of the column. +#[derive(Debug, Clone, PartialEq)] +pub struct NativeColumnType<'a>(Cow<'a, str>); + +impl<'a> std::ops::Deref for NativeColumnType<'a> { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl<'a> From<&'a str> for NativeColumnType<'a> { + fn from(s: &'a str) -> Self { + Self(Cow::Owned(s.to_uppercase())) + } +} + #[derive(Debug, Clone, PartialEq)] pub struct Value<'a> { pub typed: ValueType<'a>, - pub native_column_type: Option<Cow<'a, str>>, + pub native_column_type: Option<NativeColumnType<'a>>, } impl<'a> Value<'a> { + /// Returns the native column type of the value, if any, in the form + /// of an UPPERCASE string. ex: "VARCHAR, BYTEA, DATE, TIMEZ" + pub fn native_column_type_name(&'a self) -> Option<&'a str> { + self.native_column_type.as_deref() + } + + /// Changes the value to include information about the native column type + pub fn with_native_column_type<T: Into<NativeColumnType<'a>>>(mut self, column_type: Option<T>) -> Self { + self.native_column_type = column_type.map(|ct| ct.into()); + self + } + /// Creates a new 32-bit signed integer. pub fn int32<I>(value: I) -> Self where diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index e5a1b794ab5b..4b6f27a583da 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -24,6 +24,8 @@ use std::{ use tokio::sync::Mutex; use url::{Host, Url}; +pub use error::MysqlError; + /// The underlying MySQL driver.
Only available with the `expose-drivers` /// Cargo feature. #[cfg(feature = "expose-drivers")] diff --git a/quaint/src/connector/mysql/error.rs b/quaint/src/connector/mysql/error.rs index 8b381e1581bb..dd7c3d3bfa66 100644 --- a/quaint/src/connector/mysql/error.rs +++ b/quaint/src/connector/mysql/error.rs @@ -1,22 +1,29 @@ use crate::error::{DatabaseConstraint, Error, ErrorKind}; use mysql_async as my; -impl From for Error { - fn from(e: my::Error) -> Error { - use my::ServerError; +pub struct MysqlError { + pub code: u16, + pub message: String, + pub state: String, +} - match e { - my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { - message: err.to_string(), - }) - .build(), - my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { - Error::builder(ErrorKind::ConnectionClosed).build() - } - my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), - my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), - my::Error::Server(ServerError { ref message, code, .. }) if code == 1062 => { - let constraint = message +impl From<&my::ServerError> for MysqlError { + fn from(value: &my::ServerError) -> Self { + MysqlError { + code: value.code, + message: value.message.to_owned(), + state: value.state.to_owned(), + } + } +} + +impl From for Error { + fn from(error: MysqlError) -> Self { + let code = error.code; + match code { + 1062 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -29,12 +36,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1451 || code == 1452 => { - let constraint = message + 1451 | 1452 => { + let constraint = error + .message .split_whitespace() .nth(17) .and_then(|s| s.split('`').nth(1)) @@ -45,12 +53,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1263 => { - let constraint = message + 1263 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -62,22 +71,23 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1264 => { + 1264 => { let mut builder = Error::builder(ErrorKind::ValueOutOfRange { - message: message.clone(), + message: error.message.clone(), }); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1364 || code == 1048 => { - let constraint = message + 1364 | 1048 => { + let constraint = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -88,12 +98,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1049 => { - let db_name = message + 1049 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -103,12 +114,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1007 => { - let db_name = message + 1007 => { + let db_name = error + .message .split_whitespace() .nth(3) .and_then(|s| s.split('\'').nth(1)) @@ -118,12 +130,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1044 => { - let db_name = message + 1044 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -133,12 +146,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1045 => { - let user = message + 1045 => { + let user = error + .message .split_whitespace() .nth(4) .and_then(|s| s.split('@').next()) @@ -149,12 +163,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1146 => { - let table = message + 1146 => { + let table = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -165,12 +180,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1054 => { - let column = message + 1054 => { + let column = error + .message .split_whitespace() .nth(2) .and_then(|s| s.split('\'').nth(1)) @@ -179,68 +195,77 @@ impl From for Error { let mut builder = Error::builder(ErrorKind::ColumnNotFound { column }); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1406 => { - let column = message.split_whitespace().flat_map(|s| s.split('\'')).nth(6).into(); + 1406 => { + let column = error + .message + .split_whitespace() + .flat_map(|s| s.split('\'')) + .nth(6) + .into(); let kind = ErrorKind::LengthMismatch { column }; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1191 => { + 1191 => { let kind = ErrorKind::MissingFullTextSearchIndex; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1213 => { + 1213 => { let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - ref state, - }) => { + _ => { let kind = ErrorKind::QueryError( - my::Error::Server(ServerError { - message: message.clone(), + my::Error::Server(my::ServerError { + message: error.message.clone(), code, - state: state.clone(), + state: error.state.clone(), }) .into(), ); let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } + } + } +} + +impl From for Error { + fn from(e: my::Error) -> Error { + match e { + my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { + message: err.to_string(), + }) + .build(), + my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { + Error::builder(ErrorKind::ConnectionClosed).build() + } + my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), + my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), + my::Error::Server(ref server_error) => { + let mysql_error: MysqlError = server_error.into(); + mysql_error.into() + } e => Error::builder(ErrorKind::QueryError(e.into())).build(), } } diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index 2c81144c812b..766be38b27e4 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -1,5 +1,5 @@ mod conversion; -pub mod error; +mod error; use crate::{ ast::{Query, Value}, @@ -27,6 +27,8 @@ use tokio_postgres::{ }; use url::{Host, Url}; +pub use error::PostgresError; + pub(crate) const DEFAULT_SCHEMA: &str = "public"; /// The underlying postgres driver. 
Only available with the `expose-drivers` diff --git a/quaint/src/connector/sqlite.rs b/quaint/src/connector/sqlite.rs index 6db49523c80a..3a1ef72b4883 100644 --- a/quaint/src/connector/sqlite.rs +++ b/quaint/src/connector/sqlite.rs @@ -1,6 +1,8 @@ mod conversion; mod error; +pub use error::SqliteError; + pub use rusqlite::{params_from_iter, version as sqlite_version}; use super::IsolationLevel; diff --git a/quaint/src/connector/sqlite/error.rs b/quaint/src/connector/sqlite/error.rs index fa8b83f3f28a..c10b335cb3c0 100644 --- a/quaint/src/connector/sqlite/error.rs +++ b/quaint/src/connector/sqlite/error.rs @@ -1,69 +1,45 @@ +use std::fmt; + use crate::error::*; use rusqlite::ffi; use rusqlite::types::FromSqlError; -impl From<rusqlite::Error> for Error { - fn from(e: rusqlite::Error) -> Error { - match e { - rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::<Error>() { - Ok(error) => *error, - Err(error) => { - let mut builder = Error::builder(ErrorKind::QueryError(error)); - - builder.set_original_message("Could not interpret parameters in an SQLite query."); - - builder.build() - } - }, - rusqlite::Error::InvalidQuery => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - builder.set_original_message( - "Could not interpret the query or its parameters. Check the syntax and parameter types.", - ); - - builder.build() - } - rusqlite::Error::ExecuteReturnedResults => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_message("Execute returned results, which is not allowed in SQLite."); - - builder.build() - } - - rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), +#[derive(Debug)] +pub struct SqliteError { + pub extended_code: i32, + pub message: Option<String>, +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 2067, - }, - Some(description), - ) => { - let constraint = description - .split(": ") - .nth(1) - .map(|s| s.split(", ")) - .map(|i| i.flat_map(|s| s.split('.').last())) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); +impl fmt::Display for SqliteError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error code {}: {}", + self.extended_code, + ffi::code_to_str(self.extended_code) + ) + } +} - let kind = ErrorKind::UniqueConstraintViolation { constraint }; - let mut builder = Error::builder(kind); +impl std::error::Error for SqliteError {} - builder.set_original_code("2067"); - builder.set_original_message(description); +impl SqliteError { + pub fn new(extended_code: i32, message: Option<String>) -> Self { + Self { extended_code, message } + } - builder.build() - } + pub fn primary_code(&self) -> i32 { + self.extended_code & 0xFF + } +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(description), - ) => { +impl From<SqliteError> for Error { + fn from(error: SqliteError) -> Self { + match error { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_UNIQUE | ffi::SQLITE_CONSTRAINT_PRIMARYKEY, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -75,19 +51,16 @@ impl From<rusqlite::Error> for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1555"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } -
rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1299, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_NOTNULL, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -99,64 +72,41 @@ impl From for Error { let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1299"); - builder.set_original_message(description); - - builder.build() - } - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 787, - }, - Some(description), - ) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::ForeignKey, - }); - - builder.set_original_code("787"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1811, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_FOREIGNKEY | ffi::SQLITE_CONSTRAINT_TRIGGER, + message: Some(description), + } => { let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { constraint: DatabaseConstraint::ForeignKey, }); - builder.set_original_code("1811"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::DatabaseBusy, - extended_code, - }, - description, - ) => { + SqliteError { extended_code, message } if error.primary_code() == ffi::SQLITE_BUSY => { let mut builder = Error::builder(ErrorKind::SocketTimeout); builder.set_original_code(format!("{extended_code}")); - if let Some(description) = description { + if let Some(description) = message { builder.set_original_message(description); } builder.build() } - rusqlite::Error::SqliteFailure(ffi::Error { extended_code, .. }, ref description) => match description { + SqliteError { + extended_code, + ref message, + } => match message { Some(d) if d.starts_with("no such table") => { let table = d.split(": ").last().into(); let kind = ErrorKind::TableDoesNotExist { table }; @@ -188,8 +138,8 @@ impl From for Error { builder.build() } _ => { - let description = description.as_ref().map(|d| d.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + let description = message.as_ref().map(|d| d.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(error.into())); builder.set_original_code(format!("{extended_code}")); if let Some(description) = description { @@ -199,31 +149,50 @@ impl From for Error { builder.build() } }, + } + } +} - rusqlite::Error::SqlInputError { - error: ffi::Error { extended_code, .. }, - ref msg, - .. 
- } => match msg { - d if d.starts_with("no such column: ") => { - let column = d.split("no such column: ").last().into(); - let kind = ErrorKind::ColumnNotFound { column }; - - let mut builder = Error::builder(kind); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(d); +impl From for Error { + fn from(e: rusqlite::Error) -> Error { + match e { + rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { + Ok(error) => *error, + Err(error) => { + let mut builder = Error::builder(ErrorKind::QueryError(error)); - builder.build() - } - _ => { - let description = msg.clone(); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(description); + builder.set_original_message("Could not interpret parameters in an SQLite query."); builder.build() } }, + rusqlite::Error::InvalidQuery => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + builder.set_original_message( + "Could not interpret the query or its parameters. Check the syntax and parameter types.", + ); + + builder.build() + } + rusqlite::Error::ExecuteReturnedResults => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + builder.set_original_message("Execute returned results, which is not allowed in SQLite."); + + builder.build() + } + + rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), + + rusqlite::Error::SqliteFailure(ffi::Error { code: _, extended_code }, message) => { + SqliteError::new(extended_code, message).into() + } + + rusqlite::Error::SqlInputError { + error: ffi::Error { extended_code, .. }, + msg, + .. + } => SqliteError::new(extended_code, Some(msg)).into(), e => Error::builder(ErrorKind::QueryError(e.into())).build(), } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 22037d443c35..705bb6b37ee0 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,7 +6,9 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; -pub use crate::connector::postgres::error::PostgresError; +pub use crate::connector::mysql::MysqlError; +pub use crate::connector::postgres::PostgresError; +pub use crate::connector::sqlite::SqliteError; #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { diff --git a/query-engine/black-box-tests/Cargo.toml b/query-engine/black-box-tests/Cargo.toml index 056ee2bcdb43..cc9e99b8ca3c 100644 --- a/query-engine/black-box-tests/Cargo.toml +++ b/query-engine/black-box-tests/Cargo.toml @@ -15,3 +15,4 @@ user-facing-errors.workspace = true insta = "1.7.1" enumflags2 = "0.7" query-engine-metrics = {path = "../metrics"} +regex = "1.9.3" diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index 8542f753b78e..5ff7ec8ad9ba 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -4,6 +4,7 @@ use query_engine_tests::*; /// Asserts common basics for composite type writes. #[test_suite(schema(schema))] mod smoke_tests { + use regex::Regex; fn schema() -> String { let schema = indoc! 
{ r#"model Person { @@ -14,6 +15,24 @@ mod smoke_tests { schema.to_owned() } + fn assert_value_in_range(metrics: &str, metric: &str, low: f64, high: f64) { + let regex = Regex::new(format!(r"{metric}\s+([+-]?\d+(\.\d+)?)").as_str()).unwrap(); + match regex.captures(&metrics) { + Some(capture) => { + let value = capture.get(1).unwrap().as_str().parse::().unwrap(); + assert!( + value >= low && value <= high, + "expected {} value of {} to be between {} and {}", + metric, + value, + low, + high + ); + } + None => panic!("Metric {} not found in metrics text", metric), + } + } + #[connector_test] #[rustfmt::skip] async fn expected_metrics_rendered(r: Runner) -> TestResult<()> { @@ -62,6 +81,8 @@ mod smoke_tests { // counters assert_eq!(metrics.matches("# HELP prisma_client_queries_total The total number of Prisma Client queries executed").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_total counter").count(), 1); + assert_eq!(metrics.matches("prisma_client_queries_total 1").count(), 1); + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_total The total number of datasource queries executed").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_total counter").count(), 1); @@ -76,18 +97,20 @@ mod smoke_tests { assert_eq!(metrics.matches("# HELP prisma_client_queries_active The number of currently active Prisma Client queries").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_active gauge").count(), 1); - assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for an free connection").count(), 1); + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for a free connection").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait gauge").count(), 1); assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_pool_connections_busy gauge").count(), 1); + assert_value_in_range(&metrics, "prisma_pool_connections_busy", 0f64, 1f64); assert_eq!(metrics.matches("# HELP prisma_pool_connections_idle The number of pool connections that are not busy running a query").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_pool_connections_idle gauge").count(), 1); assert_eq!(metrics.matches("# HELP prisma_pool_connections_open The number of pool connections currently open").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_pool_connections_open gauge").count(), 1); - + assert_value_in_range(&metrics, "prisma_pool_connections_open", 0f64, 1f64); + // histograms assert_eq!(metrics.matches("# HELP prisma_client_queries_duration_histogram_ms The distribution of the time Prisma Client queries took to run end to end").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_duration_histogram_ms histogram").count(), 1); diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 2c849a2aa985..97d19467879a 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -64,34 +64,45 @@ On the note of docker containers: Most connectors require an endpoint to run aga If you choose to set up the databases yourself, please note that the connection strings used in the tests (found in the files in 
`/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) determine the user, password and database for the test user. +### Running + +Note that by default tests run concurrently. + +- VSCode should automatically detect tests and display `run test`. +- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`. +- `cargo test` in the `query-engine-tests` crate. +- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test. +- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index. + #### Running tests through driver adapters -The query engine is able to delegate query execution to javascript through [driver adapters](query-engine/driver-adapters/js/README.md). -This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs drivers the code that actually communicates with the databases. +The query engine is able to delegate query execution to javascript through driver adapters. +This means that instead of drivers being implemented in Rust, a layer of adapters over NodeJs +drivers is the code that actually communicates with the databases. See [`adapter-*` packages in prisma/prisma](https://github.com/prisma/prisma/tree/main/packages) To run tests through driver adapters, you should also configure the following environment variables: -* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. +* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. Example: ```shell -export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh" export DRIVER_ADAPTER=neon export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }' ``` -### Running +We have provided helpers to run the query-engine tests with driver adapters; these helpers set all the required environment +variables for you: -Note that by default tests run concurrently. +```shell +DRIVER_ADAPTER=$adapter make test-qe +``` + +Where `$adapter` is one of the supported adapters: `neon`, `planetscale`, `libsql`. -- VSCode should automatically detect tests and display `run test`.
-- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`. -- `cargo test` in the `query-engine-tests` crate. -- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test. -- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index. ## Authoring tests The following is an example on how to write a new test suite, as extending or changing an existing one follows the same rules and considerations. diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs index 62c4e3005f71..a3e45b0a05b5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs @@ -1,8 +1,8 @@ use query_engine_tests::*; -#[test_suite(schema(generic), only(Postgres))] +#[test_suite(schema(generic))] mod raw_params { - #[connector_test] + #[connector_test(only(Postgres), exclude(JS))] async fn value_too_many_bind_variables(runner: Runner) -> TestResult<()> { let n = 32768; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 9aa34a943560..e45cef8ac306 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -213,7 +213,7 @@ mod interactive_tx { Ok(()) } - #[connector_test(exclude(JS))] + #[connector_test] async fn batch_queries_failure(mut runner: Runner) -> TestResult<()> { // Tx expires after five second. let tx_id = runner.start_tx(5000, 5000, None).await?; @@ -256,7 +256,7 @@ mod interactive_tx { Ok(()) } - #[connector_test(exclude(JS))] + #[connector_test] async fn tx_expiration_failure_cycle(mut runner: Runner) -> TestResult<()> { // Tx expires after one seconds. let tx_id = runner.start_tx(5000, 1000, None).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index 8ea08acc85da..393581b8ad91 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -66,7 +66,7 @@ mod one2one_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -167,7 +167,7 @@ mod one2one_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -270,7 +270,7 @@ mod one2many_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -371,7 +371,7 @@ mod one2many_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs index b0e566ffcb55..974c165ed942 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -68,7 +68,7 @@ mod one2one_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -171,7 +171,7 @@ mod one2one_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -276,7 +276,7 @@ mod one2many_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -379,7 +379,7 @@ mod one2many_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs index 581bc21bebe8..7b25cfff279e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs @@ -187,8 +187,8 @@ mod max_integer { schema.to_owned() } - #[connector_test(schema(overflow_pg), only(Postgres))] - async fn unfitted_int_should_fail_pg(runner: Runner) -> TestResult<()> { + #[connector_test(schema(overflow_pg), only(Postgres), exclude(JS))] + async fn unfitted_int_should_fail_pg_quaint(runner: Runner) -> TestResult<()> { // int assert_error!( runner, @@ -234,6 +234,55 @@ mod max_integer { Ok(()) } + // The driver adapter for neon provides different error messages on overflow + #[connector_test(schema(overflow_pg), only(JS, Postgres))] + async fn unfitted_int_should_fail_pg_js(runner: Runner) -> TestResult<()> { + // int + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MAX} }}) {{ id }} }}"), + None, + "value \\\"2147483648\\\" is out of range for type integer" + ); + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MIN} }}) {{ id }} }}"), + None, + "value \\\"-2147483649\\\" is out of range for type integer" + ); + + // smallint + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MAX} }}) {{ id }} }}"), + None, + "value \\\"32768\\\" is out of range for type smallint" + ); + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MIN} }}) {{ id }} }}"), + None, + "value \\\"-32769\\\" is out of range for type smallint" + ); + + //oid + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ oid: {U32_OVERFLOW_MAX} }}) {{ id }} }}"), + None, + "value \\\"4294967296\\\" is out of range for type oid" + ); + + // The underlying driver swallows a negative id by interpreting it as unsigned. 
+ // {"data":{"createOneTest":{"id":1,"oid":4294967295}}} + run_query!( + runner, + format!("mutation {{ createOneTest(data: {{ oid: {OVERFLOW_MIN} }}) {{ id, oid }} }}") + ); + + Ok(()) + } + #[connector_test(schema(overflow_pg), only(Postgres))] async fn fitted_int_should_work_pg(runner: Runner) -> TestResult<()> { // int diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs index c1df015c577b..ccf04dd2f4af 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs @@ -24,8 +24,8 @@ mod conversion_error { schema.to_owned() } - #[connector_test(schema(schema_int))] - async fn convert_to_int(runner: Runner) -> TestResult<()> { + #[connector_test(schema(schema_int), only(Sqlite), exclude(JS))] + async fn convert_to_int_sqlite_quaint(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; assert_error!( @@ -38,8 +38,22 @@ mod conversion_error { Ok(()) } - #[connector_test(schema(schema_bigint))] - async fn convert_to_bigint(runner: Runner) -> TestResult<()> { + #[connector_test(schema(schema_int), only(Sqlite, JS))] + async fn convert_to_int_sqlite_js(runner: Runner) -> TestResult<()> { + create_test_data(&runner).await?; + + assert_error!( + runner, + r#"query { findManyTestModel { field } }"#, + 2023, + "Inconsistent column data: Conversion failed: number must be an integer in column 'field'" + ); + + Ok(()) + } + + #[connector_test(schema(schema_bigint), only(Sqlite), exclude(JS))] + async fn convert_to_bigint_sqlite_quaint(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; assert_error!( @@ -52,6 +66,20 @@ mod conversion_error { Ok(()) } + #[connector_test(schema(schema_bigint), only(Sqlite, JS))] + async fn convert_to_bigint_sqlite_js(runner: Runner) -> TestResult<()> { + create_test_data(&runner).await?; + + assert_error!( + runner, + r#"query { findManyTestModel { field } }"#, + 2023, + "Inconsistent column data: Conversion failed: number must be an i64 in column 'field'" + ); + + Ok(()) + } + async fn create_test_data(runner: &Runner) -> TestResult<()> { run_query!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs index 2fe8af850120..2b4b880b4975 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs @@ -207,7 +207,9 @@ mod json { Ok(()) } - #[connector_test(schema(json_opt))] + // The external runner for driver adapters, in spite of the protocol being used in the test matrix + // uses the JSON representation of queries, so this test should not apply to driver adapters (exclude(JS)) + #[connector_test(schema(json_opt), exclude(JS, MySQL(5.6)))] async fn nested_not_shorthand(runner: Runner) -> TestResult<()> { // Those tests pass with the JSON protocol because the entire object is parsed as JSON. // They remain useful to ensure we don't ever allow a full JSON filter input object type at the schema level. 
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs index 0039b924108c..635726c71380 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs @@ -5,7 +5,20 @@ use query_engine_tests::*; mod casts { use query_engine_tests::{fmt_query_raw, run_query, RawParam}; - #[connector_test] + // The following tests are excluded for driver adapters. The underlying + // driver rejects queries where the values of the positional arguments do + // not match the expected types. As an example, the following query to the + // driver + // + // ```json + // { + // sql: 'SELECT $1::int4 AS decimal_to_i4; ', + // args: [ 42.51 ] + // } + // + // Bails with: ERROR: invalid input syntax for type integer: "42.51" + // + #[connector_test(only(Postgres), exclude(JS))] async fn query_numeric_casts(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query_pretty!(&runner, fmt_query_raw(r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs index 88409d8d17f6..43417cb352e9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs @@ -34,8 +34,8 @@ mod raw_errors { Ok(()) } - #[connector_test(schema(common_nullable_types))] - async fn list_param_for_scalar_column_should_not_panic(runner: Runner) -> TestResult<()> { + #[connector_test(schema(common_nullable_types), only(Postgres), exclude(JS))] + async fn list_param_for_scalar_column_should_not_panic_quaint(runner: Runner) -> TestResult<()> { assert_error!( runner, fmt_execute_raw( @@ -48,4 +48,19 @@ mod raw_errors { Ok(()) } + + #[connector_test(schema(common_nullable_types), only(JS, Postgres))] + async fn list_param_for_scalar_column_should_not_panic_pg_js(runner: Runner) -> TestResult<()> { + assert_error!( + runner, + fmt_execute_raw( + r#"INSERT INTO "TestModel" ("id") VALUES ($1);"#, + vec![RawParam::array(vec![1])], + ), + 2010, + r#"invalid input syntax for type integer"# + ); + + Ok(()) + } } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs index c29b12d5b73f..4d3c3137f4a2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs @@ -198,6 +198,35 @@ mod mysql { Ok(()) } + fn schema_decimal_vitess() -> String { + let schema = indoc! { + r#"model Model { + #id(id, String, @id, @default(cuid())) + decLarge Decimal @test.Decimal(20, 10) + }"# + }; + + schema.to_owned() + } + + #[connector_test(only(Vitess), schema(schema_decimal_vitess))] + async fn native_decimal_vitess_precision(runner: Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(&runner, r#"mutation { + createOneModel( + data: { + decLarge: "131603421.38724228" + } + ) { + decLarge + } + }"#), + @r###"{"data":{"createOneModel":{"decLarge":"131603421.38724228"}}}"### + ); + + Ok(()) + } + fn schema_string() -> String { let schema = indoc! 
{ r#"model Model { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml index 088a0d4b2d34..095c9cd02f60 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] serde_json.workspace = true -prisma-models = { path = "../../prisma-models" } +query-structure = { path = "../../query-structure" } once_cell = "1" qe-setup = { path = "../qe-setup" } request-handlers = { path = "../../request-handlers" } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index b27f27406e5c..4af4e763298a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -159,7 +159,7 @@ impl TestConfig { /// and the workspace_root is set, then use the default external test executor. fn fill_defaults(&mut self) { const DEFAULT_TEST_EXECUTOR: &str = - "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + "query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"; if self .external_test_executor diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index d92bb5e96314..8c21dd93f903 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -296,19 +296,33 @@ pub(crate) fn should_run( return false; } - if !only.is_empty() { - return only - .iter() - .any(|only| ConnectorVersion::try_from(*only).unwrap().matches_pattern(&version)); - } - + // We skip tests that exclude JS driver adapters when an external test executor is configured. + // A test that you only want to run with rust drivers can be annotated with exclude(JS) if CONFIG.external_test_executor().is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { println!("Excluded test execution for JS driver adapters. Skipping test"); return false; }; + // we consume the JS token to prevent it from being used in the following checks + let exclude: Vec<_> = exclude.iter().filter(|excl| excl.0.to_uppercase() != "JS").collect(); + + // We only run tests that include JS driver adapters when an external test executor is configured. + // A test that you only want to run with js driver adapters can be annotated with only(JS) + if CONFIG.external_test_executor().is_none() && only.iter().any(|incl| incl.0.to_uppercase() == "JS") { + println!("Excluded test execution for rust driver adapters. Skipping test"); + return false; + } + // we consume the JS token to prevent it from being used in the following checks + let only: Vec<_> = only.iter().filter(|incl| incl.0.to_uppercase() != "JS").collect(); + + if !only.is_empty() { + return only + .iter() + .any(|incl| ConnectorVersion::try_from(**incl).unwrap().matches_pattern(&version)); + } if exclude.iter().any(|excl| { - ConnectorVersion::try_from(*excl).map_or(false, |connector_version| connector_version.matches_pattern(&version)) + ConnectorVersion::try_from(**excl) + .map_or(false, |connector_version| connector_version.matches_pattern(&version)) }) { println!("Connector excluded. 
Skipping test."); return false; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs index 0eee2d9e6cb6..b9354056b692 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs @@ -1,7 +1,6 @@ use crate::{TestError, TestResult}; use indexmap::IndexMap; use itertools::Itertools; -use prisma_models::PrismaValue; use query_core::{ constants::custom_types, schema::{ @@ -10,6 +9,7 @@ use query_core::{ }, ArgumentValue, ArgumentValueObject, Selection, }; +use query_structure::PrismaValue; use request_handlers::{Action, FieldQuery, GraphQLProtocolAdapter, JsonSingleQuery, SelectionSet, SelectionSetValue}; use serde_json::{json, Value as JsonValue}; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs index 29029e3cf81c..a366fb6bdc1b 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs @@ -1,8 +1,8 @@ -use prisma_models::PrismaValue; use query_core::{ constants::custom_types, response_ir::{Item, ItemRef, Map}, }; +use query_structure::PrismaValue; use request_handlers::{GQLBatchResponse, GQLResponse, PrismaResponse}; pub struct JsonResponse; diff --git a/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 new file mode 100644 index 000000000000..48c89c79427c --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 @@ -0,0 +1,7 @@ +{ + "connector": "vitess", + "version": "8.0", + "driver_adapter": "planetscale", + "driver_adapter_config": { "proxyUrl": "http://root:root@127.0.0.1:8085" }, + "external_test_executor": "default" +} diff --git a/query-engine/connectors/mongodb-query-connector/Cargo.toml b/query-engine/connectors/mongodb-query-connector/Cargo.toml index d41210342107..b451e17f6e6f 100644 --- a/query-engine/connectors/mongodb-query-connector/Cargo.toml +++ b/query-engine/connectors/mongodb-query-connector/Cargo.toml @@ -22,9 +22,10 @@ tracing-futures = "0.2" uuid.workspace = true indexmap = "1.7" query-engine-metrics = {path = "../../metrics"} +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } -[dependencies.prisma-models] -path = "../../prisma-models" +[dependencies.query-structure] +path = "../../query-structure" [dependencies.mongodb-client] path = "../../../libs/mongodb-client" @@ -46,9 +47,6 @@ workspace = true [dependencies.serde] workspace = true -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] workspace = true diff --git a/query-engine/connectors/mongodb-query-connector/src/cursor.rs b/query-engine/connectors/mongodb-query-connector/src/cursor.rs index 9adbf8c1966b..1aaa22ef6b19 100644 --- a/query-engine/connectors/mongodb-query-connector/src/cursor.rs +++ b/query-engine/connectors/mongodb-query-connector/src/cursor.rs @@ -1,6 +1,6 @@ use crate::{orderby::OrderByData, IntoBson}; use mongodb::bson::{doc, Document}; -use prisma_models::{OrderBy, SelectionResult, SortOrder}; +use query_structure::{OrderBy, SelectionResult, SortOrder}; 
 #[derive(Debug, Clone)]
 pub(crate) struct CursorData {
diff --git a/query-engine/connectors/mongodb-query-connector/src/error.rs b/query-engine/connectors/mongodb-query-connector/src/error.rs
index d71f39fb54e5..f32ff78e29c9 100644
--- a/query-engine/connectors/mongodb-query-connector/src/error.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/error.rs
@@ -4,7 +4,7 @@ use mongodb::{
     bson::{self, extjson},
     error::{CommandError, Error as DriverError, TRANSIENT_TRANSACTION_ERROR},
 };
-use prisma_models::{CompositeFieldRef, Field, ScalarFieldRef, SelectedField};
+use query_structure::{CompositeFieldRef, Field, ScalarFieldRef, SelectedField};
 use regex::Regex;
 use thiserror::Error;
 use user_facing_errors::query_engine::DatabaseConstraint;
diff --git a/query-engine/connectors/mongodb-query-connector/src/filter.rs b/query-engine/connectors/mongodb-query-connector/src/filter.rs
index 44ca06cf875b..64bdadafd6a9 100644
--- a/query-engine/connectors/mongodb-query-connector/src/filter.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/filter.rs
@@ -1,11 +1,6 @@
 use crate::{constants::group_by, error::MongoError, join::JoinStage, query_builder::AggregationType, IntoBson};
-use connector_interface::{
-    AggregationFilter, CompositeCondition, CompositeFilter, ConditionListValue, ConditionValue, Filter,
-    OneRelationIsNullFilter, QueryMode, RelationFilter, ScalarCompare, ScalarCondition, ScalarFilter, ScalarListFilter,
-    ScalarProjection,
-};
 use mongodb::bson::{doc, Bson, Document};
-use prisma_models::{CompositeFieldRef, PrismaValue, ScalarFieldRef, TypeIdentifier};
+use query_structure::*;
 
 #[derive(Debug, Clone)]
 pub(crate) enum MongoFilter {
@@ -132,9 +127,9 @@ impl MongoFilterVisitor {
     fn visit_scalar_filter(&self, filter: ScalarFilter) -> crate::Result<MongoFilter> {
         let field = match filter.projection {
-            connector_interface::ScalarProjection::Single(sf) => sf,
-            connector_interface::ScalarProjection::Compound(mut c) if c.len() == 1 => c.pop().unwrap(),
-            connector_interface::ScalarProjection::Compound(_) => {
+            ScalarProjection::Single(sf) => sf,
+            ScalarProjection::Compound(mut c) if c.len() == 1 => c.pop().unwrap(),
+            ScalarProjection::Compound(_) => {
                 unreachable!(
                     "Multi-field compound filter case hit when it should have been folded into normal filters previously."
                 )
@@ -417,7 +412,7 @@ impl MongoFilterVisitor {
         let field_ref = filter.as_field_ref().cloned();
 
         let filter_doc = match filter.condition {
-            connector_interface::ScalarListCondition::Contains(val) => {
+            ScalarListCondition::Contains(val) => {
                 let bson = match val {
                     ConditionValue::Value(value) => (field, value).into_bson()?,
                     ConditionValue::FieldRef(field_ref) => self.prefixed_field_ref(&field_ref)?,
                 };
 
                 doc! { "$in": [bson, coerce_as_array(&field_name)] }
             }
@@ -426,11 +421,11 @@
-            connector_interface::ScalarListCondition::ContainsEvery(vals) if vals.is_empty() => {
+            ScalarListCondition::ContainsEvery(vals) if vals.is_empty() => {
                 // Empty hasEvery: Return all records.
                 render_stub_condition(true)
             }
-            connector_interface::ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => {
+            ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => {
                 let ins = vals
                     .into_iter()
                     .map(|val| {
@@ -442,20 +437,18 @@ impl MongoFilterVisitor {
                 doc! { "$and": ins }
             }
-            connector_interface::ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => {
-                render_every(
-                    &field_name,
-                    "elem",
-                    doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] },
-                    true,
-                )
-            }
+            ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => render_every(
+                &field_name,
+                "elem",
+                doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] },
+                true,
+            ),
-            connector_interface::ScalarListCondition::ContainsSome(vals) if vals.is_empty() => {
+            ScalarListCondition::ContainsSome(vals) if vals.is_empty() => {
                 // Empty hasSome: Return no records.
                 render_stub_condition(false)
             }
-            connector_interface::ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => {
+            ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => {
                 let ins = vals
                     .into_iter()
                     .map(|val| {
@@ -467,19 +460,17 @@ impl MongoFilterVisitor {
                 doc! { "$or": ins }
             }
-            connector_interface::ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => {
-                render_some(
-                    &field_name,
-                    "elem",
-                    doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] },
-                    true,
-                )
-            }
+            ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => render_some(
+                &field_name,
+                "elem",
+                doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] },
+                true,
+            ),
-            connector_interface::ScalarListCondition::IsEmpty(true) => {
+            ScalarListCondition::IsEmpty(true) => {
                 doc! { "$eq": [render_size(&field_name, true), 0] }
             }
-            connector_interface::ScalarListCondition::IsEmpty(false) => {
+            ScalarListCondition::IsEmpty(false) => {
                 doc! { "$gt": [render_size(&field_name, true), 0] }
             }
         };
@@ -653,21 +644,21 @@ impl MongoFilterVisitor {
         let mut join_stage = JoinStage::new(from_field);
 
         let filter_doc = match filter.condition {
-            connector_interface::RelationCondition::EveryRelatedRecord => {
+            RelationCondition::EveryRelatedRecord => {
                 let (every, nested_joins) = render_every_from_filter(&field_name, nested_filter, false, false)?;
 
                 join_stage.extend_nested(nested_joins);
 
                 every
             }
-            connector_interface::RelationCondition::AtLeastOneRelatedRecord => {
+            RelationCondition::AtLeastOneRelatedRecord => {
                 let (some, nested_joins) = render_some_from_filter(&field_name, nested_filter, false, false)?;
 
                 join_stage.extend_nested(nested_joins);
 
                 some
             }
-            connector_interface::RelationCondition::NoRelatedRecord if is_to_one => {
+            RelationCondition::NoRelatedRecord if is_to_one => {
                 if is_empty_filter {
                     // Doesn't need coercing the array since joins always return arrays
                     doc! { "$eq": [render_size(&field_name, false), 0] }
@@ -688,7 +679,7 @@ impl MongoFilterVisitor {
                     }
                 }
             }
-            connector_interface::RelationCondition::NoRelatedRecord => {
+            RelationCondition::NoRelatedRecord => {
                 if is_empty_filter {
                     // Doesn't need coercing the array since joins always return arrays
                     doc! { "$eq": [render_size(&field_name, false), 0] }
@@ -700,7 +691,7 @@ impl MongoFilterVisitor {
                     none
                 }
             }
-            connector_interface::RelationCondition::ToOneRelatedRecord => {
+            RelationCondition::ToOneRelatedRecord => {
                 // To-ones are coerced to single-element arrays via the join.
                 // We render an "every" expression on that array to ensure that the predicate is matched.
                 let (every, nested_joins) = render_every_from_filter(&field_name, nested_filter, false, false)?;
diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs
index 261694ee5605..76e634908c11 100644
--- a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs
@@ -10,7 +10,7 @@ use connector_interface::{
     WriteArgs, WriteOperations,
 };
 use mongodb::{ClientSession, Database};
-use prisma_models::{prelude::*, SelectionResult};
+use query_structure::{prelude::*, SelectionResult};
 use std::collections::HashMap;
 
 pub struct MongoDbConnection {
@@ -187,7 +187,7 @@ impl ReadOperations for MongoDbConnection {
     async fn get_single_record(
         &mut self,
         model: &Model,
-        filter: &connector_interface::Filter,
+        filter: &query_structure::Filter,
         selected_fields: &FieldSelection,
         aggr_selections: &[RelAggregationSelection],
         _trace_id: Option<String>,
@@ -209,7 +209,7 @@ impl ReadOperations for MongoDbConnection {
     async fn get_many_records(
         &mut self,
         model: &Model,
-        query_arguments: connector_interface::QueryArguments,
+        query_arguments: query_structure::QueryArguments,
         selected_fields: &FieldSelection,
         _nested: Vec<RelatedQuery>,
         aggregation_selections: &[RelAggregationSelection],
@@ -244,10 +244,10 @@ impl ReadOperations for MongoDbConnection {
     async fn aggregate_records(
         &mut self,
         model: &Model,
-        query_arguments: connector_interface::QueryArguments,
+        query_arguments: query_structure::QueryArguments,
         selections: Vec<AggregationSelection>,
         group_by: Vec<ScalarFieldRef>,
-        having: Option<connector_interface::Filter>,
+        having: Option<query_structure::Filter>,
         _trace_id: Option<String>,
     ) -> connector_interface::Result<Vec<AggregationRow>> {
         catch(async move {
diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs b/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs
index 5b5821410c97..620d7628182f 100644
--- a/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs
@@ -12,8 +12,8 @@ use connector_interface::{
 };
 use futures::Future;
 use mongodb::Client;
-use prisma_models::prelude::*;
 use psl::Datasource;
+use query_structure::prelude::*;
 
 use crate::error::MongoError;
diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs
index b5447a815d82..5804ee75c077 100644
--- a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs
@@ -7,8 +7,8 @@ use connector_interface::{
     ConnectionLike, ReadOperations, RelAggregationSelection, RelatedQuery, Transaction, UpdateType, WriteOperations,
 };
 use mongodb::options::{Acknowledgment, ReadConcern, TransactionOptions, WriteConcern};
-use prisma_models::SelectionResult;
 use query_engine_metrics::{decrement_gauge, increment_gauge, metrics, PRISMA_CLIENT_QUERIES_ACTIVE};
+use query_structure::SelectionResult;
 use std::collections::HashMap;
 
 pub struct MongoDbTransaction<'conn> {
@@ -252,7 +252,7 @@ impl<'conn> ReadOperations for MongoDbTransaction<'conn> {
     async fn get_single_record(
         &mut self,
         model: &Model,
-        filter: &connector_interface::Filter,
+        filter: &query_structure::Filter,
         selected_fields: &FieldSelection,
         aggr_selections: &[RelAggregationSelection],
         _trace_id: Option<String>,
@@ -274,7 +274,7 @@ impl<'conn> ReadOperations for MongoDbTransaction<'conn> {
     async fn get_many_records(
         &mut self,
         model: &Model,
-        query_arguments: connector_interface::QueryArguments,
+        query_arguments: query_structure::QueryArguments,
         selected_fields: &FieldSelection,
         _nested: Vec<RelatedQuery>,
         aggregation_selections: &[RelAggregationSelection],
@@ -315,10 +315,10 @@ impl<'conn> ReadOperations for MongoDbTransaction<'conn> {
     async fn aggregate_records(
         &mut self,
         model: &Model,
-        query_arguments: connector_interface::QueryArguments,
+        query_arguments: query_structure::QueryArguments,
         selections: Vec<AggregationSelection>,
         group_by: Vec<ScalarFieldRef>,
-        having: Option<connector_interface::Filter>,
+        having: Option<query_structure::Filter>,
         _trace_id: Option<String>,
     ) -> connector_interface::Result<Vec<AggregationRow>> {
         catch(async move {
diff --git a/query-engine/connectors/mongodb-query-connector/src/join.rs b/query-engine/connectors/mongodb-query-connector/src/join.rs
index fb5c46d6858a..24c8abe2fba9 100644
--- a/query-engine/connectors/mongodb-query-connector/src/join.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/join.rs
@@ -1,6 +1,6 @@
 use crate::filter::MongoFilter;
 use mongodb::bson::{doc, Document};
-use prisma_models::{walkers, RelationFieldRef, ScalarFieldRef};
+use query_structure::{walkers, RelationFieldRef, ScalarFieldRef};
 
 /// A join stage describes a tree of joins and nested joins to be performed on a collection.
 /// Every document of the `source` side will be joined with the collection documents
diff --git a/query-engine/connectors/mongodb-query-connector/src/orderby.rs b/query-engine/connectors/mongodb-query-connector/src/orderby.rs
index 2e89d4399b47..15b37691ed27 100644
--- a/query-engine/connectors/mongodb-query-connector/src/orderby.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/orderby.rs
@@ -1,7 +1,7 @@
 use crate::join::JoinStage;
 use itertools::Itertools;
 use mongodb::bson::{doc, Document};
-use prisma_models::{OrderBy, OrderByHop, OrderByToManyAggregation, SortOrder};
+use query_structure::{OrderBy, OrderByHop, OrderByToManyAggregation, SortOrder};
 use std::iter;
 
 #[derive(Debug)]
@@ -230,11 +230,11 @@ impl OrderByBuilder {
         // Can only be scalar aggregations for groupBy, ToMany aggregations are not supported yet.
         if let OrderBy::ScalarAggregation(order_by_aggr) = &data.order_by {
             let prefix = match order_by_aggr.sort_aggregation {
-                prisma_models::SortAggregation::Count => "count",
-                prisma_models::SortAggregation::Avg => "avg",
-                prisma_models::SortAggregation::Sum => "sum",
-                prisma_models::SortAggregation::Min => "min",
-                prisma_models::SortAggregation::Max => "max",
+                query_structure::SortAggregation::Count => "count",
+                query_structure::SortAggregation::Avg => "avg",
+                query_structure::SortAggregation::Sum => "sum",
+                query_structure::SortAggregation::Min => "min",
+                query_structure::SortAggregation::Max => "max",
             };
 
             format!("{}_{}", prefix, data.scalar_field_name())
@@ -258,7 +258,7 @@ impl OrderByBuilder {
         if let OrderBy::ToManyAggregation(order_by_aggregate) = &data.order_by {
             if !order_by_aggregate.path.is_empty() {
                 match order_by_aggregate.sort_aggregation {
-                    prisma_models::SortAggregation::Count => {
+                    query_structure::SortAggregation::Count => {
                         if let Some(clone_to) = data.prefix.as_ref().and_then(|x| x.clone_to.clone()) {
                             order_aggregate_proj_doc.push(doc! { "$addFields": { clone_to.clone(): { "$size": { "$ifNull": [format!("${}", data.full_reference_path(false)), []] } } } });
                             field_name = clone_to;
                             // Todo: Just a hack right now, this whole function needs love.
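The `SortAggregation` match above is purely mechanical after the rename, but the key it produces is easy to miss in the hunk. A minimal sketch of the derivation, with hypothetical helper names; the prefix table and the `format!` shape come directly from the orderby.rs hunk:

```rust
// Illustrative sketch only, not the connector's actual helper.
use query_structure::SortAggregation;

fn aggregation_prefix(sort: SortAggregation) -> &'static str {
    match sort {
        SortAggregation::Count => "count",
        SortAggregation::Avg => "avg",
        SortAggregation::Sum => "sum",
        SortAggregation::Min => "min",
        SortAggregation::Max => "max",
    }
}

fn order_by_key(sort: SortAggregation, scalar_field_name: &str) -> String {
    // e.g. (Count, "score") -> "count_score"
    format!("{}_{}", aggregation_prefix(sort), scalar_field_name)
}
```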
diff --git a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs
index 3e8474776772..081672f9d6e6 100644
--- a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs
@@ -1,6 +1,6 @@
 use connector_interface::{AggregationSelection, RelAggregationSelection};
 use indexmap::IndexMap;
-use prisma_models::{
+use query_structure::{
     ast::FieldArity, DefaultKind, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField, TypeIdentifier,
 };
diff --git a/query-engine/connectors/mongodb-query-connector/src/projection.rs b/query-engine/connectors/mongodb-query-connector/src/projection.rs
index 4ed83cd14522..80a6a3e792e7 100644
--- a/query-engine/connectors/mongodb-query-connector/src/projection.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/projection.rs
@@ -1,6 +1,6 @@
 use crate::IntoBson;
 use mongodb::bson::{Bson, Document};
-use prisma_models::{FieldSelection, SelectedField};
+use query_structure::{FieldSelection, SelectedField};
 
 /// Used as projection document for Mongo queries.
 impl IntoBson for FieldSelection {
@@ -15,13 +15,13 @@ impl IntoBson for FieldSelection {
 fn path_prefixed_selection(doc: &mut Document, parent_paths: Vec<String>, selections: Vec<SelectedField>) {
     for field in selections {
         match field {
-            prisma_models::SelectedField::Scalar(sf) => {
+            query_structure::SelectedField::Scalar(sf) => {
                 let mut parent_paths = parent_paths.clone();
                 parent_paths.push(sf.db_name().to_owned());
                 doc.insert(parent_paths.join("."), Bson::Int32(1));
             }
-            prisma_models::SelectedField::Composite(cs) => {
+            query_structure::SelectedField::Composite(cs) => {
                 let mut parent_paths = parent_paths.clone();
                 parent_paths.push(cs.field.db_name().to_owned());
                 path_prefixed_selection(doc, parent_paths, cs.selections);
diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs
index 4ea3d4590446..f5ac3659f1b5 100644
--- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs
@@ -1,8 +1,8 @@
 use crate::constants::*;
 
-use connector_interface::{AggregationSelection, Filter};
+use connector_interface::AggregationSelection;
 use mongodb::bson::{doc, Bson, Document};
-use prisma_models::ScalarFieldRef;
+use query_structure::{AggregationFilter, Filter, ScalarFieldRef};
 use std::collections::HashSet;
 
 /// Represents a `$group` aggregation stage.
@@ -161,19 +161,19 @@ impl GroupByBuilder {
                 unfold_filters(filters);
             }
             Filter::Aggregation(aggregation) => match aggregation {
-                connector_interface::AggregationFilter::Count(filter) => {
+                AggregationFilter::Count(filter) => {
                     self.insert_from_filter(filter.as_ref(), AggregationType::Count);
                 }
-                connector_interface::AggregationFilter::Average(filter) => {
+                AggregationFilter::Average(filter) => {
                     self.insert_from_filter(filter.as_ref(), AggregationType::Average);
                 }
-                connector_interface::AggregationFilter::Sum(filter) => {
+                AggregationFilter::Sum(filter) => {
                     self.insert_from_filter(filter.as_ref(), AggregationType::Sum);
                 }
-                connector_interface::AggregationFilter::Min(filter) => {
+                AggregationFilter::Min(filter) => {
                     self.insert_from_filter(filter.as_ref(), AggregationType::Min);
                 }
-                connector_interface::AggregationFilter::Max(filter) => {
+                AggregationFilter::Max(filter) => {
                     self.insert_from_filter(filter.as_ref(), AggregationType::Max);
                 }
             },
diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs
index e6ca54929989..fcf749fc2d35 100644
--- a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs
@@ -10,14 +10,14 @@ use crate::{
     root_queries::observing,
     vacuum_cursor, BsonTransform, IntoBson,
 };
-use connector_interface::{AggregationSelection, Filter, QueryArguments, RelAggregationSelection};
+use connector_interface::{AggregationSelection, RelAggregationSelection};
 use itertools::Itertools;
 use mongodb::{
     bson::{doc, Document},
     options::AggregateOptions,
     ClientSession, Collection,
 };
-use prisma_models::{FieldSelection, Model, ScalarFieldRef};
+use query_structure::{FieldSelection, Filter, Model, QueryArguments, ScalarFieldRef};
 use std::convert::TryFrom;
 
 // Mongo Driver broke usage of the simple API, can't be used by us anymore.
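For readers unfamiliar with what `GroupByBuilder` ultimately emits: each `AggregationFilter` variant above maps a having-filter onto an accumulator in a Mongo `$group` stage. An illustrative (not connector-generated) stage for a count grouped by a hypothetical `status` field, built with the same bson `doc!` macro the builder uses:

```rust
use mongodb::bson::{doc, Document};

fn example_group_stage() -> Document {
    doc! {
        "$group": {
            "_id": "$status",
            // Counting documents per group is expressed as a sum of 1s;
            // the accumulator name here ("count_score") is hypothetical.
            "count_score": { "$sum": 1 },
        }
    }
}
```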
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs
index 65ee8a71ae30..05ff57053e95 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs
@@ -2,7 +2,7 @@ use crate::{constants::*, output_meta, query_builder::MongoReadQueryBuilder, val
 use connector_interface::*;
 use mongodb::{bson::Document, ClientSession, Database};
-use prisma_models::prelude::*;
+use query_structure::{prelude::*, Filter, QueryArguments};
 
 pub async fn aggregate<'conn>(
     database: &Database,
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs
index ba1257270da2..f66adbac3e3b 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs
@@ -13,11 +13,11 @@ use crate::{
 use futures::Future;
 use mongodb::bson::Bson;
 use mongodb::bson::Document;
-use prisma_models::*;
 use query_engine_metrics::{
     histogram, increment_counter, metrics, PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS,
     PRISMA_DATASOURCE_QUERIES_TOTAL,
 };
+use query_structure::*;
 use std::time::Instant;
 use tracing::debug;
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs
index 6876ab683333..f674eafe9fd4 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs
@@ -4,7 +4,7 @@ use mongodb::{
     bson::{from_bson, Bson, Document},
     options::*,
 };
-use prisma_models::{Model, PrismaValue};
+use query_structure::{Model, PrismaValue};
 use std::collections::HashMap;
 
 #[allow(clippy::large_enum_variant)]
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs
index 58bf634c4c0d..0d9ac09ae26a 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs
@@ -3,9 +3,9 @@ use crate::{
     error::DecorateErrorWithFieldInformationExtension, output_meta, query_builder::MongoReadQueryBuilder,
     vacuum_cursor, IntoBson,
 };
-use connector_interface::{Filter, QueryArguments, RelAggregationSelection};
+use connector_interface::RelAggregationSelection;
 use mongodb::{bson::doc, options::FindOptions, ClientSession, Database};
-use prisma_models::*;
+use query_structure::*;
 use tracing::{info_span, Instrument};
 
 /// Finds a single record. Joins are not required at the moment because the selector is always a unique one.
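The metrics imports kept in root_queries/mod.rs above hint at what the `observing` wrapper (re-exported to the query builders) does around every datasource query. A hedged sketch of that instrumentation pattern; the metric names are the ones imported in the hunk, but the wrapper's exact shape is an assumption, not the real implementation:

```rust
use query_engine_metrics::{
    histogram, increment_counter, PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS,
    PRISMA_DATASOURCE_QUERIES_TOTAL,
};
use std::time::Instant;

// Time a future and record both a total-queries counter and a
// duration histogram in milliseconds.
async fn observe<F, T>(fut: F) -> T
where
    F: std::future::Future<Output = T>,
{
    let start = Instant::now();
    let res = fut.await;
    increment_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL);
    histogram!(
        PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS,
        start.elapsed().as_millis() as f64
    );
    res
}
```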
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs
index 3f18d8351930..01ff5abcbd13 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs
@@ -3,7 +3,7 @@ use crate::*;
 use connector_interface::{CompositeWriteOperation, FieldPath, ScalarWriteOperation, WriteOperation};
 use mongodb::bson::doc;
-use prisma_models::{Field, PrismaValue};
+use query_structure::{Field, PrismaValue};
 
 pub(crate) trait IntoUpdateOperation {
     fn into_update_operations(self, field: &Field, path: FieldPath) -> crate::Result<Vec<UpdateOperation>>;
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs
index cdb7be2c601c..0fa814d81af7 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs
@@ -1,6 +1,7 @@
 use super::{expression, into_expression::IntoUpdateExpression};
-use connector_interface::{FieldPath, Filter};
+use connector_interface::FieldPath;
 use mongodb::bson::{doc, Document};
+use query_structure::Filter;
 
 /// `UpdateOperation` is an intermediary AST used to perform preliminary transformations from a `WriteOperation`.
 /// It is meant to be transformed into an `UpdateExpression`.
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs
index f5aa24c6b5d6..c0c43f108aec 100644
--- a/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs
@@ -15,7 +15,7 @@ use mongodb::{
     options::InsertManyOptions,
     ClientSession, Collection, Database,
 };
-use prisma_models::{Model, PrismaValue, SelectionResult};
+use query_structure::{Model, PrismaValue, SelectionResult};
 use std::{collections::HashMap, convert::TryInto};
 use tracing::{info_span, Instrument};
 use update::IntoUpdateDocumentExtension;
diff --git a/query-engine/connectors/mongodb-query-connector/src/value.rs b/query-engine/connectors/mongodb-query-connector/src/value.rs
index cf984ad76830..cf6812d59b6d 100644
--- a/query-engine/connectors/mongodb-query-connector/src/value.rs
+++ b/query-engine/connectors/mongodb-query-connector/src/value.rs
@@ -7,10 +7,10 @@ use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive};
 use chrono::{TimeZone, Utc};
 use itertools::Itertools;
 use mongodb::bson::{oid::ObjectId, spec::BinarySubtype, Binary, Bson, Document, Timestamp};
-use prisma_models::{
+use psl::builtin_connectors::MongoDbType;
+use query_structure::{
     CompositeFieldRef, Field, PrismaValue, RelationFieldRef, ScalarFieldRef, SelectedField, TypeIdentifier,
 };
-use psl::builtin_connectors::MongoDbType;
 use serde_json::Value;
 use std::{convert::TryFrom, fmt::Display};
diff --git a/query-engine/connectors/query-connector/Cargo.toml b/query-engine/connectors/query-connector/Cargo.toml
index d16771aa3daf..7fcc749b3714 100644
--- a/query-engine/connectors/query-connector/Cargo.toml
+++ b/query-engine/connectors/query-connector/Cargo.toml
@@ -9,7 +9,7 @@ async-trait = "0.1.31"
 chrono = {version = "0.4", features = ["serde"]}
 futures = "0.3"
 itertools = "0.10"
-prisma-models = {path = "../../prisma-models"}
+query-structure = {path = "../../query-structure"}
 prisma-value = {path = "../../../libs/prisma-value"}
 serde.workspace = true
 serde_json.workspace = true
diff --git a/query-engine/connectors/query-connector/src/coerce.rs b/query-engine/connectors/query-connector/src/coerce.rs
index 87f04eed4f24..9c09ea5235ec 100644
--- a/query-engine/connectors/query-connector/src/coerce.rs
+++ b/query-engine/connectors/query-connector/src/coerce.rs
@@ -1,4 +1,4 @@
-use prisma_models::PrismaValue;
+use query_structure::PrismaValue;
 
 pub fn coerce_null_to_zero_value(value: PrismaValue) -> PrismaValue {
     if let PrismaValue::Null = value {
diff --git a/query-engine/connectors/query-connector/src/error.rs b/query-engine/connectors/query-connector/src/error.rs
index 96d8d9dcbacb..e34b7668a7dc 100644
--- a/query-engine/connectors/query-connector/src/error.rs
+++ b/query-engine/connectors/query-connector/src/error.rs
@@ -1,6 +1,6 @@
-use crate::filter::Filter;
 use itertools::Itertools;
-use prisma_models::prelude::DomainError;
+use query_structure::prelude::DomainError;
+use query_structure::Filter;
 use std::fmt::Display;
 use thiserror::Error;
 use user_facing_errors::{query_engine::DatabaseConstraint, KnownError};
diff --git a/query-engine/connectors/query-connector/src/interface.rs b/query-engine/connectors/query-connector/src/interface.rs
index 6cfb3fdc5250..aebbdbe563a1 100644
--- a/query-engine/connectors/query-connector/src/interface.rs
+++ b/query-engine/connectors/query-connector/src/interface.rs
@@ -1,7 +1,7 @@
-use crate::{coerce_null_to_zero_value, Filter, NativeUpsert, QueryArguments, WriteArgs};
+use crate::{coerce_null_to_zero_value, NativeUpsert, WriteArgs};
 use async_trait::async_trait;
-use prisma_models::{ast::FieldArity, *};
 use prisma_value::PrismaValue;
+use query_structure::{ast::FieldArity, *};
 use std::collections::HashMap;
 
 #[async_trait]
diff --git a/query-engine/connectors/query-connector/src/lib.rs b/query-engine/connectors/query-connector/src/lib.rs
index b60554c54b50..5488dfaef494 100644
--- a/query-engine/connectors/query-connector/src/lib.rs
+++ b/query-engine/connectors/query-connector/src/lib.rs
@@ -1,20 +1,14 @@
 #![allow(clippy::derive_partial_eq_without_eq)]
 
 pub mod error;
-pub mod filter;
 
 mod coerce;
-mod compare;
 mod interface;
-mod query_arguments;
 mod upsert;
 mod write_args;
 
 pub use coerce::*;
-pub use compare::*;
-pub use filter::*;
 pub use interface::*;
-pub use query_arguments::*;
 pub use upsert::*;
 pub use write_args::*;
diff --git a/query-engine/connectors/query-connector/src/upsert.rs b/query-engine/connectors/query-connector/src/upsert.rs
index 87421511de1e..9455fbc30c49 100644
--- a/query-engine/connectors/query-connector/src/upsert.rs
+++ b/query-engine/connectors/query-connector/src/upsert.rs
@@ -1,5 +1,5 @@
-use crate::{Filter, RecordFilter, WriteArgs};
-use prisma_models::{FieldSelection, Model, ScalarFieldRef};
+use crate::{RecordFilter, WriteArgs};
+use query_structure::{FieldSelection, Filter, Model, ScalarFieldRef};
 
 #[derive(Debug, Clone)]
 pub struct NativeUpsert {
diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/connectors/query-connector/src/write_args.rs
index e75ca288ac0c..e0b030975042 100644
--- a/query-engine/connectors/query-connector/src/write_args.rs
+++ b/query-engine/connectors/query-connector/src/write_args.rs
@@ -1,10 +1,8 @@
-use crate::{
-    error::{ConnectorError, ErrorKind},
-    Filter,
-};
+use crate::error::{ConnectorError, ErrorKind};
 use indexmap::{map::Keys, IndexMap};
-use prisma_models::{
-    CompositeFieldRef, Field, Model, ModelProjection, PrismaValue, ScalarFieldRef, SelectedField, SelectionResult,
+use query_structure::{
+    CompositeFieldRef, Field, Filter, Model, ModelProjection, PrismaValue, ScalarFieldRef, SelectedField,
+    SelectionResult,
 };
 use std::{borrow::Borrow, convert::TryInto, ops::Deref};
diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml
index 5fe3052f2e8d..ba2ff436823f 100644
--- a/query-engine/connectors/sql-query-connector/Cargo.toml
+++ b/query-engine/connectors/sql-query-connector/Cargo.toml
@@ -25,13 +25,14 @@ uuid.workspace = true
 opentelemetry = { version = "0.17", features = ["tokio"] }
 tracing-opentelemetry = "0.17.3"
 quaint.workspace = true
+cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" }
 
 [dependencies.connector-interface]
 package = "query-connector"
 path = "../query-connector"
 
-[dependencies.prisma-models]
-path = "../../prisma-models"
+[dependencies.query-structure]
+path = "../../query-structure"
 
 [dependencies.prisma-value]
 path = "../../../libs/prisma-value"
@@ -44,9 +45,6 @@ version = "0.4"
 features = ["derive"]
 version = "1.0"
 
-[dependencies.cuid]
-version = "1.2"
-
 [dependencies.user-facing-errors]
 features = ["sql"]
 path = "../../../libs/user-facing-errors"
diff --git a/query-engine/connectors/sql-query-connector/src/column_metadata.rs b/query-engine/connectors/sql-query-connector/src/column_metadata.rs
index 7555bbf3331b..c64871b7eb22 100644
--- a/query-engine/connectors/sql-query-connector/src/column_metadata.rs
+++ b/query-engine/connectors/sql-query-connector/src/column_metadata.rs
@@ -1,4 +1,4 @@
-use prisma_models::{FieldArity, TypeIdentifier};
+use query_structure::{FieldArity, TypeIdentifier};
 
 /// Helps dealing with column value conversion and possible error resolution.
 #[derive(Clone, Debug, Copy)]
diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs
index 34373eaf3d5b..d34fd49b2a3d 100644
--- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs
+++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs
@@ -5,10 +5,9 @@ use crate::{
     query_arguments_ext::QueryArgumentsExt,
     Context,
 };
-use connector_interface::QueryArguments;
 use itertools::Itertools;
-use prisma_models::*;
 use quaint::ast::*;
+use query_structure::*;
 
 #[derive(Debug)]
 struct CursorOrderDefinition {
diff --git a/query-engine/connectors/sql-query-connector/src/database/connection.rs b/query-engine/connectors/sql-query-connector/src/database/connection.rs
index 882ae6086ba2..a8d4ed299342 100644
--- a/query-engine/connectors/sql-query-connector/src/database/connection.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/connection.rs
@@ -3,16 +3,16 @@ use crate::{database::operations::*, Context, SqlError};
 use async_trait::async_trait;
 use connector::{ConnectionLike, RelAggregationSelection, RelatedQuery};
 use connector_interface::{
-    self as connector, filter::Filter, AggregationRow, AggregationSelection, Connection, QueryArguments,
-    ReadOperations, RecordFilter, Transaction, WriteArgs, WriteOperations,
+    self as connector, AggregationRow, AggregationSelection, Connection, ReadOperations, RecordFilter, Transaction,
+    WriteArgs, WriteOperations,
 };
-use prisma_models::{prelude::*, SelectionResult};
 use prisma_value::PrismaValue;
 use psl::PreviewFeature;
 use quaint::{
     connector::{IsolationLevel, TransactionCapable},
     prelude::{ConnectionInfo, Queryable},
 };
+use query_structure::{prelude::*, Filter, QueryArguments, SelectionResult};
 use std::{collections::HashMap, str::FromStr};
 
 pub(crate) struct SqlConnection {
diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs
index 3bbaf3e65fa7..d921dbdd3227 100644
--- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs
@@ -9,8 +9,8 @@ use crate::{
 use connector_interface::*;
 use futures::stream::{FuturesUnordered, StreamExt};
-use prisma_models::*;
 use quaint::ast::*;
+use query_structure::*;
 
 pub(crate) async fn get_single_record(
     conn: &dyn Queryable,
diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs
index 2270d6c6fefa..617e02455abd 100644
--- a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs
@@ -8,7 +8,7 @@ use crate::{Context, QueryExt, Queryable};
 use connector_interface::*;
 use itertools::Itertools;
-use prisma_models::*;
+use query_structure::*;
 use std::usize;
 
 /// Performs an update with an explicit selection set.
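The pattern repeated across these SQL-connector files is the same two-step move: `prisma-models` becomes `query-structure`, and `Filter`/`QueryArguments` leave `connector_interface`. Condensed as a before/after (paths as in the connection.rs hunk above):

```rust
// Before (as removed above):
// use connector_interface::{self as connector, filter::Filter, QueryArguments, /* ... */};
// use prisma_models::{prelude::*, SelectionResult};

// After (as added above): the filter and query-argument types now come from
// the renamed crate, alongside everything `prisma_models` used to export.
use query_structure::{prelude::*, Filter, QueryArguments, SelectionResult};
```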
diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs
index cfd473923ffc..f086e4c60798 100644
--- a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs
@@ -7,8 +7,8 @@ use crate::{
     Context, Queryable,
 };
 use connector_interface::NativeUpsert;
-use prisma_models::{ModelProjection, Record, SingleRecord};
 use quaint::prelude::{OnConflict, Query};
+use query_structure::{ModelProjection, Record, SingleRecord};
 
 pub(crate) async fn native_upsert(
     conn: &dyn Queryable,
diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs
index 425f4ac1d4b3..8503e1434001 100644
--- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs
@@ -8,11 +8,11 @@ use crate::{
 };
 use connector_interface::*;
 use itertools::Itertools;
-use prisma_models::*;
 use quaint::{
     error::ErrorKind,
     prelude::{native_uuid, uuid_to_bin, uuid_to_bin_swapped, Aliasable, Select, SqlFamily},
 };
+use query_structure::*;
 use std::{
     collections::{HashMap, HashSet},
     ops::Deref,
diff --git a/query-engine/connectors/sql-query-connector/src/database/transaction.rs b/query-engine/connectors/sql-query-connector/src/database/transaction.rs
index d5fd92cf2606..ba88721e15ca 100644
--- a/query-engine/connectors/sql-query-connector/src/database/transaction.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/transaction.rs
@@ -3,12 +3,12 @@ use crate::{database::operations::*, Context, SqlError};
 use async_trait::async_trait;
 use connector::{ConnectionLike, RelAggregationSelection, RelatedQuery};
 use connector_interface::{
-    self as connector, filter::Filter, AggregationRow, AggregationSelection, QueryArguments, ReadOperations,
-    RecordFilter, Transaction, WriteArgs, WriteOperations,
+    self as connector, AggregationRow, AggregationSelection, ReadOperations, RecordFilter, Transaction, WriteArgs,
+    WriteOperations,
 };
-use prisma_models::{prelude::*, SelectionResult};
 use prisma_value::PrismaValue;
 use quaint::prelude::ConnectionInfo;
+use query_structure::{prelude::*, Filter, QueryArguments, SelectionResult};
 use std::collections::HashMap;
 
 pub struct SqlConnectorTransaction<'tx> {
diff --git a/query-engine/connectors/sql-query-connector/src/error.rs b/query-engine/connectors/sql-query-connector/src/error.rs
index 9744aa0d5ef8..a7770879c510 100644
--- a/query-engine/connectors/sql-query-connector/src/error.rs
+++ b/query-engine/connectors/sql-query-connector/src/error.rs
@@ -1,6 +1,6 @@
-use connector_interface::{error::*, Filter};
-use prisma_models::prelude::DomainError;
+use connector_interface::error::*;
 use quaint::error::ErrorKind as QuaintKind;
+use query_structure::{prelude::DomainError, Filter};
 use std::{any::Any, string::FromUtf8Error};
 use thiserror::Error;
 use user_facing_errors::query_engine::DatabaseConstraint;
@@ -267,8 +267,8 @@ impl SqlError {
     }
 }
 
-impl From<prisma_models::ConversionFailure> for SqlError {
-    fn from(e: prisma_models::ConversionFailure) -> Self {
+impl From<query_structure::ConversionFailure> for SqlError {
+    fn from(e: query_structure::ConversionFailure) -> Self {
         Self::ConversionError(e.into())
     }
 }
diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/connectors/sql-query-connector/src/filter/alias.rs
index 61686929d400..c7a62bba02ab 100644
--- a/query-engine/connectors/sql-query-connector/src/filter/alias.rs
+++ b/query-engine/connectors/sql-query-connector/src/filter/alias.rs
@@ -1,7 +1,7 @@
 use crate::{model_extensions::AsColumn, *};
 
-use prisma_models::ScalarField;
 use quaint::prelude::Column;
+use query_structure::ScalarField;
 
 #[derive(Clone, Copy, Debug)]
 /// A distinction in aliasing to separate the parent table and the joined data
diff --git a/query-engine/connectors/sql-query-connector/src/filter/mod.rs b/query-engine/connectors/sql-query-connector/src/filter/mod.rs
index bbf3557b16b7..b9ae856ef655 100644
--- a/query-engine/connectors/sql-query-connector/src/filter/mod.rs
+++ b/query-engine/connectors/sql-query-connector/src/filter/mod.rs
@@ -1,8 +1,8 @@
 mod alias;
 mod visitor;
 
-use connector_interface::Filter;
 use quaint::prelude::*;
+use query_structure::Filter;
 use visitor::*;
 
 use crate::{context::Context, join_utils::AliasedJoin};
diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs
index 6ab32f89735f..1a71cdd824a8 100644
--- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs
+++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs
@@ -2,11 +2,10 @@ use super::alias::*;
 use crate::join_utils::{compute_one2m_join, AliasedJoin};
 use crate::{model_extensions::*, Context};
 
-use connector_interface::filter::*;
-use prisma_models::prelude::*;
 use psl::datamodel_connector::ConnectorCapability;
 use quaint::ast::concat;
 use quaint::ast::*;
+use query_structure::{filter::*, prelude::*};
 use std::convert::TryInto;
 
 pub(crate) trait FilterVisitorExt {
diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/connectors/sql-query-connector/src/join_utils.rs
index 4b4d2fc8aa24..cedc264f94b2 100644
--- a/query-engine/connectors/sql-query-connector/src/join_utils.rs
+++ b/query-engine/connectors/sql-query-connector/src/join_utils.rs
@@ -1,7 +1,6 @@
 use crate::{filter::FilterBuilder, model_extensions::*, Context};
-use connector_interface::Filter;
-use prisma_models::*;
 use quaint::prelude::*;
+use query_structure::*;
 
 #[derive(Debug, Clone)]
 pub(crate) struct AliasedJoin {
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs
index 445bada9c45c..045587df049b 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs
@@ -1,7 +1,7 @@
 use crate::{model_extensions::ScalarFieldExt, Context};
 use itertools::Itertools;
-use prisma_models::{Field, ModelProjection, RelationField, ScalarField};
 use quaint::ast::{Column, Row};
+use query_structure::{Field, ModelProjection, RelationField, ScalarField};
 
 pub struct ColumnIterator {
     inner: Box<dyn Iterator<Item = Column<'static>> + 'static>,
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs
index 0204046315ad..e764aa8e58ba 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs
@@ -1,6 +1,6 @@
 use crate::{value::to_prisma_value, SqlError};
-use prisma_models::{DomainError, ModelProjection, SelectionResult};
 use quaint::connector::ResultSet;
+use query_structure::{DomainError, ModelProjection, SelectionResult};
 
 pub fn try_convert(model_projection: &ModelProjection, result_set: ResultSet) -> crate::Result<Vec<SelectionResult>> {
     let columns: Vec<String> = result_set.columns().iter().map(|c| c.to_string()).collect();
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs
index 6941546c51e1..981390536807 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs
@@ -2,8 +2,8 @@ use crate::{
     model_extensions::{AsColumns, AsTable, ColumnIterator},
     Context,
 };
-use prisma_models::{walkers, ModelProjection, Relation, RelationField};
 use quaint::{ast::Table, prelude::Column};
+use query_structure::{walkers, ModelProjection, Relation, RelationField};
 
 pub(crate) trait RelationFieldExt {
     fn m2m_columns(&self, ctx: &Context<'_>) -> Vec<Column<'static>>;
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs
index b8ea590f25dc..21612e1a6392 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs
@@ -1,11 +1,11 @@
 use crate::context::Context;
 use chrono::Utc;
-use prisma_models::{ScalarField, TypeIdentifier};
 use prisma_value::PrismaValue;
 use quaint::{
     ast::{EnumName, Value, ValueType},
     prelude::{EnumVariant, TypeDataLength, TypeFamily},
 };
+use query_structure::{ScalarField, TypeIdentifier};
 
 pub(crate) trait ScalarFieldExt {
     fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a>;
@@ -14,7 +14,7 @@ pub(crate) trait ScalarFieldExt {
 
 impl ScalarFieldExt for ScalarField {
     fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a> {
-        match (pv, self.type_identifier()) {
+        let value = match (pv, self.type_identifier()) {
             (PrismaValue::String(s), _) => s.into(),
             (PrismaValue::Float(f), _) => f.into(),
             (PrismaValue::Boolean(b), _) => b.into(),
@@ -76,7 +76,9 @@ impl ScalarFieldExt for ScalarField {
                 TypeIdentifier::Bytes => Value::null_bytes(),
                 TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"),
             },
-        }
+        };
+
+        value.with_native_column_type(self.native_type().map(|nt| nt.name()))
     }
 
     fn type_family(&self) -> TypeFamily {
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs
index 25d994b1d64d..51eb7768d068 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs
@@ -1,7 +1,7 @@
 use super::ScalarFieldExt;
 use crate::context::Context;
-use prisma_models::{PrismaValue, SelectedField, SelectionResult};
 use quaint::Value;
+use query_structure::{PrismaValue, SelectedField, SelectionResult};
 
 pub(crate) trait SelectionResultExt {
     fn misses_autogen_value(&self) -> bool;
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs
index 8217b5e12044..ead15c34658e 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs
@@ -1,6 +1,6 @@
 use crate::{model_extensions::AsColumns, Context};
-use prisma_models::Model;
 use quaint::ast::{Column, Table};
+use query_structure::Model;
 
 pub(crate) fn db_name_with_schema(model: &Model, ctx: &Context<'_>) -> Table<'static> {
     let schema_prefix = model
@@ -32,7 +32,7 @@ impl AsTable for Model {
         self.unique_indexes().fold(table, |table, index| {
             let fields: Vec<_> = index
                 .fields()
-                .map(|f| prisma_models::ScalarFieldRef::from((self.dm.clone(), f)))
+                .map(|f| query_structure::ScalarFieldRef::from((self.dm.clone(), f)))
                 .collect();
             let index: Vec<Column<'static>> = fields.as_columns(ctx).collect();
             table.add_unique_index(index)
diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/connectors/sql-query-connector/src/ordering.rs
index ddda79d27954..310e10ec43d7 100644
--- a/query-engine/connectors/sql-query-connector/src/ordering.rs
+++ b/query-engine/connectors/sql-query-connector/src/ordering.rs
@@ -1,8 +1,7 @@
 use crate::{join_utils::*, model_extensions::*, query_arguments_ext::QueryArgumentsExt, Context};
-use connector_interface::QueryArguments;
 use itertools::Itertools;
-use prisma_models::*;
 use quaint::ast::*;
+use query_structure::*;
 
 static ORDER_JOIN_PREFIX: &str = "orderby_";
 static ORDER_AGGREGATOR_ALIAS: &str = "orderby_aggregator";
diff --git a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs b/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs
index b0319b28b22c..414ab7247c67 100644
--- a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs
@@ -1,4 +1,4 @@
-use connector_interface::QueryArguments;
+use query_structure::QueryArguments;
 
 pub(crate) trait QueryArgumentsExt {
     /// If we need to take rows before a cursor position, then we need to reverse the order in SQL.
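The extension-trait pattern in query_arguments_ext.rs is worth a sketch, since after this change it hangs off `query_structure::QueryArguments` directly. A hedged sketch, assuming `QueryArguments` keeps its `take: Option<i64>` field and that the method follows the rule described by the doc comment above (a negative `take` means "rows before the cursor", so the SQL ordering flips); this is not the file's verbatim contents:

```rust
use query_structure::QueryArguments;

pub(crate) trait QueryArgumentsExt {
    fn needs_reversed_order(&self) -> bool;
}

impl QueryArgumentsExt for QueryArguments {
    fn needs_reversed_order(&self) -> bool {
        // `take: Some(-n)` asks for the n rows *before* the cursor position.
        self.take.map(|t| t < 0).unwrap_or(false)
    }
}
```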
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs index bf82725e8937..7f16b84f95fd 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs @@ -4,8 +4,8 @@ pub(crate) mod write; use crate::context::Context; use crate::model_extensions::SelectionResultExt; -use prisma_models::SelectionResult; use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values}; +use query_structure::SelectionResult; const PARAMETER_LIMIT: usize = 2000; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs index 6a77dac88bf5..fb24f9869356 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs @@ -2,12 +2,10 @@ use crate::{ cursor_condition, filter::FilterBuilder, model_extensions::*, nested_aggregations, ordering::OrderByBuilder, sql_trace::SqlTraceComment, Context, }; -use connector_interface::{ - filter::Filter, AggregationSelection, QueryArguments, RelAggregationSelection, RelatedQuery, -}; +use connector_interface::{AggregationSelection, RelAggregationSelection}; use itertools::Itertools; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; use tracing::Span; use super::select; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index b9356842b285..c5bb3e24ddb6 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -1,7 +1,7 @@ use crate::{model_extensions::*, sql_trace::SqlTraceComment, Context}; use connector_interface::{DatasourceFieldName, ScalarWriteOperation, WriteArgs}; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; use std::{collections::HashSet, convert::TryInto}; use tracing::Span; diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index 2dba40dcb7fc..a78145773478 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -4,13 +4,13 @@ use crate::{ value_ext::IntoTypedJsonExtension, ColumnMetadata, Context, SqlRow, ToSqlRow, }; use async_trait::async_trait; -use connector_interface::{filter::Filter, RecordFilter}; +use connector_interface::RecordFilter; use futures::future::FutureExt; use itertools::Itertools; use opentelemetry::trace::TraceContextExt; use opentelemetry::trace::TraceFlags; -use prisma_models::*; use quaint::{ast::*, connector::Queryable}; +use query_structure::*; use serde_json::{Map, Value}; use std::{collections::HashMap, panic::AssertUnwindSafe}; use tracing::{info_span, Span}; diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 250ee7d9420f..6f154b1f77dc 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -2,8 +2,8 @@ use crate::{column_metadata::ColumnMetadata, error::SqlError, value::to_prisma_v use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; use 
connector_interface::{coerce_null_to_zero_value, AggregationResult, AggregationSelection}; -use prisma_models::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; use quaint::{connector::ResultRow, Value, ValueType}; +use query_structure::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; use std::{io, str::FromStr}; use uuid::Uuid; diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs index 4c31fc9eedb9..0929003955f7 100644 --- a/query-engine/connectors/sql-query-connector/src/value.rs +++ b/query-engine/connectors/sql-query-connector/src/value.rs @@ -1,8 +1,8 @@ use crate::row::{sanitize_f32, sanitize_f64}; use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; -use prisma_models::PrismaValue; use quaint::ValueType; +use query_structure::PrismaValue; pub fn to_prisma_value<'a, T: Into>>(qv: T) -> crate::Result { let val = match qv.into() { diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index c9700bb85f19..86f3825cadc3 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -16,7 +16,7 @@ indexmap = { version = "1.7", features = ["serde-1"] } itertools = "0.10" once_cell = "1" petgraph = "0.4" -prisma-models = { path = "../prisma-models", features = ["default_generators"] } +query-structure = { path = "../query-structure", features = ["default_generators"] } opentelemetry = { version = "0.17.0", features = ["rt-tokio", "serialize"] } query-engine-metrics = {path = "../metrics"} serde.workspace = true @@ -29,7 +29,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } tracing-opentelemetry = "0.17.4" user-facing-errors = { path = "../../libs/user-facing-errors" } uuid = "1" -cuid = "1.2" +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } schema = { path = "../schema" } lru = "0.7.7" enumflags2 = "0.7" diff --git a/query-engine/core/src/constants.rs b/query-engine/core/src/constants.rs index 6d185a1c6866..abf320a2969c 100644 --- a/query-engine/core/src/constants.rs +++ b/query-engine/core/src/constants.rs @@ -1,5 +1,5 @@ pub mod custom_types { - use prisma_models::PrismaValue; + use query_structure::PrismaValue; pub const TYPE: &str = "$type"; pub const VALUE: &str = "value"; diff --git a/query-engine/core/src/error.rs b/query-engine/core/src/error.rs index 6ca41f1a9d7b..3a3803bf0d67 100644 --- a/query-engine/core/src/error.rs +++ b/query-engine/core/src/error.rs @@ -1,6 +1,6 @@ use crate::{InterpreterError, QueryGraphBuilderError, RelationViolation, TransactionError}; use connector::error::ConnectorError; -use prisma_models::DomainError; +use query_structure::DomainError; use thiserror::Error; use user_facing_errors::UnknownError; diff --git a/query-engine/core/src/executor/request_context.rs b/query-engine/core/src/executor/request_context.rs index 13d2e7245178..e4f0c7122ee9 100644 --- a/query-engine/core/src/executor/request_context.rs +++ b/query-engine/core/src/executor/request_context.rs @@ -1,5 +1,5 @@ use crate::protocol::EngineProtocol; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; #[derive(Debug)] struct RequestContext { diff --git a/query-engine/core/src/interpreter/error.rs b/query-engine/core/src/interpreter/error.rs index 0660c65adb7e..0a60c5d7848e 100644 --- a/query-engine/core/src/interpreter/error.rs +++ b/query-engine/core/src/interpreter/error.rs @@ -1,6 +1,6 @@ use crate::{QueryGraphBuilderError, 
QueryGraphError}; use connector::error::ConnectorError; -use prisma_models::DomainError; +use query_structure::DomainError; use std::fmt; #[derive(Debug)] diff --git a/query-engine/core/src/interpreter/interpreter_impl.rs b/query-engine/core/src/interpreter/interpreter_impl.rs index 54af1fd87720..f1011b13f8f5 100644 --- a/query-engine/core/src/interpreter/interpreter_impl.rs +++ b/query-engine/core/src/interpreter/interpreter_impl.rs @@ -6,7 +6,7 @@ use super::{ use crate::{Query, QueryResult}; use connector::ConnectionLike; use futures::future::BoxFuture; -use prisma_models::prelude::*; +use query_structure::prelude::*; use std::{collections::HashMap, fmt}; use tracing::Instrument; diff --git a/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs b/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs index a324b499d759..f4c0465412e2 100644 --- a/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs +++ b/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs @@ -1,6 +1,5 @@ -use connector::QueryArguments; use itertools::Itertools; -use prisma_models::{FieldSelection, ManyRecords, Record, SelectionResult}; +use query_structure::*; use std::ops::Deref; #[derive(Debug)] diff --git a/query-engine/core/src/interpreter/query_interpreters/nested_read.rs b/query-engine/core/src/interpreter/query_interpreters/nested_read.rs index 508a40171c8b..2d6cabff4727 100644 --- a/query-engine/core/src/interpreter/query_interpreters/nested_read.rs +++ b/query-engine/core/src/interpreter/query_interpreters/nested_read.rs @@ -1,10 +1,7 @@ use super::{inmemory_record_processor::InMemoryRecordProcessor, read}; use crate::{interpreter::InterpretationResult, query_ast::*}; -use connector::{ - self, filter::Filter, ConditionListValue, ConnectionLike, QueryArguments, RelAggregationRow, - RelAggregationSelection, ScalarCompare, -}; -use prisma_models::{FieldSelection, ManyRecords, PrismaValue, Record, RelationFieldRef, SelectionResult}; +use connector::{self, ConnectionLike, RelAggregationRow, RelAggregationSelection}; +use query_structure::*; use std::collections::HashMap; pub(crate) async fn m2m( diff --git a/query-engine/core/src/interpreter/query_interpreters/read.rs b/query-engine/core/src/interpreter/query_interpreters/read.rs index 00c7360b4134..e14fec7b4884 100644 --- a/query-engine/core/src/interpreter/query_interpreters/read.rs +++ b/query-engine/core/src/interpreter/query_interpreters/read.rs @@ -5,7 +5,7 @@ use connector::{ }; use futures::future::{BoxFuture, FutureExt}; use inmemory_record_processor::InMemoryRecordProcessor; -use prisma_models::ManyRecords; +use query_structure::ManyRecords; use std::collections::HashMap; use user_facing_errors::KnownError; diff --git a/query-engine/core/src/query_ast/mod.rs b/query-engine/core/src/query_ast/mod.rs index 43c73a657238..4b67b0ba4548 100644 --- a/query-engine/core/src/query_ast/mod.rs +++ b/query-engine/core/src/query_ast/mod.rs @@ -5,8 +5,7 @@ pub use read::*; pub use write::*; use crate::ToGraphviz; -use connector::filter::Filter; -use prisma_models::{FieldSelection, Model, SelectionResult}; +use query_structure::{FieldSelection, Filter, Model, SelectionResult}; #[derive(Debug, Clone)] #[allow(clippy::large_enum_variant)] diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index 57f57c3ba1b9..e753ce5e9f86 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs 
@@ -1,9 +1,9 @@ //! Prisma read query AST use super::FilteredQuery; use crate::ToGraphviz; -use connector::{filter::Filter, AggregationSelection, QueryArguments, RelAggregationSelection}; +use connector::{AggregationSelection, RelAggregationSelection}; use enumflags2::BitFlags; -use prisma_models::prelude::*; +use query_structure::{prelude::*, Filter, QueryArguments}; use std::fmt::Display; #[allow(clippy::enum_variant_names)] diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index 9d02e0d49d17..ee51830e7962 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -1,8 +1,8 @@ //! Write query AST use super::{FilteredNestedMutation, FilteredQuery}; use crate::{RecordQuery, ToGraphviz}; -use connector::{filter::Filter, DatasourceFieldName, NativeUpsert, RecordFilter, WriteArgs}; -use prisma_models::prelude::*; +use connector::{DatasourceFieldName, NativeUpsert, RecordFilter, WriteArgs}; +use query_structure::{prelude::*, Filter}; use std::collections::HashMap; #[derive(Debug, Clone)] diff --git a/query-engine/core/src/query_document/argument_value.rs b/query-engine/core/src/query_document/argument_value.rs index 3d085ee57e77..7629ea73c9fb 100644 --- a/query-engine/core/src/query_document/argument_value.rs +++ b/query-engine/core/src/query_document/argument_value.rs @@ -1,7 +1,7 @@ use bigdecimal::BigDecimal; use chrono::{DateTime, FixedOffset}; use indexmap::IndexMap; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; use serde::Serialize; pub type ArgumentValueObject = IndexMap; diff --git a/query-engine/core/src/query_document/mod.rs b/query-engine/core/src/query_document/mod.rs index 7b86d5e87bea..fa424bc44d6e 100644 --- a/query-engine/core/src/query_document/mod.rs +++ b/query-engine/core/src/query_document/mod.rs @@ -32,7 +32,7 @@ use crate::{ query_ast::{QueryOption, QueryOptions}, query_graph_builder::resolve_compound_field, }; -use prisma_models::Model; +use query_structure::Model; use schema::{constants::*, QuerySchema}; use std::collections::HashMap; use user_facing_errors::query_engine::validation::ValidationError; diff --git a/query-engine/core/src/query_document/parse_ast.rs b/query-engine/core/src/query_document/parse_ast.rs index b9d1e8f68168..256763f59e68 100644 --- a/query-engine/core/src/query_document/parse_ast.rs +++ b/query-engine/core/src/query_document/parse_ast.rs @@ -2,7 +2,7 @@ //! Structures represent parsed and validated parts of the query document, used by the query builders. 
use crate::QueryParserResult; use indexmap::IndexMap; -use prisma_models::{OrderBy, PrismaValue, ScalarFieldRef}; +use query_structure::{OrderBy, PrismaValue, ScalarFieldRef}; use schema::ObjectTag; use std::{ borrow::Cow, diff --git a/query-engine/core/src/query_document/parser.rs b/query-engine/core/src/query_document/parser.rs index 58a814692271..79f30e1bd8b7 100644 --- a/query-engine/core/src/query_document/parser.rs +++ b/query-engine/core/src/query_document/parser.rs @@ -4,7 +4,7 @@ use bigdecimal::{BigDecimal, ToPrimitive}; use chrono::prelude::*; use core::fmt; use indexmap::{IndexMap, IndexSet}; -use prisma_models::{DefaultKind, PrismaValue, ValueGeneratorFn}; +use query_structure::{DefaultKind, PrismaValue, ValueGeneratorFn}; use std::{borrow::Cow, convert::TryFrom, rc::Rc, str::FromStr}; use user_facing_errors::query_engine::validation::ValidationError; use uuid::Uuid; @@ -414,7 +414,7 @@ impl QueryDocumentParser { argument_path: &Path, s: &str, ) -> QueryParserResult> { - prisma_models::parse_datetime(s).map_err(|err| { + query_structure::parse_datetime(s).map_err(|err| { ValidationError::invalid_argument_value( selection_path.segments(), argument_path.segments(), @@ -426,15 +426,17 @@ impl QueryDocumentParser { } fn parse_bytes(&self, selection_path: &Path, argument_path: &Path, s: String) -> QueryParserResult { - prisma_models::decode_bytes(&s).map(PrismaValue::Bytes).map_err(|err| { - ValidationError::invalid_argument_value( - selection_path.segments(), - argument_path.segments(), - s.to_string(), - "base64 String", - Some(Box::new(err)), - ) - }) + query_structure::decode_bytes(&s) + .map(PrismaValue::Bytes) + .map_err(|err| { + ValidationError::invalid_argument_value( + selection_path.segments(), + argument_path.segments(), + s.to_string(), + "base64 String", + Some(Box::new(err)), + ) + }) } fn parse_decimal( @@ -782,7 +784,7 @@ pub(crate) mod conversions { schema::{InputType, OutputType}, ArgumentValue, }; - use prisma_models::PrismaValue; + use query_structure::PrismaValue; use schema::InnerOutputType; use user_facing_errors::query_engine::validation::{self, InputTypeDescription}; diff --git a/query-engine/core/src/query_document/transformers.rs b/query-engine/core/src/query_document/transformers.rs index 8d5894e9bbc8..20296b7ff04f 100644 --- a/query-engine/core/src/query_document/transformers.rs +++ b/query-engine/core/src/query_document/transformers.rs @@ -7,7 +7,7 @@ use super::*; use bigdecimal::ToPrimitive; use chrono::prelude::*; -use prisma_models::{OrderBy, PrismaValue, ScalarFieldRef}; +use query_structure::{OrderBy, PrismaValue, ScalarFieldRef}; use std::convert::TryInto; use user_facing_errors::query_engine::validation::ValidationError; diff --git a/query-engine/core/src/query_graph/mod.rs b/query-engine/core/src/query_graph/mod.rs index e51dcea97110..6086fa243331 100644 --- a/query-engine/core/src/query_graph/mod.rs +++ b/query-engine/core/src/query_graph/mod.rs @@ -10,7 +10,6 @@ use crate::{ interpreter::ExpressionResult, FilteredQuery, ManyRecordsQuery, Query, QueryGraphBuilderResult, QueryOptions, ReadQuery, }; -use connector::{IntoFilter, QueryArguments}; use guard::*; use itertools::Itertools; use petgraph::{ @@ -18,7 +17,7 @@ use petgraph::{ visit::{EdgeRef as PEdgeRef, NodeIndexable}, *, }; -use prisma_models::{FieldSelection, SelectionResult}; +use query_structure::{FieldSelection, IntoFilter, QueryArguments, SelectionResult}; use std::{collections::HashSet, fmt}; pub type QueryGraphResult = std::result::Result; diff --git 
a/query-engine/core/src/query_graph_builder/error.rs b/query-engine/core/src/query_graph_builder/error.rs index 6fa1b82001a9..825b312bbbf5 100644 --- a/query-engine/core/src/query_graph_builder/error.rs +++ b/query-engine/core/src/query_graph_builder/error.rs @@ -1,5 +1,5 @@ use crate::QueryGraphError; -use prisma_models::{DomainError, RelationFieldRef}; +use query_structure::{DomainError, RelationFieldRef}; use user_facing_errors::query_engine::validation::ValidationError; #[derive(Debug)] diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs b/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs index 9bb26c388894..66bd7c44ebb0 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs @@ -1,7 +1,6 @@ use super::extract_filter; use crate::{ParsedInputMap, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult}; -use connector::{CompositeCompare, Filter}; -use prisma_models::{CompositeFieldRef, PrismaValue}; +use query_structure::{CompositeCompare, CompositeFieldRef, Filter, PrismaValue}; use schema::{constants::filters, ObjectTag}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs b/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs index 46ef17314c3a..3438ecec8477 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs @@ -1,4 +1,4 @@ -use connector::Filter; +use query_structure::Filter; pub fn fold_filter(filter: Filter) -> Filter { match filter { diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs b/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs index e15aca250668..cb9e4e7f8025 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs @@ -9,15 +9,10 @@ use crate::{ query_document::{ParsedInputMap, ParsedInputValue}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{ - filter::Filter, CompositeCompare, QueryMode, RelationCompare, ScalarCompare, ScalarCondition, ScalarProjection, -}; use filter_fold::*; use filter_grouping::*; use indexmap::IndexMap; -use prisma_models::{ - prelude::ParentContainer, CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, -}; +use query_structure::{prelude::ParentContainer, *}; use schema::constants::filters; use std::{borrow::Cow, collections::HashMap, convert::TryInto, str::FromStr}; diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs b/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs index e3df0144f67b..47ec7ab9d193 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs @@ -1,7 +1,6 @@ use super::extract_filter; use crate::{ParsedInputMap, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult}; -use connector::{Filter, RelationCompare}; -use prisma_models::RelationFieldRef; +use query_structure::*; use schema::constants::filters; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs b/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs index 
0c3a100b7af9..ac84ce06aa21 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs @@ -1,9 +1,5 @@ use crate::{ParsedInputMap, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult}; -use connector::{ - ConditionListValue, ConditionValue, Filter, JsonCompare, JsonFilterPath, JsonTargetType, ScalarCompare, - ScalarListCompare, -}; -use prisma_models::{prelude::ParentContainer, Field, PrismaValue, ScalarFieldRef, TypeIdentifier}; +use query_structure::{prelude::ParentContainer, *}; use schema::constants::{aggregations, filters, json_null}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs index f4c890aa7d00..e42cdaca63f9 100644 --- a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs +++ b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs @@ -3,8 +3,7 @@ use crate::{ query_document::{ParsedArgument, ParsedInputMap}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::QueryArguments; -use prisma_models::prelude::*; +use query_structure::{prelude::*, QueryArguments}; use schema::constants::{aggregations, args, ordering}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/utils.rs b/query-engine/core/src/query_graph_builder/extractors/utils.rs index d216db2706fb..b05720d4f727 100644 --- a/query-engine/core/src/query_graph_builder/extractors/utils.rs +++ b/query-engine/core/src/query_graph_builder/extractors/utils.rs @@ -1,4 +1,4 @@ -use prisma_models::{Model, ScalarFieldRef}; +use query_structure::{Model, ScalarFieldRef}; /// Attempts to resolve a field name to a compound field. 
pub fn resolve_compound_field(name: &str, model: &Model) -> Option<Vec<ScalarFieldRef>> { diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs b/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs index 91e5d1169aa3..6a6332dbd868 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::ParsedField, AggregateRecordsQuery}; -use prisma_models::Model; +use query_structure::Model; pub(crate) fn aggregate(field: ParsedField<'_>, model: Model) -> QueryGraphBuilderResult<ReadQuery> { let name = field.name; diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs b/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs index 167c35f78bf4..5b821b460003 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs @@ -1,7 +1,6 @@ use super::*; use crate::{query_document::ParsedField, AggregateRecordsQuery, ArgumentListLookup, ParsedInputValue, ReadQuery}; -use connector::Filter; -use prisma_models::{Model, OrderBy, ScalarFieldRef}; +use query_structure::{Filter, Model, OrderBy, ScalarFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs index 43354d708072..94e8b1bcbdc9 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs @@ -8,7 +8,7 @@ use super::*; use crate::FieldPair; use connector::AggregationSelection; use itertools::Itertools; -use prisma_models::{Model, ScalarFieldRef}; +use query_structure::{Model, ScalarFieldRef}; use schema::constants::aggregations::*; /// Resolves the given field as an aggregation query.
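The hunks above all follow one mechanical pattern: domain types that used to come from `prisma_models` (`Model`, `ScalarFieldRef`, `OrderBy`, ...) and filter types that used to come from `connector` (`Filter`, ...) are now imported from the single `query_structure` crate. A minimal, self-contained sketch of how such a consolidation is typically wired up with `pub use` re-exports (module names here are illustrative, not the actual crate layout):

```rust
// Sketch: a facade module re-exports items that previously lived in two
// separate places, so downstream code needs only one `use` path.
mod domain {
    #[derive(Debug)]
    pub struct Model {
        pub name: String,
    }
}

mod filters {
    #[derive(Debug)]
    pub enum Filter {
        And(Vec<Filter>),
        Empty,
    }
}

// The facade playing the role of `query_structure`.
mod query_structure {
    pub use crate::domain::Model;
    pub use crate::filters::Filter;
}

// Downstream code now has a single import path for both kinds of types.
use crate::query_structure::{Filter, Model};

fn main() {
    let model = Model { name: "User".into() };
    let filter = Filter::And(vec![Filter::Empty]);
    println!("{model:?} / {filter:?}");
}
```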
diff --git a/query-engine/core/src/query_graph_builder/read/first.rs b/query-engine/core/src/query_graph_builder/read/first.rs index c68969b07b1f..84c90016858a 100644 --- a/query-engine/core/src/query_graph_builder/read/first.rs +++ b/query-engine/core/src/query_graph_builder/read/first.rs @@ -1,4 +1,4 @@ -use prisma_models::Model; +use query_structure::Model; use super::*; use crate::ParsedField; diff --git a/query-engine/core/src/query_graph_builder/read/many.rs b/query-engine/core/src/query_graph_builder/read/many.rs index b84b546dcfd0..6c9242330a83 100644 --- a/query-engine/core/src/query_graph_builder/read/many.rs +++ b/query-engine/core/src/query_graph_builder/read/many.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::ParsedField, ManyRecordsQuery, QueryOption, QueryOptions, ReadQuery}; -use prisma_models::Model; +use query_structure::Model; pub(crate) fn find_many(field: ParsedField<'_>, model: Model) -> QueryGraphBuilderResult<ReadQuery> { find_many_with_options(field, model, QueryOptions::none()) diff --git a/query-engine/core/src/query_graph_builder/read/one.rs b/query-engine/core/src/query_graph_builder/read/one.rs index edb7425bc20d..d71c2535bb2f 100644 --- a/query-engine/core/src/query_graph_builder/read/one.rs +++ b/query-engine/core/src/query_graph_builder/read/one.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::*, QueryOption, QueryOptions, ReadQuery, RecordQuery}; -use prisma_models::Model; +use query_structure::Model; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/read/related.rs b/query-engine/core/src/query_graph_builder/read/related.rs index c8beb952e02b..9c73699b0477 100644 --- a/query-engine/core/src/query_graph_builder/read/related.rs +++ b/query-engine/core/src/query_graph_builder/read/related.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::ParsedField, ReadQuery, RelatedRecordsQuery}; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Model, RelationFieldRef}; pub(crate) fn find_related( field: ParsedField<'_>, diff --git a/query-engine/core/src/query_graph_builder/read/utils.rs b/query-engine/core/src/query_graph_builder/read/utils.rs index 234a50d6f09f..545393ba3d15 100644 --- a/query-engine/core/src/query_graph_builder/read/utils.rs +++ b/query-engine/core/src/query_graph_builder/read/utils.rs @@ -1,7 +1,7 @@ use super::*; use crate::{ArgumentListLookup, FieldPair, ParsedField, ReadQuery}; use connector::RelAggregationSelection; -use prisma_models::prelude::*; +use query_structure::prelude::*; use schema::constants::{aggregations::*, args}; pub fn collect_selection_order(from: &[FieldPair<'_>]) -> Vec<String> { diff --git a/query-engine/core/src/query_graph_builder/write/connect.rs b/query-engine/core/src/query_graph_builder/write/connect.rs index 7df971de5e6f..03e681477cac 100644 --- a/query-engine/core/src/query_graph_builder/write/connect.rs +++ b/query-engine/core/src/query_graph_builder/write/connect.rs @@ -3,7 +3,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use prisma_models::RelationFieldRef; +use query_structure::RelationFieldRef; /// Only for many to many relations.
/// diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 7707f4818514..59661c6c16b3 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -4,9 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ArgumentListLookup, ParsedField, ParsedInputList, ParsedInputMap, }; -use connector::IntoFilter; -use prisma_models::Model; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{IntoFilter, Model}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; use write_args_parser::*; diff --git a/query-engine/core/src/query_graph_builder/write/delete.rs b/query-engine/core/src/query_graph_builder/write/delete.rs index a5bca5af7758..df6a66436022 100644 --- a/query-engine/core/src/query_graph_builder/write/delete.rs +++ b/query-engine/core/src/query_graph_builder/write/delete.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Node, QueryGraph, QueryGraphDependency}, ArgumentListLookup, FilteredQuery, ParsedField, }; -use connector::filter::Filter; -use prisma_models::Model; +use query_structure::{Filter, Model}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/disconnect.rs b/query-engine/core/src/query_graph_builder/write/disconnect.rs index fea9d6f4f140..e354e9dc5400 100644 --- a/query-engine/core/src/query_graph_builder/write/disconnect.rs +++ b/query-engine/core/src/query_graph_builder/write/disconnect.rs @@ -3,7 +3,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use prisma_models::RelationFieldRef; +use query_structure::RelationFieldRef; /// Only for many to many relations. /// diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs index 6052778037d0..81038c18a57e 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs @@ -4,9 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, QueryResult, }; -use connector::{Filter, IntoFilter}; use itertools::Itertools; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Filter, IntoFilter, Model, RelationFieldRef}; use std::convert::TryInto; /// Handles nested connect cases. 
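Several of the write-path hunks above move `IntoFilter` from `connector` into `query_structure`. For context, `IntoFilter` is the conversion that turns already-selected field values into the filter a write node uses to re-find its records. A rough sketch with simplified stand-in types (the real `Filter` and `SelectionResult` are considerably richer):

```rust
// Illustrative shapes only: the real trait and types live in `query_structure`.
#[derive(Debug)]
enum Filter {
    Equals { field: String, value: String },
    And(Vec<Filter>),
}

trait IntoFilter {
    fn filter(self) -> Filter;
}

// Stand-in for a `SelectionResult`: (field, value) pairs identifying a record.
struct SelectionResult(Vec<(String, String)>);

impl IntoFilter for SelectionResult {
    // Each selected (field, value) pair becomes an equality condition,
    // and the pairs are AND-ed together.
    fn filter(self) -> Filter {
        Filter::And(
            self.0
                .into_iter()
                .map(|(field, value)| Filter::Equals { field, value })
                .collect(),
        )
    }
}

fn main() {
    let selection = SelectionResult(vec![("id".into(), "1".into())]);
    println!("{:?}", selection.filter());
}
```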
diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs index bbe63701cbd2..bcaacc1f5811 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, Computation, ParsedInputMap, ParsedInputValue, }; -use connector::{Filter, IntoFilter}; -use prisma_models::{Model, RelationFieldRef, SelectionResult}; +use query_structure::{Filter, IntoFilter, Model, RelationFieldRef, SelectionResult}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs index 80229d018515..d0f649c3ecf6 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs @@ -5,8 +5,7 @@ use crate::{ write::write_args_parser::WriteArgsParser, ParsedInputList, ParsedInputValue, }; -use connector::{Filter, IntoFilter}; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Filter, IntoFilter, Model, RelationFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs index 875902a7ecad..ceed2b578b03 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs @@ -4,8 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, }; -use connector::{Filter, RecordFilter}; -use prisma_models::{Model, PrismaValue, RelationFieldRef}; +use connector::RecordFilter; +use query_structure::{Filter, Model, PrismaValue, RelationFieldRef}; use std::convert::TryInto; /// Adds a delete (single) record node to the graph and connects it to the parent. diff --git a/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs index 1b4c208a7855..841b7722eb28 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs @@ -3,9 +3,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, Query, WriteQuery, }; -use connector::{Filter, RelationCompare}; use itertools::Itertools; -use prisma_models::{Model, PrismaValue, RelationFieldRef, SelectionResult}; +use query_structure::{Filter, Model, PrismaValue, RelationCompare, RelationFieldRef, SelectionResult}; use std::convert::TryInto; /// Handles nested disconnect cases. 
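Note that the `delete_nested.rs` hunk above keeps `RecordFilter` in `connector` while `Filter` moves to `query_structure`: the refactor separates pure domain and filter types from execution-layer types that connectors consume. A sketch of that boundary, assuming a simplified `RecordFilter` that pairs a filter with optionally pre-resolved record selectors (field shapes are illustrative, not the actual definitions):

```rust
// Domain-layer filter (would come from `query_structure`).
#[derive(Debug, Clone)]
enum Filter {
    Equals { field: String, value: String },
}

// Execution-layer wrapper (would stay in `connector`): carries either a plain
// filter, or record identifiers that earlier graph nodes already resolved.
#[derive(Debug)]
struct RecordFilter {
    filter: Filter,
    selectors: Option<Vec<Vec<(String, String)>>>,
}

impl From<Filter> for RecordFilter {
    fn from(filter: Filter) -> Self {
        RecordFilter { filter, selectors: None }
    }
}

fn main() {
    let f = Filter::Equals { field: "id".into(), value: "1".into() };
    let record_filter: RecordFilter = f.into();
    println!("{record_filter:?}");
}
```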
diff --git a/query-engine/core/src/query_graph_builder/write/nested/mod.rs b/query-engine/core/src/query_graph_builder/write/nested/mod.rs index 7f8049c76433..5d0ad21a4c7e 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/mod.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/mod.rs @@ -17,7 +17,7 @@ use connect_or_create_nested::*; use create_nested::*; use delete_nested::*; use disconnect_nested::*; -use prisma_models::RelationFieldRef; +use query_structure::RelationFieldRef; use schema::{constants::operations, QuerySchema}; use set_nested::*; use update_nested::*; diff --git a/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs index 0f71bc2816aa..67fa227aa2c6 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs @@ -1,8 +1,7 @@ use super::*; use crate::{query_ast::*, query_graph::*, ParsedInputValue}; -use connector::Filter; use itertools::Itertools; -use prisma_models::{Model, RelationFieldRef, SelectionResult}; +use query_structure::{Filter, Model, RelationFieldRef, SelectionResult}; use std::convert::TryInto; /// Only for x-to-many relations. diff --git a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs index bca227dd3a08..78bf69af2f79 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputValue, }; -use connector::Filter; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Filter, Model, RelationFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs index 006c0b516099..0e72e1fa141c 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs @@ -5,8 +5,7 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, }; -use connector::Filter; -use prisma_models::RelationFieldRef; +use query_structure::{Filter, RelationFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/raw.rs b/query-engine/core/src/query_graph_builder/write/raw.rs index 80835b9691cb..af44770aa3f0 100644 --- a/query-engine/core/src/query_graph_builder/write/raw.rs +++ b/query-engine/core/src/query_graph_builder/write/raw.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_ast::*, query_graph::QueryGraph, ParsedField}; -use prisma_models::{Model, PrismaValue}; +use query_structure::{Model, PrismaValue}; use std::{collections::HashMap, convert::TryInto}; pub(crate) fn execute_raw(graph: &mut QueryGraph, field: ParsedField<'_>) -> QueryGraphBuilderResult<()> { diff --git a/query-engine/core/src/query_graph_builder/write/update.rs b/query-engine/core/src/query_graph_builder/write/update.rs index 847efc38410c..001e2b48a96d 100644 --- a/query-engine/core/src/query_graph_builder/write/update.rs +++ b/query-engine/core/src/query_graph_builder/write/update.rs @@ -5,9 +5,8 @@ use crate::{ 
query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ArgumentListLookup, ParsedField, ParsedInputMap, }; -use connector::{Filter, IntoFilter}; -use prisma_models::Model; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{Filter, IntoFilter, Model}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/upsert.rs b/query-engine/core/src/query_graph_builder/write/upsert.rs index 0a01e43e73c0..92fcd6d12efd 100644 --- a/query-engine/core/src/query_graph_builder/write/upsert.rs +++ b/query-engine/core/src/query_graph_builder/write/upsert.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Flow, Node, QueryGraph, QueryGraphDependency}, ParsedField, ParsedInputMap, ParsedInputValue, ParsedObject, }; -use connector::IntoFilter; -use prisma_models::Model; +use query_structure::{IntoFilter, Model}; use schema::QuerySchema; /// Handles a top-level upsert diff --git a/query-engine/core/src/query_graph_builder/write/utils.rs b/query-engine/core/src/query_graph_builder/write/utils.rs index 113e09e39230..2f2e736aedaf 100644 --- a/query-engine/core/src/query_graph_builder/write/utils.rs +++ b/query-engine/core/src/query_graph_builder/write/utils.rs @@ -3,10 +3,10 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, Computation, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{DatasourceFieldName, Filter, RecordFilter, WriteArgs, WriteOperation}; +use connector::{DatasourceFieldName, RecordFilter, WriteArgs, WriteOperation}; use indexmap::IndexMap; -use prisma_models::{FieldSelection, Model, PrismaValue, RelationFieldRef, SelectionResult}; use psl::parser_database::ReferentialAction; +use query_structure::{FieldSelection, Filter, Model, PrismaValue, RelationFieldRef, SelectionResult}; use schema::QuerySchema; /// Coerces single values (`ParsedInputValue::Single` and `ParsedInputValue::Map`) into a vector. 
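The trailing doc comment in the `write/utils.rs` hunk above ("Coerces single values ... into a vector") describes a small normalization helper. A sketch of the idea with a simplified `ParsedInputValue` (the real enum has more variants and payload types):

```rust
// Illustrative enum: single scalar, object-like map, or list of values.
#[derive(Debug)]
enum ParsedInputValue {
    Single(String),
    Map(Vec<(String, String)>),
    List(Vec<ParsedInputValue>),
}

// Lists pass through untouched; any single value or map is wrapped in a Vec,
// so callers can treat every input uniformly as a list.
fn coerce_vec(val: ParsedInputValue) -> Vec<ParsedInputValue> {
    match val {
        ParsedInputValue::List(vals) => vals,
        single => vec![single],
    }
}

fn main() {
    let map = ParsedInputValue::Map(vec![("name".into(), "Ada".into())]);
    assert_eq!(coerce_vec(map).len(), 1);

    let list = ParsedInputValue::List(vec![
        ParsedInputValue::Single("a".into()),
        ParsedInputValue::Single("b".into()),
    ]);
    assert_eq!(coerce_vec(list).len(), 2);
}
```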
diff --git a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs index c5473065ac60..255247e4cee9 100644 --- a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs +++ b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs @@ -1,7 +1,7 @@ use super::*; use crate::query_document::{ParsedInputMap, ParsedInputValue}; use connector::{DatasourceFieldName, WriteArgs, WriteOperation}; -use prisma_models::{CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, TypeIdentifier}; +use query_structure::{CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, TypeIdentifier}; use schema::constants::{args, json_null, operations}; use std::{borrow::Cow, convert::TryInto}; diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index bd1fc60dbd7b..fd77628e353c 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -8,7 +8,7 @@ use crate::{ use connector::{AggregationResult, RelAggregationResult, RelAggregationRow}; use indexmap::IndexMap; use itertools::Itertools; -use prisma_models::{CompositeFieldRef, Field, PrismaValue, SelectionResult}; +use query_structure::{CompositeFieldRef, Field, PrismaValue, SelectionResult}; use schema::{ constants::{aggregations::*, output_fields::*}, *, diff --git a/query-engine/core/src/response_ir/ir_serializer.rs b/query-engine/core/src/response_ir/ir_serializer.rs index 4e69b5534e17..d8efa710bff8 100644 --- a/query-engine/core/src/response_ir/ir_serializer.rs +++ b/query-engine/core/src/response_ir/ir_serializer.rs @@ -1,6 +1,6 @@ use super::{internal::serialize_internal, response::*, *}; use crate::{CoreError, ExpressionResult, QueryResult}; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; use schema::{OutputField, QuerySchema}; #[derive(Debug)] diff --git a/query-engine/core/src/response_ir/mod.rs b/query-engine/core/src/response_ir/mod.rs index e53422ff8962..e9a4eeb0c9a4 100644 --- a/query-engine/core/src/response_ir/mod.rs +++ b/query-engine/core/src/response_ir/mod.rs @@ -18,7 +18,7 @@ pub(crate) use ir_serializer::*; use crate::ArgumentValue; use indexmap::IndexMap; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer}; use std::{collections::HashMap, fmt, sync::Arc}; diff --git a/query-engine/core/src/result_ast/mod.rs b/query-engine/core/src/result_ast/mod.rs index 968daa8316f0..a54f333c90a2 100644 --- a/query-engine/core/src/result_ast/mod.rs +++ b/query-engine/core/src/result_ast/mod.rs @@ -1,5 +1,5 @@ use connector::{AggregationRow, RelAggregationRow}; -use prisma_models::{ManyRecords, Model, SelectionResult}; +use query_structure::{ManyRecords, Model, SelectionResult}; #[derive(Debug, Clone)] pub(crate) enum QueryResult { diff --git a/query-engine/dmmf/Cargo.toml b/query-engine/dmmf/Cargo.toml index f4a8bfb6e6f2..367c87be5a60 100644 --- a/query-engine/dmmf/Cargo.toml +++ b/query-engine/dmmf/Cargo.toml @@ -10,7 +10,7 @@ serde.workspace = true serde_json.workspace = true schema = { path = "../schema" } indexmap = { version = "1.7", features = ["serde-1"] } -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure", features = ["default_generators"] } [dev-dependencies] expect-test = "1.2.2" diff --git 
a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs index 8d078719d4fa..c367695150f6 100644 --- a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs +++ b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs @@ -2,11 +2,11 @@ use crate::serialization_ast::datamodel_ast::{ Datamodel, Enum, EnumValue, Field, Function, Model, PrimaryKey, UniqueIndex, }; use bigdecimal::ToPrimitive; -use prisma_models::{dml_default_kind, encode_bytes, DefaultKind, FieldArity, PrismaValue}; use psl::{ parser_database::{walkers, ScalarFieldType}, schema_ast::ast::WithDocumentation, }; +use query_structure::{dml_default_kind, encode_bytes, DefaultKind, FieldArity, PrismaValue}; pub(crate) fn schema_to_dmmf(schema: &psl::ValidatedSchema) -> Datamodel { let mut datamodel = Datamodel { diff --git a/query-engine/driver-adapters/.gitignore b/query-engine/driver-adapters/.gitignore new file mode 100644 index 000000000000..dab5c8905550 --- /dev/null +++ b/query-engine/driver-adapters/.gitignore @@ -0,0 +1,3 @@ +node_modules +adapter-* +driver-adapter-utils diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore new file mode 100644 index 000000000000..37b61ff565c7 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore @@ -0,0 +1,3 @@ +node_modules +pnpm-debug.log +dist/ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json new file mode 100644 index 000000000000..4648887f5063 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -0,0 +1,38 @@ +{ + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "name": "connector-test-kit-executor", + "version": "0.0.1", + "description": "", + "main": "dist/index.mjs", + "module": "dist/index.mjs", + "private": true, + "scripts": { + "build": "tsup ./src/index.ts --format esm --dts" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@libsql/client": "0.3.5", + "@neondatabase/serverless": "0.6.0", + "@planetscale/database": "1.11.0", + "@prisma/adapter-libsql": "workspace:*", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", + "@types/pg": "8.10.2", + "pg": "8.11.3", + "undici": "5.26.5", + "ws": "8.14.2" + }, + "devDependencies": { + "@types/node": "20.5.1", + "tsup": "7.2.0", + "typescript": "5.2.2" + } +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml similarity index 79% rename from query-engine/driver-adapters/js/pnpm-lock.yaml rename to query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index efa3787712e4..d4f9fa09277d 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -4,185 +4,76 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - tsx: - specifier: ^3.12.7 - version: 3.12.7 - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - 
adapter-libsql: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - async-mutex: - specifier: 0.4.0 - version: 0.4.0 - devDependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - - adapter-neon: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - - adapter-pg: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - - adapter-planetscale: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - - connector-test-kit-executor: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - undici: - specifier: ^5.23.0 - version: 5.23.0 - - driver-adapter-utils: - dependencies: - debug: - specifier: ^4.3.4 - version: 4.3.4 - devDependencies: - '@types/debug': - specifier: ^4.1.8 - version: 4.1.8 - - smoke-test-js: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/client': - specifier: 5.4.1 - version: 5.4.1(prisma@5.4.1) - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - pg: - specifier: ^8.11.3 - version: 8.11.3 - superjson: - specifier: ^1.13.1 - version: 1.13.1 - undici: - specifier: ^5.23.0 - version: 5.23.0 - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - cross-env: - specifier: ^7.0.3 - version: 7.0.3 - prisma: - specifier: 5.4.1 - version: 5.4.1 - tsx: - specifier: ^3.12.7 - version: 3.12.7 +dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: 
../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.5 + version: 5.26.5 + +devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 packages: - /@esbuild-kit/cjs-loader@2.4.2: - resolution: {integrity: sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} + /@esbuild-kit/cjs-loader@2.4.4: + resolution: {integrity: sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true - /@esbuild-kit/core-utils@3.2.2: - resolution: {integrity: sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA==} + /@esbuild-kit/core-utils@3.3.2: + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} dependencies: esbuild: 0.18.20 source-map-support: 0.5.21 dev: true - /@esbuild-kit/esm-loader@2.5.5: - resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + /@esbuild-kit/esm-loader@2.6.5: + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true /@esbuild/android-arm64@0.18.20: @@ -383,13 +274,18 @@ packages: dev: true optional: true + /@fastify/busboy@2.0.0: + resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} + engines: {node: '>=14'} + dev: false + /@jridgewell/gen-mapping@0.3.3: resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} engines: {node: '>=6.0.0'} dependencies: '@jridgewell/set-array': 1.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@jridgewell/trace-mapping': 0.3.19 + '@jridgewell/trace-mapping': 0.3.20 dev: true /@jridgewell/resolve-uri@3.1.1: @@ -406,8 +302,8 @@ packages: resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} dev: true - /@jridgewell/trace-mapping@0.3.19: - resolution: {integrity: sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==} + /@jridgewell/trace-mapping@0.3.20: + resolution: {integrity: sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==} dependencies: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 @@ -418,24 +314,27 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.23 + libsql: 0.1.34 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate + dev: false - /@libsql/darwin-arm64@0.1.23: - resolution: {integrity: sha512-+V9aoOrZ47iYbY5NrcS0F2bDOCH407QI0wxAtss0CLOcFxlz/T6Nw0ryLK31GabklJQAmOXIyqkumLfz5HT64w==} + /@libsql/darwin-arm64@0.1.34: + resolution: {integrity: sha512-Wv8jvkj/fUAO8DF3A4HaddCMldUUpKcg/WW1sY95FNsSHOxktyxqU80jAp/tCuZ85GQIJozvgSr51/ARIC0gsw==} cpu: [arm64] os: [darwin] requiresBuild: true + dev: false 
optional: true - /@libsql/darwin-x64@0.1.23: - resolution: {integrity: sha512-toHo7s0HiMl4VCIfjhGXDe9bGWWo78eP8fxIbwU6RlaLO6MNV9fjHY/GjTWccWOwyxcT+q6X/kUc957HnoW3bg==} + /@libsql/darwin-x64@0.1.34: + resolution: {integrity: sha512-2NQXD9nUzC08hg7FdcZLq5uTEwGz1KbD7YvUzQb/psO1lO/E/p83wl1es1082+Pp0z5pSPDWQeRTuccD41L+3w==} cpu: [x64] os: [darwin] requiresBuild: true + dev: false optional: true /@libsql/hrana-client@0.5.5: @@ -449,52 +348,76 @@ packages: - bufferutil - encoding - utf-8-validate + dev: false /@libsql/isomorphic-fetch@0.1.10: resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} dependencies: - '@types/node-fetch': 2.6.6 + '@types/node-fetch': 2.6.7 node-fetch: 2.7.0 transitivePeerDependencies: - encoding + dev: false /@libsql/isomorphic-ws@0.1.5: resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} dependencies: - '@types/ws': 8.5.5 + '@types/ws': 8.5.8 ws: 8.14.2 transitivePeerDependencies: - bufferutil - utf-8-validate + dev: false + + /@libsql/linux-arm64-gnu@0.1.34: + resolution: {integrity: sha512-r3dY1FDYZ7eX5HX7HyAoYSqK5FPugj5NSB5Bt/nz+ygBWdXASgSKxkE/RqjJIM59vXwv300iJX9qhR5fXv8sTw==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true - /@libsql/linux-x64-gnu@0.1.23: - resolution: {integrity: sha512-U11LdjayakOj0lQCHDYkTgUfe4Q+7AjZZh8MzgEDF/9l0bmKNI3eFLWA3JD2Xm98yz65lUx95om0WKOKu5VW/w==} + /@libsql/linux-arm64-musl@0.1.34: + resolution: {integrity: sha512-9AE/eNb9eQRcNsLxqtpLJxVEoIMmItrdwqJDImPJtOp10rhp4U0x/9RGKerl9Mg3ObVj676pyhAR2KzyudrOfQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-gnu@0.1.34: + resolution: {integrity: sha512-o8toY1Txstjt13fBhZbFe8sNAW6OaS6qVcp1Bd6bHkCLSBLZ6pjJmwzQN8rFv9QFBPAnaKP3lI4vaOXXw7huTA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-musl@0.1.23: - resolution: {integrity: sha512-8UcCK2sPVzcafHsEmcU5IDp/NxjD6F6JFS5giijsMX5iGgxYQiiwTUMOmSxW0AWBeT4VY5U7G6rG5PC8JSFtfg==} + /@libsql/linux-x64-musl@0.1.34: + resolution: {integrity: sha512-EldEmcAxxNPSCjJ73oFxg81PDDIpDbPqK/QOrhmmGYLvYwrnQtVRUIbARf80JQvcy6bCxOO/Q9dh6wGhnyHyYA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/win32-x64-msvc@0.1.23: - resolution: {integrity: sha512-HAugD66jTmRRRGNMLKRiaFeMOC3mgUsAiuO6NRdRz3nM6saf9e5QqN/Ppuu9yqHHcZfv7VhQ9UGlAvzVK64Itg==} + /@libsql/win32-x64-msvc@0.1.34: + resolution: {integrity: sha512-jnv0qfVMnrVv00r+wUOe6DHrHuao9y1w1lN543cV2J1JdQNJT/eSZzhyZFSlS3T2ZUvXfZfZ5GeL8U18IAID6w==} cpu: [x64] os: [win32] requiresBuild: true + dev: false optional: true /@neon-rs/load@0.0.4: resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false /@neondatabase/serverless@0.6.0: resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} dependencies: '@types/pg': 8.6.6 + dev: false /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -520,73 +443,39 @@ packages: /@planetscale/database@1.11.0: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - - /@prisma/client@5.4.1(prisma@5.4.1): - resolution: {integrity: 
sha512-xyD0DJ3gRNfLbPsC+YfMBBuLJtZKQfy1OD2qU/PZg+HKrr7SO+09174LMeTlWP0YF2wca9LxtVd4HnAiB5ketQ==} - engines: {node: '>=16.13'} - requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - '@prisma/engines-version': 5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f - prisma: 5.4.1 - dev: false - - /@prisma/engines-version@5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f: - resolution: {integrity: sha512-+nUQM/y8C+1GG5Ioeqcu6itFslCfxvQSAUVSMC9XM2G2Fcq0F4Afnp6m0pXF6X6iUBWen7jZBPmM9Qlq4Nr3/A==} dev: false - /@prisma/engines@5.4.1: - resolution: {integrity: sha512-vJTdY4la/5V3N7SFvWRmSMUh4mIQnyb/MNoDjzVbh9iLmEC+uEykj/1GPviVsorvfz7DbYSQC4RiwmlEpTEvGA==} - requiresBuild: true - - /@types/debug@4.1.8: - resolution: {integrity: sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==} + /@types/node-fetch@2.6.7: + resolution: {integrity: sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==} dependencies: - '@types/ms': 0.7.31 - dev: true - - /@types/ms@0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: true - - /@types/node-fetch@2.6.6: - resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==} - dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 form-data: 4.0.0 + dev: false /@types/node@20.5.1: resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} - dev: true - - /@types/node@20.5.9: - resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} - - /@types/node@20.6.5: - resolution: {integrity: sha512-2qGq5LAOTh9izcc0+F+dToFigBWiK1phKPt7rNhOqJSr35y8rlIBjDwGtFSgAI6MGIhjwOVNSQZVdJsZJ2uR1w==} /@types/pg@8.10.2: resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 4.0.1 + dev: false /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 2.2.0 + dev: false - /@types/ws@8.5.5: - resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} + /@types/ws@8.5.8: + resolution: {integrity: sha512-flUksGIQCnJd6sZ1l5dqCEG/ksaoAg/eUwiLAGTJQcfgvZJKF++Ta4bJA6A5aPSJmsr+xlseHn4KLgVlNnvPTg==} dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 + dev: false /any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} @@ -605,14 +494,9 @@ packages: engines: {node: '>=8'} dev: true - /async-mutex@0.4.0: - resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} - dependencies: - tslib: 2.6.2 - dev: false - /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false /balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} @@ -644,9 +528,10 @@ packages: /buffer-writer@2.0.0: 
resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} engines: {node: '>=4'} + dev: false - /bundle-require@4.0.1(esbuild@0.18.20): - resolution: {integrity: sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ==} + /bundle-require@4.0.2(esbuild@0.18.20): + resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: esbuild: '>=0.17' @@ -655,13 +540,6 @@ packages: load-tsconfig: 0.2.5 dev: true - /busboy@1.6.0: - resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} - engines: {node: '>=10.16.0'} - dependencies: - streamsearch: 1.1.0 - dev: false - /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -687,6 +565,7 @@ packages: engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 + dev: false /commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} @@ -697,21 +576,6 @@ packages: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} dev: true - /copy-anything@3.0.5: - resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} - engines: {node: '>=12.13'} - dependencies: - is-what: 4.1.15 - dev: false - - /cross-env@7.0.3: - resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} - engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} - hasBin: true - dependencies: - cross-spawn: 7.0.3 - dev: true - /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} @@ -724,6 +588,7 @@ packages: /data-uri-to-buffer@4.0.1: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} + dev: false /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} @@ -735,14 +600,17 @@ packages: optional: true dependencies: ms: 2.1.2 + dev: true /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dev: false /detect-libc@2.0.2: resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} engines: {node: '>=8'} + dev: false /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} @@ -819,6 +687,7 @@ packages: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 3.2.1 + dev: false /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} @@ -834,12 +703,14 @@ packages: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 + dev: false /formdata-polyfill@4.0.10: resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} engines: {node: '>=12.20.0'} dependencies: 
fetch-blob: 3.2.0 + dev: false /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -858,8 +729,8 @@ packages: engines: {node: '>=10'} dev: true - /get-tsconfig@4.7.0: - resolution: {integrity: sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw==} + /get-tsconfig@4.7.2: + resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==} dependencies: resolve-pkg-maps: 1.0.0 dev: true @@ -944,11 +815,6 @@ packages: engines: {node: '>=8'} dev: true - /is-what@4.1.15: - resolution: {integrity: sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==} - engines: {node: '>=12.13'} - dev: false - /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true @@ -960,20 +826,24 @@ packages: /js-base64@3.7.5: resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false - /libsql@0.1.23: - resolution: {integrity: sha512-Nf/1B2Glxvcnba4jYFhXcaYmicyBA3RRm0LVwBkTl8UWCIDbX+Ad7c1ecrQwixPLPffWOVxKIqyCNTuUHUkVgA==} + /libsql@0.1.34: + resolution: {integrity: sha512-LGofp7z7gi1Td6vu2GxaA4WyvSPEkuFn0f/ePSti1TsAlBU0LWxdk+bj9D8nqswzxiqe5wpAyTLhVzTIYSyXEA==} cpu: [x64, arm64] os: [darwin, linux, win32] dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.1.23 - '@libsql/darwin-x64': 0.1.23 - '@libsql/linux-x64-gnu': 0.1.23 - '@libsql/linux-x64-musl': 0.1.23 - '@libsql/win32-x64-msvc': 0.1.23 + '@libsql/darwin-arm64': 0.1.34 + '@libsql/darwin-x64': 0.1.34 + '@libsql/linux-arm64-gnu': 0.1.34 + '@libsql/linux-arm64-musl': 0.1.34 + '@libsql/linux-x64-gnu': 0.1.34 + '@libsql/linux-x64-musl': 0.1.34 + '@libsql/win32-x64-msvc': 0.1.34 + dev: false /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} @@ -1013,12 +883,14 @@ packages: /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} + dev: false /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 + dev: false /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} @@ -1033,6 +905,7 @@ packages: /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true /mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -1045,6 +918,7 @@ packages: /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} + dev: false /node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} @@ -1056,6 +930,7 @@ packages: optional: true dependencies: whatwg-url: 5.0.0 + dev: false /node-fetch@3.3.2: resolution: {integrity: 
sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} @@ -1064,6 +939,7 @@ packages: data-uri-to-buffer: 4.0.1 fetch-blob: 3.2.0 formdata-polyfill: 4.0.10 + dev: false /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} @@ -1084,6 +960,7 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + dev: false /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -1100,6 +977,7 @@ packages: /packet-reader@1.0.0: resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} + dev: false /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} @@ -1119,18 +997,22 @@ packages: /pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} requiresBuild: true + dev: false optional: true /pg-connection-string@2.6.2: resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + dev: false /pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + dev: false /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} + dev: false /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -1138,9 +1020,11 @@ packages: pg: '>=8.0' dependencies: pg: 8.11.3 + dev: false /pg-protocol@1.6.0: resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + dev: false /pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} @@ -1151,6 +1035,7 @@ packages: postgres-bytea: 1.0.0 postgres-date: 1.0.7 postgres-interval: 1.2.0 + dev: false /pg-types@4.0.1: resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} @@ -1163,6 +1048,7 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 + dev: false /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -1182,11 +1068,13 @@ packages: pgpass: 1.0.5 optionalDependencies: pg-cloudflare: 1.1.1 + dev: false /pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} dependencies: split2: 4.2.0 + dev: false /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} @@ -1211,55 +1099,56 @@ packages: optional: true dependencies: lilconfig: 2.1.0 - yaml: 2.3.2 + yaml: 2.3.3 dev: true /postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} + dev: false /postgres-array@3.0.2: resolution: {integrity: 
sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} + dev: false /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} engines: {node: '>=0.10.0'} + dev: false /postgres-bytea@3.0.0: resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} engines: {node: '>= 6'} dependencies: obuf: 1.1.2 + dev: false /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} engines: {node: '>=0.10.0'} + dev: false /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} + dev: false /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} engines: {node: '>=0.10.0'} dependencies: xtend: 4.0.2 + dev: false /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} + dev: false /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - - /prisma@5.4.1: - resolution: {integrity: sha512-op9PmU8Bcw5dNAas82wBYTG0yHnpq9/O3bhxbDBrNzwZTwBqsVCxxYRLf6wHNh9HVaDGhgjjHlu1+BcW8qdnBg==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - dependencies: - '@prisma/engines': 5.4.1 + dev: false /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} @@ -1291,8 +1180,8 @@ packages: engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rollup@3.28.1: - resolution: {integrity: sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==} + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true optionalDependencies: @@ -1348,10 +1237,6 @@ packages: /split2@4.2.0: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} - - /streamsearch@1.1.0: - resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} - engines: {node: '>=10.0.0'} dev: false /strip-final-newline@2.0.0: @@ -1373,13 +1258,6 @@ packages: ts-interface-checker: 0.1.13 dev: true - /superjson@1.13.1: - resolution: {integrity: sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==} - engines: {node: '>=10'} - dependencies: - copy-anything: 3.0.5 - dev: false - /thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -1402,6 +1280,7 @@ packages: /tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} @@ -1418,10 +1297,6 @@ packages: resolution: {integrity: 
sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tslib@2.6.2: - resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: false - /tsup@7.2.0(typescript@5.1.6): resolution: {integrity: sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} engines: {node: '>=16.14'} @@ -1438,7 +1313,7 @@ packages: typescript: optional: true dependencies: - bundle-require: 4.0.1(esbuild@0.18.20) + bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 chokidar: 3.5.3 debug: 4.3.4 @@ -1448,7 +1323,7 @@ packages: joycon: 3.1.1 postcss-load-config: 4.0.1 resolve-from: 5.0.0 - rollup: 3.28.1 + rollup: 3.29.4 source-map: 0.8.0-beta.0 sucrase: 3.34.0 tree-kill: 1.2.2 @@ -1462,9 +1337,9 @@ packages: resolution: {integrity: sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} hasBin: true dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.2.2 - '@esbuild-kit/esm-loader': 2.5.5 + '@esbuild-kit/cjs-loader': 2.4.4 + '@esbuild-kit/core-utils': 3.3.2 + '@esbuild-kit/esm-loader': 2.6.5 optionalDependencies: fsevents: 2.3.3 dev: true @@ -1475,19 +1350,21 @@ packages: hasBin: true dev: true - /undici@5.23.0: - resolution: {integrity: sha512-1D7w+fvRsqlQ9GscLBwcAJinqcZGHUKjbOmXdlE/v8BvEGXjeWAax+341q44EuTcHXXnfyKNbKRq4Lg7OzhMmg==} + /undici@5.26.5: + resolution: {integrity: sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==} engines: {node: '>=14.0'} dependencies: - busboy: 1.6.0 + '@fastify/busboy': 2.0.0 dev: false /web-streams-polyfill@3.2.1: resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} engines: {node: '>= 8'} + dev: false /webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} @@ -1498,6 +1375,7 @@ packages: dependencies: tr46: 0.0.3 webidl-conversions: 3.0.1 + dev: false /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} @@ -1530,12 +1408,14 @@ packages: optional: true utf-8-validate: optional: true + dev: false /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + dev: false - /yaml@2.3.2: - resolution: {integrity: sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==} + /yaml@2.3.3: + resolution: {integrity: sha512-zw0VAJxgeZ6+++/su5AFoqBbZbrEakwu+X0M5HmcwUiBL7AzcuPKjj5we4xfQLp78LkEMpD0cOnUhmgOVy3KdQ==} engines: {node: '>= 14'} dev: true diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh rename to query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts 
b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts similarity index 87% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index 68664272a6ce..2318c0525760 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -9,18 +9,29 @@ import * as prismaPg from '@prisma/adapter-pg' // neon dependencies import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' -import { WebSocket } from 'undici' +import { fetch } from 'undici' +import { WebSocket } from 'ws' import * as prismaNeon from '@prisma/adapter-neon' // libsql dependencies import { createClient } from '@libsql/client' import { PrismaLibSQL } from '@prisma/adapter-libsql' +// planetscale dependencies +import { Client as PlanetscaleClient } from '@planetscale/database' +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' + + import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; const SUPPORTED_ADAPTERS: Record<string, (url: string) => Promise<DriverAdapter>> - = {"pg": pgAdapter, "neon:ws" : neonWsAdapter, "libsql": libsqlAdapter}; + = { + "pg": pgAdapter, + "neon:ws" : neonWsAdapter, + "libsql": libsqlAdapter, + "planetscale": planetscaleAdapter, + }; // conditional debug logging based on LOG_LEVEL env var const debug = (() => { @@ -225,8 +236,17 @@ async function adapterFromEnv(url: string): Promise<DriverAdapter> { return await SUPPORTED_ADAPTERS[adapter](url) } +function postgres_options(url: string): any { + let args: any = {connectionString: url} + const schemaName = new URL(url).searchParams.get('schema') + if (schemaName != null) { + args.options = `--search_path="${schemaName}"` + } + return args; +} + async function pgAdapter(url: string): Promise<DriverAdapter> { - const pool = new
pgDriver.Pool(postgres_options(url)) return new prismaPg.PrismaPg(pool) } @@ -241,7 +261,7 @@ async function neonWsAdapter(url: string): Promise<DriverAdapter> { neonConfig.useSecureWebSocket = false neonConfig.pipelineConnect = false - const pool = new NeonPool({ connectionString: url }) + const pool = new NeonPool(postgres_options(url)) return new prismaNeon.PrismaNeon(pool) } @@ -250,4 +270,18 @@ async function libsqlAdapter(url: string): Promise<DriverAdapter> { return new PrismaLibSQL(libsql) } +async function planetscaleAdapter(url: string): Promise<DriverAdapter> { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? '' + if (proxyURL == '') { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but it's required for the planetscale adapter."); + } + + const client = new PlanetscaleClient({ + url: proxyURL, + fetch, + }) + + return new PrismaPlanetScale(client) +} + main().catch(err) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts similarity index 92% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts index 764df8f6108d..186d7a9e80d2 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts @@ -10,7 +10,7 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: const libExt = os.platform() === 'darwin' ?
'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + const libQueryEnginePath = path.join(dirname, `../../../../target/debug/libquery_engine.${libExt}`) const libqueryEngine = { exports: {} as unknown as lib.Library } // @ts-ignore diff --git a/query-engine/driver-adapters/js/tsconfig.json b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json similarity index 99% rename from query-engine/driver-adapters/js/tsconfig.json rename to query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json index b405cea50201..516c114b3e15 100644 --- a/query-engine/driver-adapters/js/tsconfig.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json @@ -20,4 +20,4 @@ "resolveJsonModule": true }, "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] -} +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/.gitignore b/query-engine/driver-adapters/js/.gitignore deleted file mode 100644 index e885963af278..000000000000 --- a/query-engine/driver-adapters/js/.gitignore +++ /dev/null @@ -1,44 +0,0 @@ -node_modules - -yarn-error.log -dist -build -tmp -pnpm-debug.log -sandbox -.DS_Store - -query-engine* -migration-engine* -schema-engine* -libquery_engine* -libquery-engine* -query_engine-windows.dll.node - -*tmp.db -dist/ -declaration/ - -*.tsbuildinfo -.prisma -.pnpm-store - -.vscode -!.vscode/launch.json.default -coverage - -.eslintcache - -.pnpm-debug.log - -.envrc - -esm -reproductions/* -!reproductions/basic-sqlite -!reproductions/tracing -!reproductions/pnpm-workspace.yaml - -dev.db -junit.xml -/output.txt diff --git a/query-engine/driver-adapters/js/.npmrc b/query-engine/driver-adapters/js/.npmrc deleted file mode 100644 index c87ec9b9e3d3..000000000000 --- a/query-engine/driver-adapters/js/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -git-checks=false -access=public diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc deleted file mode 100644 index 8c60e1e54f37..000000000000 --- a/query-engine/driver-adapters/js/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -v20.5.1 diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml deleted file mode 100644 index f0beb50a2167..000000000000 --- a/query-engine/driver-adapters/js/.prettierrc.yml +++ /dev/null @@ -1,5 +0,0 @@ -tabWidth: 2 -trailingComma: all -singleQuote: true -semi: false -printWidth: 120 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md deleted file mode 100644 index e5e64c60dfc8..000000000000 --- a/query-engine/driver-adapters/js/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# Prisma Driver Adapters - -This TypeScript monorepo contains the following packages: -- `@prisma/driver-adapter-utils` - - Internal set of utilities and types for Prisma's driver adapters. 
-- `@prisma/adapter-neon` - - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@prisma/adapter-planetscale` - - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - - It uses `provider = "mysql"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@prisma/adapter-pg` - - Prisma's Driver Adapter that wraps the `pg` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` - -## Get Started - -We assume Node.js `v18.16.1`+ is installed. If not, run `nvm use` in the current directory. -Double-checking this is important if you have multiple versions installed, as PlanetScale requires either Node.js `v18.16.1`+ or a custom `fetch` function. - -Install `pnpm` via: - -```sh -npm i -g pnpm -``` - -## Development - -- Install Node.js dependencies via `pnpm i` - Build and link TypeScript packages via `pnpm build` - Publish packages to `npm` via `pnpm publish -r` diff --git a/query-engine/driver-adapters/js/adapter-libsql/.gitignore b/query-engine/driver-adapters/js/adapter-libsql/.gitignore deleted file mode 100644 index c370cb644f95..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.db diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md deleted file mode 100644 index 219200af2080..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# @prisma/adapter-libsql - -Prisma driver adapter for Turso and libSQL. - -See https://prisma.io/turso for details.
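
Editor's note: to make the removed adapter concrete, here is a minimal usage sketch. It is an illustration only, patterned on the test helper deleted later in this diff; `file:test.db` is a placeholder URL, and a remote Turso URL would work the same way.

```ts
import { createClient } from '@libsql/client'
import { PrismaLibSQL } from '@prisma/adapter-libsql'

// Open a local SQLite file through libSQL; remote Turso databases use the
// same client with a libsql:// URL.
const client = createClient({ url: 'file:test.db' })

// Wrap the client in the driver adapter. The adapter implements the generic
// Queryable interface (queryRaw/executeRaw/startTransaction) used by the engine.
const adapter = new PrismaLibSQL(client)

const result = await adapter.queryRaw({ sql: 'SELECT 1 AS n', args: [] })
if (result.ok) {
  // e.g. ['n'] and [[1]]
  console.log(result.value.columnNames, result.value.rows)
}
```
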
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json deleted file mode 100644 index fbce33c98a29..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-libsql", - "version": "0.0.0", - "description": "Prisma's driver adapter for libSQL and Turso", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json", - "test": "node --loader tsx --test tests/*.test.mts" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alexey Orlenko ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "async-mutex": "0.4.0" - }, - "devDependencies": { - "@libsql/client": "0.3.5" - }, - "peerDependencies": { - "@libsql/client": "^0.3.5" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts deleted file mode 100644 index b2fa4b5b4095..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ /dev/null @@ -1,161 +0,0 @@ -import { ColumnTypeEnum, ColumnType, Debug } from '@prisma/driver-adapter-utils' -import { Row, Value } from '@libsql/client' -import { isArrayBuffer } from 'node:util/types' - -const debug = Debug('prisma:driver-adapter:libsql:conversion') - -// Mirrors sqlite/conversion.rs in quaint -function mapDeclType(declType: string): ColumnType | null { - switch (declType.toUpperCase()) { - case '': - return null - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'FLOAT': - return ColumnTypeEnum.Float - case 'DOUBLE': - case 'DOUBLE PRECISION': - case 'NUMERIC': - case 'REAL': - return ColumnTypeEnum.Double - case 'TINYINT': - case 'SMALLINT': - case 'MEDIUMINT': - case 'INT': - case 'INTEGER': - case 'SERIAL': - case 'INT2': - return ColumnTypeEnum.Int32 - case 'BIGINT': - case 'UNSIGNED BIG INT': - case 'INT8': - return ColumnTypeEnum.Int64 - case 'DATETIME': - case 'TIMESTAMP': - return ColumnTypeEnum.DateTime - case 'TIME': - return ColumnTypeEnum.Time - case 'DATE': - return ColumnTypeEnum.Date - case 'TEXT': - case 'CLOB': - case 'CHARACTER': - case 'VARCHAR': - case 'VARYING CHARACTER': - case 'NCHAR': - case 'NATIVE CHARACTER': - case 'NVARCHAR': - return ColumnTypeEnum.Text - case 'BLOB': - return ColumnTypeEnum.Bytes - case 'BOOLEAN': - return ColumnTypeEnum.Boolean - default: - debug('unknown decltype:', declType) - return null - } -} - -function mapDeclaredColumnTypes(columnTypes: string[]): [out: Array<ColumnType | null>, empty: Set<number>] { - const emptyIndices = new Set<number>() - const result = columnTypes.map((typeName, index) => { - const mappedType = mapDeclType(typeName) - if (mappedType === null) { - emptyIndices.add(index) - } - return mappedType - }) - return [result, emptyIndices] -} - -export function getColumnTypes(declaredTypes: string[], rows: Row[]): ColumnType[] { - const [columnTypes, emptyIndices] = mapDeclaredColumnTypes(declaredTypes) - - if (emptyIndices.size === 0) { - return columnTypes as ColumnType[] - } - - columnLoop: for (const columnIndex of emptyIndices) { - // No declared column type in db schema, infer using first non-null value - for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { - const candidateValue = rows[rowIndex][columnIndex] - if (candidateValue !== null) { - columnTypes[columnIndex] = inferColumnType(candidateValue) - continue columnLoop - } - } - - // No non-null value found for this column, fall back to int32 to mimic what quaint does - columnTypes[columnIndex] = ColumnTypeEnum.Int32 - } - - return columnTypes as ColumnType[] -} - -function inferColumnType(value: NonNullable<Value>): ColumnType { - switch (typeof value) { - case 'string': - return ColumnTypeEnum.Text - case 'bigint': - return ColumnTypeEnum.Int64 - case 'boolean': - return ColumnTypeEnum.Boolean - case 'number': - return ColumnTypeEnum.UnknownNumber - case 'object': - return inferObjectType(value) - default: - throw new UnexpectedTypeError(value) - } -} - -function inferObjectType(value: {}): ColumnType { - if (isArrayBuffer(value)) { - return ColumnTypeEnum.Bytes - } - throw new UnexpectedTypeError(value) -} - -class UnexpectedTypeError extends Error { - name = 'UnexpectedTypeError' - constructor(value: unknown) { - const type = typeof value - const repr = type === 'object' ? JSON.stringify(value) : String(value) - super(`unexpected value of type ${type}: ${repr}`) - } -} - -export function mapRow(row: Row, columnTypes: ColumnType[]): unknown[] { - // `Row` doesn't have map, so we copy the array once and modify it in-place - // to avoid allocating and copying twice if we used `Array.from(row).map(...)`. - const result: unknown[] = Array.from(row) - - for (let i = 0; i < result.length; i++) { - const value = result[i] - - // Convert bigint to string as we can only use JSON-encodable types here - if (typeof value === 'bigint') { - result[i] = value.toString() - } - - // Convert array buffers to arrays of bytes. - // Base64 would've been more efficient but would collide with the existing - // logic that treats string values of type Bytes as raw UTF-8 bytes that was - // implemented for other adapters. - if (isArrayBuffer(value)) { - result[i] = Array.from(new Uint8Array(value)) - } - - // If an integer is required and the current number isn't one, - // discard the fractional part.
- if ( - typeof value === 'number' && - (columnTypes[i] === ColumnTypeEnum.Int32 || columnTypes[i] === ColumnTypeEnum.Int64) && - !Number.isInteger(value) - ) { - result[i] = Math.trunc(value) - } - } - - return result -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts deleted file mode 100644 index 04a95cc4cfcd..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaLibSQL } from './libsql' diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts deleted file mode 100644 index 5d104e8e2949..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ /dev/null @@ -1,163 +0,0 @@ -import { Debug, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import type { InStatement, Client as LibSqlClientRaw, Transaction as LibSqlTransactionRaw } from '@libsql/client' -import { Mutex } from 'async-mutex' -import { getColumnTypes, mapRow } from './conversion' - -const debug = Debug('prisma:driver-adapter:libsql') - -type StdClient = LibSqlClientRaw -type TransactionClient = LibSqlTransactionRaw - -const LOCK_TAG = Symbol() - -class LibSqlQueryable<ClientT extends StdClient | TransactionClient> implements Queryable { - readonly flavour = 'sqlite'; - - [LOCK_TAG] = new Mutex() - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise<Result<ResultSet>> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const { columns, rows, columnTypes: declaredColumnTypes } = await this.performIO(query) - - const columnTypes = getColumnTypes(declaredColumnTypes, rows) - - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } - - return ok(resultSet) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise<Result<number>> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - const { rowsAffected } = await this.performIO(query) - return ok(rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy.
- */ - private async performIO(query: Query) { - const release = await this[LOCK_TAG].acquire() - try { - const result = await this.client.execute(query as InStatement) - return result - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error - } finally { - release() - } - } -} - -class LibSqlTransaction extends LibSqlQueryable<TransactionClient> implements Transaction { - finished = false - - constructor( - client: TransactionClient, - readonly options: TransactionOptions, - readonly unlockParent: () => void, - ) { - super(client) - } - - async commit(): Promise<Result<void>> { - debug(`[js::commit]`) - - this.finished = true - - try { - await this.client.commit() - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - async rollback(): Promise<Result<void>> { - debug(`[js::rollback]`) - - this.finished = true - - try { - await this.client.rollback() - } catch (error) { - debug('error in rollback:', error) - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - dispose(): Result<void> { - if (!this.finished) { - this.finished = true - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaLibSQL extends LibSqlQueryable<StdClient> implements DriverAdapter { - constructor(client: StdClient) { - super(client) - } - - async startTransaction(): Promise<Result<Transaction>> { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const release = await this[LOCK_TAG].acquire() - - try { - const tx = await this.client.transaction('deferred') - return ok(new LibSqlTransaction(tx, options, release)) - } catch (e) { - // note: we only release the lock if creating the transaction fails, it must stay locked otherwise, - // hence `catch` and rethrowing the error and not `finally`.
- release() - throw e - } - } - - async close(): Promise<Result<void>> { - await this[LOCK_TAG].acquire() - this.client.close() - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts deleted file mode 100644 index f7f1b474a300..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts +++ /dev/null @@ -1,151 +0,0 @@ -import assert from 'node:assert/strict' -import { describe, it } from 'node:test' -import { Config, createClient } from '@libsql/client' -import { PrismaLibSQL } from '../dist/index.js' -import { ColumnTypeEnum } from '@jkomyno/prisma-driver-adapter-utils' - -function connect(config?: Partial<Config>): PrismaLibSQL { - const client = createClient({ url: 'file:test.db', ...config }) - return new PrismaLibSQL(client) -} - -it('checks declared types', async () => { - const client = connect() - - await client.executeRaw({ - sql: ` - DROP TABLE IF EXISTS types; - `, - args: [], - }) - - await client.executeRaw({ - sql: ` - CREATE TABLE types ( - id INTEGER PRIMARY KEY, - real REAL, - bigint BIGINT, - date DATETIME, - text TEXT, - blob BLOB - ) - `, - args: [], - }) - - const result = await client.queryRaw({ - sql: ` - SELECT * FROM types - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ - ColumnTypeEnum.Int32, - ColumnTypeEnum.Double, - ColumnTypeEnum.Int64, - ColumnTypeEnum.DateTime, - ColumnTypeEnum.Text, - ColumnTypeEnum.Bytes, - ]) -}) - -it('infers types when sqlite decltype is not available', async () => { - const client = connect() - - const result = await client.queryRaw({ - sql: ` - SELECT 1 as first, 'test' as second - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ColumnTypeEnum.Int64, ColumnTypeEnum.Text]) -}) - -describe('int64 with different intMode', () => { - const N = 2n ** 63n - 1n - - it('correctly infers int64 with intMode=number for safe JS integers', async () => { - const client = connect({ intMode: 'number' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [Number.MAX_SAFE_INTEGER], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], Number.MAX_SAFE_INTEGER) - }) - - it("doesn't support very big int64 with intMode=number", async () => { - const client = connect({ intMode: 'number' }) - - assert.rejects( - client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }), - ) - }) - - it('correctly infers int64 with intMode=bigint', async () => { - const client = connect({ intMode: 'bigint' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - - // bigints are converted to strings because we can't currently pass a bigint - // to rust due to a napi.rs limitation - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it('correctly infers int64 with intMode=string when we have decltype', async () => { - const client = connect({ intMode: 'string' }) - - await client.executeRaw({ - sql: `DROP TABLE IF EXISTS test`, - args: [], - }) - - await client.executeRaw({ - sql: `CREATE TABLE test (int64 BIGINT)`, - args: [], - }) - - await client.executeRaw({ - sql: `INSERT INTO test (int64) VALUES (?)`, - args: [N], - }) - - const result = await client.queryRaw({ - sql: `SELECT int64 FROM test`, - args: [], - }) - - assert(result.ok) -
assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it("can't infer int64 with intMode=string without schema", async () => { - const client = connect({ intMode: 'string' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Text) - assert.equal(result.value.rows[0][0], N.toString()) - }) -}) diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md deleted file mode 100644 index 8af259ab74c1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# @prisma/adapter-neon - -Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json deleted file mode 100644 index 03d19f6eeb0d..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@prisma/adapter-neon", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@neondatabase/serverless": "^0.6.0" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts deleted file mode 100644 index 932461e3bc3b..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from '@neondatabase/serverless' - -const NeonColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in NeonColumnType). 
- */ -const ArrayColumnType = { - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case NeonColumnType['INT2']: - case NeonColumnType['INT4']: - return ColumnTypeEnum.Int32 - case NeonColumnType['INT8']: - return ColumnTypeEnum.Int64 - case NeonColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case NeonColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case NeonColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case NeonColumnType['DATE']: - return ColumnTypeEnum.Date - case NeonColumnType['TIME']: - return ColumnTypeEnum.Time - case NeonColumnType['TIMESTAMP']: - return ColumnTypeEnum.DateTime - case NeonColumnType['NUMERIC']: - case NeonColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case NeonColumnType['JSON']: - case NeonColumnType['JSONB']: - return ColumnTypeEnum.Json - case NeonColumnType['UUID']: - return ColumnTypeEnum.Uuid - case NeonColumnType['OID']: - return ColumnTypeEnum.Int64 - case NeonColumnType['BPCHAR']: - case NeonColumnType['TEXT']: - case NeonColumnType['VARCHAR']: - case NeonColumnType['BIT']: - case NeonColumnType['VARBIT']: - case NeonColumnType['INET']: - case NeonColumnType['CIDR']: - case NeonColumnType['XML']: - return ColumnTypeEnum.Text - case NeonColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. 
- * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function convertJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - -// Original BYTEA parser -const parsePgBytes = types.getTypeParser(NeonColumnType.BYTEA) as (_: string) => Buffer - -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -// return string instead of JavaScript Date object -types.setTypeParser(NeonColumnType.TIME, date => date) -types.setTypeParser(NeonColumnType.DATE, date => date) -types.setTypeParser(NeonColumnType.TIMESTAMP, date => date) -types.setTypeParser(NeonColumnType.JSONB, convertJson) -types.setTypeParser(NeonColumnType.JSON, convertJson) -types.setTypeParser(NeonColumnType.MONEY, money => money.slice(1)) -types.setTypeParser(NeonColumnType.BYTEA, convertBytes) - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) -}) - -const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] - -types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) - -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => - parseTextArray(moneyArray).map((money) => money.slice(1)), -) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/index.ts b/query-engine/driver-adapters/js/adapter-neon/src/index.ts deleted file mode 100644 index f160d413ade0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaNeon, PrismaNeonHTTP } from './neon' diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts deleted file mode 100644 index c86b8d88bef0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ /dev/null @@ -1,168 +0,0 @@ -import type neon from '@neondatabase/serverless' -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:neon') - -type ARRAY_MODE_ENABLED = true - -type PerformIOResult = neon.QueryResult<any> | neon.FullQueryResults<ARRAY_MODE_ENABLED> - -/** - * Base class for http client, ws client and ws transaction - */
-abstract class NeonQueryable implements Queryable { - readonly flavour = 'postgres' - - async queryRaw(query: Query): Promise<Result<ResultSet>> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - async executeRaw(query: Query): Promise<Result<number>> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map((r) => r.rowCount ?? 0) - } - - abstract performIO(query: Query): Promise<Result<PerformIOResult>> -} - -/** - * Base class for WS-based queryables: top-level client and transaction - */ -class NeonWsQueryable<ClientT extends neon.Pool | neon.PoolClient> extends NeonQueryable { - constructor(protected client: ClientT) { - super() - } - - override async performIO(query: Query): Promise<Result<PerformIOResult>> { - const { sql, args: values } = query - - try { - return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) - } catch (e) { - debug('Error in performIO: %O', e) - if (e && e.code) { - return err({ - kind: 'PostgresError', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw e - } - } -} - -class NeonTransaction extends NeonWsQueryable<neon.PoolClient> implements Transaction { - finished = false - - constructor( - client: neon.PoolClient, - readonly options: TransactionOptions, - ) { - super(client) - } - - async commit(): Promise<Result<void>> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - async rollback(): Promise<Result<void>> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - dispose(): Result<void> { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaNeon extends NeonWsQueryable<neon.Pool> implements DriverAdapter { - private isRunning = true - - constructor(pool: neon.Pool) { - super(pool) - } - - async startTransaction(): Promise<Result<Transaction>> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new NeonTransaction(connection, options)) - } - - async close() { - if (this.isRunning) { - await this.client.end() - this.isRunning = false - } - return ok(undefined) - } -} - -export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction<ARRAY_MODE_ENABLED>) { - super() - } - - override async performIO(query: Query): Promise<Result<PerformIOResult>> { - const { sql, args: values } = query - return ok( - await this.client(sql, values, { - arrayMode: true, - fullResults: true, - }), - ) - } - - startTransaction(): Promise<Result<Transaction>> { - return Promise.reject(new Error('Transactions are not supported in HTTP mode')) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git
a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-pg/README.md b/query-engine/driver-adapters/js/adapter-pg/README.md deleted file mode 100644 index b8463742e25c..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-pg - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json deleted file mode 100644 index 3573d33bc161..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@prisma/adapter-pg", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"pg\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Tom Houlé ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "pg": "^8.11.3", - "@types/pg": "^8.10.2" - }, - "peerDependencies": { - "pg": "^8.11.3" - } -} diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts deleted file mode 100644 index a1c8ce7c5e6a..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from 'pg' - -const PgColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in PgColumnType). - */ -const ArrayColumnType = { - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case PgColumnType['INT2']: - case PgColumnType['INT4']: - return ColumnTypeEnum.Int32 - case PgColumnType['INT8']: - return ColumnTypeEnum.Int64 - case PgColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case PgColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case PgColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case PgColumnType['DATE']: - return ColumnTypeEnum.Date - case PgColumnType['TIME']: - return ColumnTypeEnum.Time - case PgColumnType['TIMESTAMP']: - return ColumnTypeEnum.DateTime - case PgColumnType['NUMERIC']: - case PgColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case PgColumnType['JSON']: - case PgColumnType['JSONB']: - return ColumnTypeEnum.Json - case PgColumnType['UUID']: - return ColumnTypeEnum.Uuid - case PgColumnType['OID']: - return ColumnTypeEnum.Int64 - case PgColumnType['BPCHAR']: - case PgColumnType['TEXT']: - case PgColumnType['VARCHAR']: - case PgColumnType['BIT']: - case PgColumnType['VARBIT']: - case PgColumnType['INET']: - case PgColumnType['CIDR']: - case PgColumnType['XML']: - return ColumnTypeEnum.Text - case PgColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function convertJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - -// Original BYTEA parser -const parsePgBytes = types.getTypeParser(PgColumnType.BYTEA) as (_: string) => Buffer - -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
- */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -// return string instead of JavaScript Date object -types.setTypeParser(PgColumnType.TIME, date => date) -types.setTypeParser(PgColumnType.DATE, date => date) -types.setTypeParser(PgColumnType.TIMESTAMP, date => date) -types.setTypeParser(PgColumnType.JSONB, convertJson) -types.setTypeParser(PgColumnType.JSON, convertJson) -types.setTypeParser(PgColumnType.MONEY, money => money.slice(1)) -types.setTypeParser(PgColumnType.BYTEA, convertBytes) - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) -}) - -const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] - -types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) - -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => - parseTextArray(moneyArray).map((money) => money.slice(1)), -) diff --git a/query-engine/driver-adapters/js/adapter-pg/src/index.ts b/query-engine/driver-adapters/js/adapter-pg/src/index.ts deleted file mode 100644 index f8e51ac2685b..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPg } from './pg' diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts deleted file mode 100644 index a049b59a0740..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ /dev/null @@ -1,130 +0,0 @@ -import type pg from 'pg' -import { Debug, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:pg') - -type StdClient = pg.Pool -type TransactionClient = pg.PoolClient - -class PgQueryable<ClientT extends StdClient | TransactionClient> implements Queryable { - readonly flavour = 'postgres' - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise<Result<ResultSet>> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const { fields, rows } = await this.performIO(query) - - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows, - } - - return ok(resultSet) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32.
- */ - async executeRaw(query: Query): Promise<Result<number>> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - const { rowCount: rowsAffected } = await this.performIO(query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return ok(rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query) { - const { sql, args: values } = query - - try { - const result = await this.client.query({ text: sql, values, rowMode: 'array' }) - return result - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error - } - } -} - -class PgTransaction extends PgQueryable<TransactionClient> implements Transaction { - finished = false - - constructor(client: pg.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise<Result<void>> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - async rollback(): Promise<Result<void>> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - dispose(): Result<void> { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaPg extends PgQueryable<StdClient> implements DriverAdapter { - constructor(client: pg.Pool) { - super(client) - } - - async startTransaction(): Promise<Result<Transaction>> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new PgTransaction(connection, options)) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md deleted file mode 100644 index 8e145c07c098..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# @prisma/adapter-planetscale - -Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 for details.
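
Editor's note: for reference, a minimal usage sketch of this adapter, mirroring the `planetscaleAdapter` helper added to the connector-test-kit-executor earlier in this diff; the connection URL is a placeholder.

```ts
import { Client } from '@planetscale/database'
import { PrismaPlanetScale } from '@prisma/adapter-planetscale'

// The serverless driver talks HTTP; a custom fetch implementation can be
// injected the same way the test-kit executor injects undici's fetch.
const client = new Client({ url: process.env.DATABASE_URL })

const adapter = new PrismaPlanetScale(client)

// executeRaw resolves to a Result wrapping the number of affected rows.
const result = await adapter.executeRaw({ sql: 'SELECT 1', args: [] })
```
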
- diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json deleted file mode 100644 index 59d59704ab50..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@prisma/adapter-planetscale", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@planetscale/database\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@planetscale/database": "^1.11.0" - }, - "peerDependencies": { - "@planetscale/database": "^1.11.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts deleted file mode 100644 index f6cf8563dc24..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' - -// See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 -export type PlanetScaleColumnType - = 'NULL' - | 'INT8' - | 'UINT8' - | 'INT16' - | 'UINT16' - | 'INT24' - | 'UINT24' - | 'INT32' - | 'UINT32' - | 'INT64' - | 'UINT64' - | 'FLOAT32' - | 'FLOAT64' - | 'TIMESTAMP' - | 'DATE' - | 'TIME' - | 'DATETIME' - | 'YEAR' - | 'DECIMAL' - | 'TEXT' - | 'BLOB' - | 'VARCHAR' - | 'VARBINARY' - | 'CHAR' - | 'BINARY' - | 'BIT' - | 'ENUM' - | 'SET' // unsupported - | 'TUPLE' // unsupported - | 'GEOMETRY' - | 'JSON' - | 'EXPRESSION' // unsupported - | 'HEXNUM' - | 'HEXVAL' - | 'BITNUM' - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType { - switch (field) { - case 'INT8': - case 'UINT8': - case 'INT16': - case 'UINT16': - case 'INT24': - case 'UINT24': - case 'INT32': - case 'UINT32': - case 'YEAR': - return ColumnTypeEnum.Int32 - case 'INT64': - case 'UINT64': - return ColumnTypeEnum.Int64 - case 'FLOAT32': - return ColumnTypeEnum.Float - case 'FLOAT64': - return ColumnTypeEnum.Double - case 'TIMESTAMP': - case 'DATETIME': - return ColumnTypeEnum.DateTime - case 'DATE': - return ColumnTypeEnum.Date - case 'TIME': - return ColumnTypeEnum.Time - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'CHAR': - return ColumnTypeEnum.Char - case 'TEXT': - case 'VARCHAR': - return ColumnTypeEnum.Text - case 'ENUM': - return ColumnTypeEnum.Enum - case 'JSON': - return ColumnTypeEnum.Json - case 'BLOB': - case 'BINARY': - case 'VARBINARY': - case 'BIT': - case 'BITNUM': - case 'HEXNUM': - case 'HEXVAL': - case 'GEOMETRY': - return ColumnTypeEnum.Bytes - case 'NULL': - // Fall back to Int32 for consistency with quaint. 
- return ColumnTypeEnum.Int32 - default: - throw new Error(`Unsupported column type: ${field}`) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts deleted file mode 100644 index 013409c8424f..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts +++ /dev/null @@ -1,13 +0,0 @@ -export type Deferred<T> = { - resolve(value: T | PromiseLike<T>): void; - reject(reason: unknown): void; -} - - -export function createDeferred<T>(): [Deferred<T>, Promise<T>] { - const deferred = {} as Deferred<T> - return [deferred, new Promise<T>((resolve, reject) => { - deferred.resolve = resolve - deferred.reject = reject - })] -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts deleted file mode 100644 index 5e8add856fbb..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPlanetScale } from './planetscale' diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts deleted file mode 100644 index cffb00482003..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ /dev/null @@ -1,159 +0,0 @@ -import type planetScale from '@planetscale/database' -import { Debug, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' -import { createDeferred, Deferred } from './deferred' - -const debug = Debug('prisma:driver-adapter:planetscale') - -class RollbackError extends Error { - constructor() { - super('ROLLBACK') - this.name = 'RollbackError' - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, RollbackError) - } - } -} - -class PlanetScaleQueryable<ClientT extends planetScale.Connection | planetScale.Transaction> implements Queryable { - readonly flavour = 'mysql' - constructor(protected client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise<Result<ResultSet>> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const { fields, insertId: lastInsertId, rows } = await this.performIO(query) - - const columns = fields.map((field) => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - - return ok(resultSet) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise<Result<number>> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - const { rowsAffected } = await this.performIO(query) - return ok(rowsAffected) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy.
- */ - private async performIO(query: Query) { - const { sql, args: values } = query - - try { - const result = await this.client.execute(sql, values, { - as: 'array', - }) - return result - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error - } - } -} - -class PlanetScaleTransaction extends PlanetScaleQueryable<planetScale.Transaction> implements Transaction { - finished = false - - constructor( - tx: planetScale.Transaction, - readonly options: TransactionOptions, - private txDeferred: Deferred<void>, - private txResultPromise: Promise<void>, - ) { - super(tx) - } - - async commit(): Promise<Result<void>> { - debug(`[js::commit]`) - - this.finished = true - this.txDeferred.resolve() - return Promise.resolve(ok(await this.txResultPromise)) - } - - async rollback(): Promise<Result<void>> { - debug(`[js::rollback]`) - - this.finished = true - this.txDeferred.reject(new RollbackError()) - return Promise.resolve(ok(await this.txResultPromise)) - } - - dispose(): Result<void> { - if (!this.finished) { - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaPlanetScale extends PlanetScaleQueryable<planetScale.Connection> implements DriverAdapter { - constructor(client: planetScale.Connection) { - super(client) - } - - async startTransaction() { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - return new Promise<Result<Transaction>>((resolve, reject) => { - const txResultPromise = this.client - .transaction(async (tx) => { - const [txDeferred, deferredPromise] = createDeferred<void>() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve(ok(txWrapper)) - return deferredPromise - }) - .catch((error) => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) - }) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json deleted file mode 100644 index be6a54a315fb..000000000000 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "connector-test-kit-executor", - "version": "5.4.0", - "description": "", - "main": "dist/index.js", - "private": true, - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "keywords": [], - "author": "", - "sideEffects": false, - "license": "Apache-2.0", - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@prisma/adapter-libsql":
"workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", - "@types/pg": "^8.10.2", - "pg": "^8.11.3", - "undici": "^5.23.0" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/README.md b/query-engine/driver-adapters/js/driver-adapter-utils/README.md deleted file mode 100644 index 78938e802bd3..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/driver-adapters-utils - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json deleted file mode 100644 index 64301a7a5533..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@prisma/driver-adapter-utils", - "version": "0.0.0", - "description": "Internal set of utilities and types for Prisma's driver adapters.", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "debug": "^4.3.4" - }, - "devDependencies": { - "@types/debug": "^4.1.8" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts deleted file mode 100644 index aee18197e291..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { Result, err, ok } from './result' -import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord } from './types' - -class ErrorRegistryInternal implements ErrorRegistry { - private registeredErrors: ErrorRecord[] = [] - - consumeError(id: number): ErrorRecord | undefined { - return this.registeredErrors[id] - } - - registerNewError(error: unknown) { - let i = 0 - while (this.registeredErrors[i] !== undefined) { - i++ - } - this.registeredErrors[i] = { error } - return i - } -} - -// *.bind(adapter) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { - const errorRegistry = new ErrorRegistryInternal() - - const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter)) - return { - errorRegistry, - queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), - executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), - flavour: adapter.flavour, - startTransaction: async (...args) => { - const result = await startTransaction(...args) - return result.map((tx) => bindTransaction(errorRegistry, tx)) - }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), - } -} - -// *.bind(transaction) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. 
-// *.bind(transaction) is required to preserve the `this` context of functions whose
-// execution is delegated to napi.rs.
-const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => {
-  return {
-    flavour: transaction.flavour,
-    options: transaction.options,
-    queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)),
-    executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)),
-    commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)),
-    rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)),
-    dispose: wrapSync(errorRegistry, transaction.dispose.bind(transaction)),
-  }
-}
-
-function wrapAsync<A extends unknown[], R>(
-  registry: ErrorRegistryInternal,
-  fn: (...args: A) => Promise<Result<R>>,
-): (...args: A) => Promise<Result<R>> {
-  return async (...args) => {
-    try {
-      return await fn(...args)
-    } catch (error) {
-      const id = registry.registerNewError(error)
-      return err({ kind: 'GenericJsError', id })
-    }
-  }
-}
-
-function wrapSync<A extends unknown[], R>(
-  registry: ErrorRegistryInternal,
-  fn: (...args: A) => Result<R>,
-): (...args: A) => Result<R> {
-  return (...args) => {
-    try {
-      return fn(...args)
-    } catch (error) {
-      const id = registry.registerNewError(error)
-      return err({ kind: 'GenericJsError', id })
-    }
-  }
-}
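Taken together, a hedged sketch of how an error travels through this machinery, using the names defined above (`someDriverAdapter` is a stand-in for any `DriverAdapter` implementation):

```ts
// Sketch: an error thrown inside queryRaw is captured, not thrown.
async function demo(someDriverAdapter: DriverAdapter) {
  const bound = bindAdapter(someDriverAdapter)

  const result = await bound.queryRaw({ sql: 'SELECT 1', args: [] })
  if (!result.ok && result.error.kind === 'GenericJsError') {
    // The original JS error was parked in the registry under a numeric id
    // instead of being thrown, so it can cross the napi.rs boundary and be
    // retrieved later on the Rust side (or here, for inspection).
    const record = bound.errorRegistry.consumeError(result.error.id)
    console.error('captured driver error:', record?.error)
  }
}
```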
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
deleted file mode 100644
index 5ddc7f20b390..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-// Same order as in rust driver-adapters' `ColumnType`.
-// Note: exporting const enums causes lots of problems with bundlers, so we emulate
-// them via regular dictionaries.
-// See: https://hackmd.io/@dzearing/Sk3xV0cLs
-export const ColumnTypeEnum = {
-  // Scalars
-  Int32: 0,
-  Int64: 1,
-  Float: 2,
-  Double: 3,
-  Numeric: 4,
-  Boolean: 5,
-  Char: 6,
-  Text: 7,
-  Date: 8,
-  Time: 9,
-  DateTime: 10,
-  Json: 11,
-  Enum: 12,
-  Bytes: 13,
-  Set: 14,
-  Uuid: 15,
-
-  // Arrays
-  Int32Array: 64,
-  Int64Array: 65,
-  FloatArray: 66,
-  DoubleArray: 67,
-  NumericArray: 68,
-  BooleanArray: 69,
-  CharArray: 70,
-  TextArray: 71,
-  DateArray: 72,
-  TimeArray: 73,
-  DateTimeArray: 74,
-  JsonArray: 75,
-  EnumArray: 76,
-  BytesArray: 77,
-  UuidArray: 78,
-
-  // Custom
-  UnknownNumber: 128,
-} as const
-
-// This string value paired with `ColumnType.Json` will be treated as JSON `null`
-// when converting to a Quaint value. This is to work around JS/JSON null values
-// already being used to represent database NULLs.
-export const JsonNullMarker = '$__prisma_null'
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts
deleted file mode 100644
index e0a1fe380fa2..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import { debug as Debug } from 'debug'
-
-export { Debug }
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts
deleted file mode 100644
index e7c13be99966..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export { bindAdapter } from './binder'
-export { ColumnTypeEnum, JsonNullMarker } from './const'
-export { Debug } from './debug'
-export { ok, err, type Result } from './result'
-export type * from './types'
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts
deleted file mode 100644
index 5af95db68671..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import { Error } from './types'
-export type Result<T> = {
-  // common methods
-  map<U>(fn: (value: T) => U): Result<U>
-  flatMap<U>(fn: (value: T) => Result<U>): Result<U>
-} & (
-  | {
-      readonly ok: true
-      readonly value: T
-    }
-  | {
-      readonly ok: false
-      readonly error: Error
-    }
-)
-
-export function ok<T>(value: T): Result<T> {
-  return {
-    ok: true,
-    value,
-    map(fn) {
-      return ok(fn(value))
-    },
-    flatMap(fn) {
-      return fn(value)
-    },
-  }
-}
-
-export function err<T>(error: Error): Result<T> {
-  return {
-    ok: false,
-    error,
-    map() {
-      return err(error)
-    },
-    flatMap() {
-      return err(error)
-    },
-  }
-}
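A short sketch of the `Result` combinators in action, using `ok`/`err` exactly as defined above:

```ts
// Sketch: chaining map/flatMap on a Result.
const parsed = ok('41')
  .map((s) => Number(s) + 1) // Result<number> containing 42
  .flatMap((n) => (Number.isNaN(n) ? err({ kind: 'GenericJsError', id: 0 }) : ok(n)))

if (parsed.ok) {
  console.log(parsed.value) // 42
}
```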
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
deleted file mode 100644
index 65fa002dcc3a..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
+++ /dev/null
@@ -1,116 +0,0 @@
-import { ColumnTypeEnum } from './const'
-import { Result } from './result'
-
-export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum]
-
-export interface ResultSet {
-  /**
-   * List of column types appearing in a database query, in the same order as `columnNames`.
-   * They are used within the Query Engine to convert values from JS to Quaint values.
-   */
-  columnTypes: Array<ColumnType>
-
-  /**
-   * List of column names appearing in a database query, in the same order as `columnTypes`.
-   */
-  columnNames: Array<string>
-
-  /**
-   * List of rows retrieved from a database query.
-   * Each row is a list of values, whose length matches `columnNames` and `columnTypes`.
-   */
-  rows: Array<Array<unknown>>
-
-  /**
-   * The last ID of an `INSERT` statement, if any.
-   * This is required for `AUTO_INCREMENT` columns in MySQL and SQLite-flavoured databases.
-   */
-  lastInsertId?: string
-}
-
-export type Query = {
-  sql: string
-  args: Array<unknown>
-}
-
-export type Error =
-  | {
-      kind: 'GenericJsError'
-      id: number
-    }
-  | {
-      kind: 'PostgresError'
-      code: string
-      severity: string
-      message: string
-      detail: string | undefined
-      column: string | undefined
-      hint: string | undefined
-    }
-
-export interface Queryable {
-  readonly flavour: 'mysql' | 'postgres' | 'sqlite'
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters,
-   * and returning the type-aware result set of the query.
-   *
-   * This is the preferred way of executing `SELECT` queries.
-   */
-  queryRaw(params: Query): Promise<Result<ResultSet>>
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters,
-   * and returning the number of affected rows.
-   *
-   * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries,
-   * as well as transactional queries.
-   */
-  executeRaw(params: Query): Promise<Result<number>>
-}
-
-export interface DriverAdapter extends Queryable {
-  /**
-   * Starts a new transaction.
-   */
-  startTransaction(): Promise<Result<Transaction>>
-
-  /**
-   * Closes the connection to the database, if any.
-   */
-  close: () => Promise<Result<void>>
-}
-
-export type TransactionOptions = {
-  usePhantomQuery: boolean
-}
-
-export interface Transaction extends Queryable {
-  /**
-   * Transaction options.
-   */
-  readonly options: TransactionOptions
-  /**
-   * Commit the transaction.
-   */
-  commit(): Promise<Result<void>>
-  /**
-   * Rolls back the transaction.
-   */
-  rollback(): Promise<Result<void>>
-  /**
-   * Discards and closes the transaction which may or may not have been committed or rolled back.
-   * This operation must be synchronous. If the implementation requires creating new
-   * asynchronous tasks on the event loop, the driver is responsible for handling the errors
-   * appropriately to ensure they don't crash the application.
-   */
-  dispose(): Result<void>
-}
-
-export interface ErrorCapturingDriverAdapter extends DriverAdapter {
-  readonly errorRegistry: ErrorRegistry
-}
-
-export interface ErrorRegistry {
-  consumeError(id: number): ErrorRecord | undefined
-}
-
-export type ErrorRecord = { error: unknown }
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json
deleted file mode 100644
index 2c2e266bdb3b..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "extends": "../tsconfig.json",
-  "compilerOptions": {
-    "outDir": "declaration",
-  }
-}
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json
deleted file mode 100644
index 3c43903cfdd1..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "extends": "../tsconfig.json"
-}
diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml
deleted file mode 100644
index f9e70da7ee5a..000000000000
--- a/query-engine/driver-adapters/js/pnpm-workspace.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-packages:
-  - './adapter-libsql'
-  - './adapter-neon'
-  - './adapter-pg'
-  - './adapter-planetscale'
-  - './connector-test-kit-executor'
-  - './driver-adapter-utils'
-  - './smoke-test-js'
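To make the contract above concrete, here is a hedged, minimal in-memory adapter satisfying the interfaces (a test double in the spirit of the `fakeAdapter` used in `errors.test.ts` below, not a real driver; the import path assumes the published package name):

```ts
import { ok, type DriverAdapter, type Query, type Result, type ResultSet, type Transaction } from '@prisma/driver-adapter-utils'

// Minimal test double: every query returns an empty result set.
const emptyResultSet: ResultSet = { columnNames: [], columnTypes: [], rows: [] }

const noopAdapter: DriverAdapter = {
  flavour: 'sqlite',
  async queryRaw(_params: Query): Promise<Result<ResultSet>> {
    return ok(emptyResultSet)
  },
  async executeRaw(_params: Query): Promise<Result<number>> {
    return ok(0) // zero affected rows
  },
  async startTransaction(): Promise<Result<Transaction>> {
    const tx: Transaction = {
      flavour: 'sqlite',
      options: { usePhantomQuery: true },
      queryRaw: async () => ok(emptyResultSet),
      executeRaw: async () => ok(0),
      commit: async () => ok(undefined),
      rollback: async () => ok(undefined),
      dispose: () => ok(undefined),
    }
    return ok(tx)
  },
  close: async () => ok(undefined),
}
```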
diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example
deleted file mode 100644
index 15a286787cbd..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example
+++ /dev/null
@@ -1,26 +0,0 @@
-# Uncomment "source_up" if you need to load the .envrc at the root of the
-# `prisma-engines` repository before loading this one (for example, if you
-# are using Nix).
-#
-# source_up
-
-export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict"
-export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10"
-
-# Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable`
-export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438"
-
-# Set this to a `file:` URL when using a local sqlite database (either
-# standalone or as an embedded replica). Otherwise, when using a remote Turso
-# (or sqld) database in HTTP mode directly without an embedded replica, set its
-# URL here.
-export JS_LIBSQL_DATABASE_URL="file:${PWD}/libsql.db"
-
-# # Set this to the URL of remote Turso database when using an embedded replica.
-# export JS_LIBSQL_SYNC_URL=""
-
-# # Provide an auth token when using a remote Turso database.
-# export JS_LIBSQL_AUTH_TOKEN=""
-
-# Can be one of "number" (the default when nothing is specified), "bigint" or "string". "bigint" works best with Prisma.
-export JS_LIBSQL_INT_MODE="bigint"
diff --git a/query-engine/driver-adapters/js/smoke-test-js/.gitignore b/query-engine/driver-adapters/js/smoke-test-js/.gitignore
deleted file mode 100644
index be550f99317f..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-libsql.db
-libsql.db-journal
-libsql.db-shm
-libsql.db-wal
diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md
deleted file mode 100644
index 204be94670b9..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/README.md
+++ /dev/null
@@ -1,77 +0,0 @@
-# @prisma/driver-adapters-smoke-tests-js
-
-This is a playground for testing the `libquery` client with the experimental Node.js drivers.
-It contains a subset of `@prisma/client`, plus some handy executable smoke tests:
-- [`./src/libquery`](./src/libquery): it contains smoke tests using a local `libquery`, the Query Engine library.
-- [`./src/client`](./src/client): it contains smoke tests using `@prisma/client`.
-
-## How to setup
-
-We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory.
-It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon require either Node.js `v18`+ or a custom `fetch` function.
-
-In the parent directory (`cd ..`):
-- Build the driver adapters via `pnpm i && pnpm build`
-
-In the current directory:
-- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template
-- Install Node.js dependencies via
-  ```bash
-  pnpm i
-  ```
-
-Anywhere in the repository:
-- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine
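Once the setup above is complete, every smoke test follows the same pattern. A hedged sketch, mirroring the `pg` variant that appears in `src/client/pg.test.ts` below (it assumes `JS_PG_DATABASE_URL` is exported as in `.envrc.example`):

```ts
// Sketch: wire a driver adapter into @prisma/client.
import pg from 'pg'
import { PrismaPg } from '@prisma/adapter-pg'
import { PrismaClient } from '@prisma/client'

const pool = new pg.Pool({ connectionString: process.env.JS_PG_DATABASE_URL })
const prisma = new PrismaClient({ adapter: new PrismaPg(pool) })

console.log(await prisma.$queryRawUnsafe('SELECT 1'))
```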
-### PlanetScale
-
-If you don't have a connection string yet:
-
-- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/)
-- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc).
-- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`.
-
-In the current directory:
-- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data.
-- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database.
-  For more fine-grained control:
-  - Run `pnpm planetscale:libquery` to test using `libquery`
-  - Run `pnpm planetscale:client` to test using `@prisma/client`
-
-### Neon
-
-If you don't have a connection string yet:
-
-- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech).
-- Paste the connection string into `JS_NEON_DATABASE_URL`.
-
-In the current directory:
-- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data.
-- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection.
-  For more fine-grained control:
-  - Run `pnpm neon:ws:libquery` to test using `libquery`
-  - Run `pnpm neon:ws:client` to test using `@prisma/client`
-- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail.
-  For more fine-grained control:
-  - Run `pnpm neon:http:libquery` to test using `libquery`
-  - Run `pnpm neon:http:client` to test using `@prisma/client`
-
-### Pg
-
-Start the database via `docker compose up postgres15` in `/docker`.
-
-In the current directory:
-- Run `pnpm prisma:pg` to push the Prisma schema and insert the test data.
-- Run `pnpm pg` to run smoke tests using `libquery` against the PostgreSQL database, using `pg`.
-  For more fine-grained control:
-  - Run `pnpm pg:libquery` to test using `libquery`
-  - Run `pnpm pg:client` to test using `@prisma/client`
-
-### Libsql
-
-In the current directory:
-- Run `pnpm prisma:libsql` to push the Prisma schema and insert the test data.
-- Run `pnpm libsql` to run smoke tests using `libquery` against the SQLite database, using `libSQL` - For more fine-grained control: - - Run `pnpm libsql:libquery` to test using `libquery` - - Run `pnpm libsql:client` to test using `@prisma/client` \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json deleted file mode 100644 index 90f25234be1e..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "@prisma/driver-adapters-smoke-tests-js", - "private": true, - "type": "module", - "version": "5.4.0", - "description": "", - "scripts": { - "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", - "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", - "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", - "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", - "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", - "prisma:db:push:sqlite": "prisma db push --schema ./prisma/sqlite/schema.prisma --force-reset", - "prisma:db:execute:sqlite": "prisma db execute --schema ./prisma/sqlite/schema.prisma --file ./prisma/sqlite/commands/type_test/insert.sql", - "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", - "prisma:neon:ws": "pnpm prisma:neon", - "prisma:neon:http": "pnpm prisma:neon", - "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", - "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", - "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", - "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", - "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", - "pg": "pnpm pg:libquery && pnpm pg:client", - "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/errors.test.ts", - "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "studio:planetscale": "cross-env-shell 
DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", - "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", - "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client", - "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"", - "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/libsql.test.ts", - "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/libsql.test.ts", - "libsql": "pnpm libsql:libquery && pnpm libsql:client" - }, - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": true, - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.1", - "@prisma/driver-adapter-utils": "workspace:*", - "pg": "^8.11.3", - "superjson": "^1.13.1", - "undici": "^5.23.0" - }, - "devDependencies": { - "@types/node": "^20.5.1", - "@types/pg": "^8.10.2", - "cross-env": "^7.0.3", - "prisma": "5.4.1", - "tsx": "^3.12.7" - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql deleted file mode 100644 index 6641eff216b2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql +++ /dev/null @@ -1,51 +0,0 @@ -INSERT INTO type_test ( - tinyint_column, - smallint_column, - mediumint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - bit_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - year_column, - datetime_column, - timestamp_column, - json_column, - enum_column, - binary_column, - varbinary_column, - blob_column, - set_column -) VALUES ( - 127, -- tinyint - 32767, -- smallint - 8388607, -- mediumint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 1, -- bit - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - 2023, -- year - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3', -- enum - 0x4D7953514C, -- binary - 0x48656C6C6F20, -- varbinary - _binary 'binary', -- blob - 'option1,option3' -- set -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma deleted file mode 100644 index 00418d57cc2c..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ /dev/null @@ -1,120 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db 
{ - provider = "mysql" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - tinyint_column Int @db.TinyInt - tinyint_column_null Int? @db.TinyInt - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - mediumint_column Int @db.MediumInt - mediumint_column_null Int? @db.MediumInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Float - float_column_null Float? @db.Float - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - bit_column Boolean @db.Bit(1) - bit_column_null Boolean? @db.Bit(1) - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String @db.Text - text_column_null String? @db.Text - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - year_column Int @db.Year - year_column_null Int? @db.Year - datetime_column DateTime @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? - binary_column Bytes @db.Binary(64) - binary_column_null Bytes? @db.Binary(64) - varbinary_column Bytes @db.VarBinary(128) - varbinary_column_null Bytes? @db.VarBinary(128) - blob_column Bytes @db.Blob - blob_null Bytes? @db.Blob - set_column String - set_column_null String? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? 
-} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql deleted file mode 100644 index 170bafb9d810..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql +++ /dev/null @@ -1,35 +0,0 @@ -INSERT INTO type_test ( - smallint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - datetime_column, - timestamp_column, - json_column, - enum_column -) VALUES ( - 32767, -- smallint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3' -- enum -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma deleted file mode 100644 index 7319f07d8a60..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ /dev/null @@ -1,112 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgres" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Real - float_column_null Float? @db.Real - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String - text_column_null String? - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - datetime_column DateTime @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? 
-  id         String  @id
-
-  @@unique([p_1, p_2])
-}
-
-enum type_test_enum_column {
-  value1
-  value2
-  value3
-}
-
-enum type_test_enum_column_null {
-  value1
-  value2
-  value3
-}
-
-model Author {
-  id        Int    @id @default(autoincrement())
-  firstName String
-  lastName  String
-  age       Int
-
-  @@map("authors")
-}
-
-model Product {
-  id              String @id @default(cuid())
-  properties      Json
-  properties_null Json?
-  users           User[]
-}
-
-model User {
-  id              String   @id @default(uuid())
-  email           String
-  favoriteProduct Product? @relation(fields: [productId], references: [id])
-  productId       String?
-}
-
-model Unique {
-  email String @id
-}
diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql
deleted file mode 100644
index 014592d2fa2c..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-INSERT INTO type_test (
-  int_column,
-  bigint_column,
-  double_column,
-  decimal_column,
-  boolean_column,
-  text_column,
-  datetime_column
-) VALUES (
-  2147483647, -- int
-  9223372036854775807, -- bigint
-  1.7976931348623157, -- double
-  99999999.99, -- decimal
-  TRUE, -- boolean
-  'This is a long text...', -- text
-  '2023-07-24 23:59:59.415' -- datetime
-);
diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql
deleted file mode 100644
index 31c63d423e22..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql
+++ /dev/null
@@ -1,85 +0,0 @@
--- CreateTable
-CREATE TABLE "type_test" (
-    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
-    "int_column" INTEGER NOT NULL,
-    "int_column_null" INTEGER,
-    "bigint_column" BIGINT NOT NULL,
-    "bigint_column_null" BIGINT,
-    "double_column" REAL NOT NULL,
-    "double_column_null" REAL,
-    "decimal_column" DECIMAL NOT NULL,
-    "decimal_column_null" DECIMAL,
-    "boolean_column" BOOLEAN NOT NULL,
-    "boolean_column_null" BOOLEAN,
-    "text_column" TEXT NOT NULL,
-    "text_column_null" TEXT,
-    "datetime_column" DATETIME NOT NULL,
-    "datetime_column_null" DATETIME
-);
-
--- CreateTable
-CREATE TABLE "type_test_2" (
-    "id" TEXT NOT NULL PRIMARY KEY,
-    "datetime_column" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    "datetime_column_null" DATETIME
-);
-
--- CreateTable
-CREATE TABLE "type_test_3" (
-    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
-    "bytes" BLOB NOT NULL
-);
-
--- CreateTable
-CREATE TABLE "Child" (
-    "c" TEXT NOT NULL,
-    "c_1" TEXT NOT NULL,
-    "c_2" TEXT NOT NULL,
-    "parentId" TEXT,
-    "non_unique" TEXT,
-    "id" TEXT NOT NULL PRIMARY KEY
-);
-
--- CreateTable
-CREATE TABLE "Parent" (
-    "p" TEXT NOT NULL,
-    "p_1" TEXT NOT NULL,
-    "p_2" TEXT NOT NULL,
-    "non_unique" TEXT,
-    "id" TEXT NOT NULL PRIMARY KEY
-);
-
--- CreateTable
-CREATE TABLE "authors" (
-    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
-    "firstName" TEXT NOT NULL,
-    "lastName" TEXT NOT NULL,
-    "age" INTEGER NOT NULL
-);
-
--- CreateTable
-CREATE TABLE "Product" (
-    "id" TEXT NOT NULL PRIMARY KEY,
-    "properties" TEXT NOT NULL,
-    "properties_null" TEXT
-);
-
--- CreateTable
-CREATE TABLE "Unique" (
-    "email" TEXT NOT NULL PRIMARY KEY
-);
-
--- CreateIndex
-CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c");
-
--- CreateIndex
-CREATE UNIQUE INDEX "Child_parentId_key" ON 
"Child"("parentId"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_1_c_2_key" ON "Child"("c_1", "c_2"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_key" ON "Parent"("p"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_1_p_2_key" ON "Parent"("p_1", "p_2"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml deleted file mode 100644 index e5e5c4705ab0..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml +++ /dev/null @@ -1,3 +0,0 @@ -# Please do not edit this file manually -# It should be added in your version-control system (i.e. Git) -provider = "sqlite" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma deleted file mode 100644 index bde23dee66ac..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma +++ /dev/null @@ -1,79 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - double_column Float - double_column_null Float? - decimal_column Decimal - decimal_column_null Decimal? - boolean_column Boolean - boolean_column_null Boolean? - text_column String - text_column_null String? - datetime_column DateTime - datetime_column_null DateTime? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) - datetime_column_null DateTime? -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties String - properties_null String? 
-} - -model Unique { - email String @id -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts deleted file mode 100644 index dcae3c46437d..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ /dev/null @@ -1,141 +0,0 @@ -import { describe, it } from 'node:test' -import path from 'node:path' -import assert from 'node:assert' -import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@prisma/driver-adapter-utils' -import { getLibQueryEnginePath } from '../libquery/util' - -export async function smokeTestClient(driverAdapter: DriverAdapter) { - const provider = driverAdapter.flavour - - const log = [ - { - emit: 'event', - level: 'query', - } as const, - ] - - const dirname = path.dirname(new URL(import.meta.url).pathname) - process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) - - // Run twice, once with adapter and once fully without - for (const adapter of [driverAdapter, null]) { - const isUsingDriverAdapters = adapter !== null - describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { - it('batch queries', async () => { - const prisma = new PrismaClient({ - adapter, - log, - }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.$queryRawUnsafe('SELECT 1'), - prisma.$queryRawUnsafe('SELECT 2'), - prisma.$queryRawUnsafe('SELECT 3'), - ]) - - const defaultExpectedQueries = [ - 'BEGIN', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - const driverAdapterExpectedQueries = [ - '-- Implicit "BEGIN" query via underlying driver', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - '-- Implicit "COMMIT" query via underlying driver', - ] - - // TODO: sqlite should be here too but it's too flaky the way the test is currently written, - // only a subset of logs arrives on time (from 2 to 4 out of 5) - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, driverAdapterExpectedQueries) - } else { - assert.deepEqual(queries, defaultExpectedQueries) - } - } else if (['postgres'].includes(provider)) { - // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. 
- assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) - assert.deepEqual( - queries.filter((q) => q !== 'DEALLOCATE ALL'), - defaultExpectedQueries, - ) - } - }) - - if (provider !== 'sqlite') { - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }) - - if (['mysql'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) - } else { - describe('isolation levels with sqlite', () => { - it('accepts Serializable as a no-op', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'Serializable', - }) - - if (isUsingDriverAdapters) { - assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') - assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') - } else { - assert.equal(queries.at(0), 'BEGIN') - assert.equal(queries.at(-1), 'COMMIT') - } - - assert(!queries.find((q) => q.includes('SET TRANSACTION ISOLATION LEVEL'))) - }) - - it('throws on unsupported isolation levels', async () => { - const prisma = new PrismaClient({ adapter }) - - assert.rejects( - prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }), - ) - }) - - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) - - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) - }) - } - }) - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts deleted file mode 100644 index f216b2a02ac7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('libsql with @prisma/client', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - - if (syncUrl) { - await client.sync() - } - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts deleted file mode 100644 index 53156ac56249..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 
'node:test' -import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { smokeTestClient } from './client' - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const connection = neon(connectionString) - const adapter = new PrismaNeonHTTP(connection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts deleted file mode 100644 index 37b0a9088bb7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@prisma/adapter-neon' -import { WebSocket } from 'undici' -import { smokeTestClient } from './client' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts deleted file mode 100644 index 99048ad3d95f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { smokeTestClient } from './client' - -describe('pg with @prisma/client', async () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts deleted file mode 100644 index 3c22b7aa3062..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('planetscale with @prisma/client', async () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? 
''
-
-  const connection = connect({ url: connectionString })
-  const adapter = new PrismaPlanetScale(connection)
-
-  smokeTestClient(adapter)
-})
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts
deleted file mode 100644
index bd491db289a3..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts
+++ /dev/null
@@ -1,78 +0,0 @@
-import * as Transaction from './Transaction'
-
-export type JsonQuery = {
-  modelName?: string
-  action: JsonQueryAction
-  query: JsonFieldSelection
-}
-
-export type JsonBatchQuery = {
-  batch: JsonQuery[]
-  transaction?: { isolationLevel?: Transaction.IsolationLevel }
-}
-
-export type JsonQueryAction =
-  | 'findUnique'
-  | 'findUniqueOrThrow'
-  | 'findFirst'
-  | 'findFirstOrThrow'
-  | 'findMany'
-  | 'createOne'
-  | 'createMany'
-  | 'updateOne'
-  | 'updateMany'
-  | 'deleteOne'
-  | 'deleteMany'
-  | 'upsertOne'
-  | 'aggregate'
-  | 'groupBy'
-  | 'executeRaw'
-  | 'queryRaw'
-  | 'runCommandRaw'
-  | 'findRaw'
-  | 'aggregateRaw'
-
-export type JsonFieldSelection = {
-  arguments?: Record<string, JsonArgumentValue>
-  selection: JsonSelectionSet
-}
-
-export type JsonSelectionSet = {
-  $scalars?: boolean
-  $composites?: boolean
-} & {
-  [fieldName: string]: boolean | JsonFieldSelection
-}
-
-export type JsonArgumentValue =
-  | number
-  | string
-  | boolean
-  | null
-  | JsonTaggedValue
-  | JsonArgumentValue[]
-  | { [key: string]: JsonArgumentValue }
-
-export type DateTaggedValue = { $type: 'DateTime'; value: string }
-export type DecimalTaggedValue = { $type: 'Decimal'; value: string }
-export type BytesTaggedValue = { $type: 'Bytes'; value: string }
-export type BigIntTaggedValue = { $type: 'BigInt'; value: string }
-export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } }
-export type EnumTaggedValue = { $type: 'Enum'; value: string }
-export type JsonTaggedValue = { $type: 'Json'; value: string }
-
-export type JsonInputTaggedValue =
-  | DateTaggedValue
-  | DecimalTaggedValue
-  | BytesTaggedValue
-  | BigIntTaggedValue
-  | FieldRefTaggedValue
-  | JsonTaggedValue
-  | EnumTaggedValue
-
-export type JsonOutputTaggedValue =
-  | DateTaggedValue
-  | DecimalTaggedValue
-  | BytesTaggedValue
-  | BigIntTaggedValue
-  | JsonTaggedValue
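A hedged example of what a query payload built from these types looks like; it is the same shape the `libquery` smoke tests below send via `doQuery`:

```ts
import type { JsonQuery } from './JsonProtocol'

// Example: the JSON-protocol payload for a simple findMany.
const findAllAuthors: JsonQuery = {
  modelName: 'Author',
  action: 'findMany',
  query: {
    arguments: {},
    selection: {
      $scalars: true, // select every scalar field of the model
    },
  },
}
```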
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts
deleted file mode 100644
index a25b3dd26728..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts
+++ /dev/null
@@ -1,46 +0,0 @@
-import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils'
-import type { QueryEngineConfig } from './QueryEngine'
-
-export type QueryEngineInstance = {
-  connect(headers: string): Promise<void>
-  disconnect(headers: string): Promise<void>
-  /**
-   * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest`
-   * @param headersStr JSON.stringified `QueryEngineRequestHeaders`
-   */
-  query(requestStr: string, headersStr: string, transactionId?: string): Promise<string>
-  sdlSchema(): Promise<string>
-  dmmf(traceparent: string): Promise<string>
-  startTransaction(options: string, traceHeaders: string): Promise<string>
-  commitTransaction(id: string, traceHeaders: string): Promise<string>
-  rollbackTransaction(id: string, traceHeaders: string): Promise<string>
-  metrics(options: string): Promise<string>
-}
-
-export interface QueryEngineConstructor {
-  new (
-    config: QueryEngineConfig,
-    logger: (log: string) => void,
-    driverAdapter?: ErrorCapturingDriverAdapter,
-  ): QueryEngineInstance
-}
-
-export interface LibraryLoader {
-  loadLibrary(): Promise<Library>
-}
-
-// Main
-export type Library = {
-  QueryEngine: QueryEngineConstructor
-
-  version: () => {
-    // The commit hash of the engine
-    commit: string
-    // Currently 0.1.0 (Set in Cargo.toml)
-    version: string
-  }
-  /**
-   * This returns a string representation of `DMMF.Document`
-   */
-  dmmf: (datamodel: string) => Promise<string>
-}
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts
deleted file mode 100644
index 5bab74493dee..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts
+++ /dev/null
@@ -1,97 +0,0 @@
-import { JsonBatchQuery, JsonQuery } from './JsonProtocol'
-import * as Transaction from './Transaction'
-
-// Events
-export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent
-
-export type QueryEngineLogEvent = {
-  level: string
-  module_path: string
-  message: string
-  span?: boolean
-}
-
-export type QueryEngineQueryEvent = {
-  level: 'info'
-  module_path: string
-  query: string
-  item_type: 'query'
-  params: string
-  duration_ms: string
-  result: string
-}
-
-export type QueryEnginePanicEvent = {
-  level: 'error'
-  module_path: string
-  message: 'PANIC'
-  reason: string
-  file: string
-  line: string
-  column: string
-}
-
-// Configuration
-export type QueryEngineLogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'off'
-
-export type QueryEngineTelemetry = {
-  enabled: Boolean
-  endpoint: string
-}
-
-export type GraphQLQuery = {
-  query: string
-  variables: object
-}
-
-export type EngineProtocol = 'graphql' | 'json'
-export type EngineQuery = GraphQLQuery | JsonQuery
-
-export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[]
-
-export type QueryEngineConfig = {
-  // TODO rename datamodel here and other places
-  datamodel: string
-  configDir: string
-  logQueries: boolean
-  ignoreEnvVarErrors: boolean
-  datasourceOverrides?: Record<string, string>
-  env: Record<string, string>
-  logLevel: QueryEngineLogLevel
-  telemetry?: QueryEngineTelemetry
-  engineProtocol: EngineProtocol
-}
-
-// Errors
-export type SyncRustError = {
-  is_panic: boolean
-  message: string
-  meta: {
-    full_error: string
-  }
-  error_code: string
-}
-
-export type RustRequestError = {
-  is_panic: boolean
-  message: string
-  backtrace: string
-}
-
-export type QueryEngineResult<T> = {
-  data: T
-  elapsed: number
-}
-
-export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery
-
-export type QueryEngineBatchGraphQLRequest = {
-  batch: QueryEngineRequest[]
-  transaction?: boolean
-  isolationLevel?: Transaction.IsolationLevel
-}
-
-export type QueryEngineRequest = {
-  query: string
-  variables: Object
-}
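A hedged sketch of a `QueryEngineConfig` literal matching the type above (the field values are placeholders; as suggested by the type, `datamodel` carries the full schema.prisma source rather than a path):

```ts
import type { QueryEngineConfig } from './QueryEngine'

// Sketch: a minimal engine configuration.
const config: QueryEngineConfig = {
  datamodel: '/* contents of schema.prisma */',
  configDir: '.',
  logQueries: true,
  ignoreEnvVarErrors: false,
  env: process.env as Record<string, string>, // cast: env values may be undefined
  logLevel: 'info',
  engineProtocol: 'json',
}
```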
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts
deleted file mode 100644
index 1c5786cc66da..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-export enum IsolationLevel {
-  ReadUncommitted = 'ReadUncommitted',
-  ReadCommitted = 'ReadCommitted',
-  RepeatableRead = 'RepeatableRead',
-  Snapshot = 'Snapshot',
-  Serializable = 'Serializable',
-}
-
-/**
- * maxWait ?= 2000
- * timeout ?= 5000
- */
-export type Options = {
-  maxWait?: number
-  timeout?: number
-  isolationLevel?: IsolationLevel
-}
-
-export type InteractiveTransactionInfo<Payload = unknown> = {
-  /**
-   * Transaction ID returned by the query engine.
-   */
-  id: string
-
-  /**
-   * Arbitrary payload the meaning of which depends on the `Engine` implementation.
-   * For example, `DataProxyEngine` needs to associate different API endpoints with transactions.
-   * In `LibraryEngine` and `BinaryEngine` it is currently not used.
-   */
-  payload: Payload
-}
-
-export type TransactionHeaders = {
-  traceparent?: string
-}
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts
deleted file mode 100644
index 13ac5cd9ec81..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts
+++ /dev/null
@@ -1,105 +0,0 @@
-import { bindAdapter } from '@prisma/driver-adapter-utils'
-import test, { after, before, describe } from 'node:test'
-import { createQueryFn, initQueryEngine, throwAdapterError } from './util'
-import assert from 'node:assert'
-
-const fakeAdapter = bindAdapter({
-  flavour: 'postgres',
-  startTransaction() {
-    throw new Error('Error in startTransaction')
-  },
-
-  queryRaw() {
-    throw new Error('Error in queryRaw')
-  },
-
-  executeRaw() {
-    throw new Error('Error in executeRaw')
-  },
-  close() {
-    return Promise.resolve({ ok: true, value: undefined })
-  },
-})
-
-const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma')
-const doQuery = createQueryFn(engine, fakeAdapter)
-
-const startTransaction = async () => {
-  const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }
-  const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}'))
-  if (res['error_code']) {
-    throwAdapterError(res, fakeAdapter)
-  }
-}
-
-describe('errors propagation', () => {
-  before(async () => {
-    await engine.connect('{}')
-  })
-  after(async () => {
-    await engine.disconnect('{}')
-  })
-
-  test('works for queries', async () => {
-    await assert.rejects(
-      doQuery({
-        modelName: 'Product',
-        action: 'findMany',
-        query: {
-          arguments: {},
-          selection: {
-            $scalars: true,
-          },
-        },
-      }),
-      /Error in queryRaw/,
-    )
-  })
-
-  test('works for executeRaw', async () => {
-    await assert.rejects(
-      doQuery({
-        action: 'executeRaw',
-        query: {
-          arguments: {
-            query: 'SELECT 1',
-            parameters: '[]',
-          },
-          selection: {
-            $scalars: true,
-          },
-        },
-      }),
-      /Error in executeRaw/,
-    )
-  })
-
-  test('works with implicit transaction', async () => {
-    await assert.rejects(
-      doQuery({
-        modelName: 'User',
-        action: 'createOne',
-        query: {
-          arguments: {
-            data: {
-              email: 'user@example.com',
-              favoriteProduct: {
-                create: {
-                  properties: {},
-                },
-              },
-            },
-          },
-          selection: {
-            $scalars: true,
-          },
-        },
-      }),
-      /Error in startTransaction/,
-    )
-  })
-
-  test('works with explicit transaction', async () => {
-    await assert.rejects(startTransaction(), /Error in startTransaction/)
-  })
-})
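Note the casing mismatch visible in `errors.test.ts` above: the engine's `startTransaction` consumes snake_case JSON (`isolation_level`, `max_wait`, `timeout`), while the TS-side `Options` type is camelCase. A hypothetical helper bridging the two, with the defaults documented on `Options`:

```ts
import type { Options } from './Transaction'

// Hypothetical mapper: camelCase Options -> snake_case engine JSON.
function serializeTransactionOptions(opts: Options): string {
  return JSON.stringify({
    isolation_level: opts.isolationLevel, // omitted from JSON when undefined
    max_wait: opts.maxWait ?? 2000,
    timeout: opts.timeout ?? 5000,
  })
}

// serializeTransactionOptions({ isolationLevel: IsolationLevel.Serializable, maxWait: 5000, timeout: 15000 })
// → '{"isolation_level":"Serializable","max_wait":5000,"timeout":15000}'
```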
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts
deleted file mode 100644
index bdf50eab5669..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts
+++ /dev/null
@@ -1,713 +0,0 @@
-import { describe, it, before, after } from 'node:test'
-import assert from 'node:assert'
-import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils'
-import type { QueryEngineInstance } from '../engines/types/Library'
-import { createQueryFn, initQueryEngine } from './util'
-import { JsonQuery } from '../engines/types/JsonProtocol'
-
-export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) {
-  const engine = initQueryEngine(adapter, prismaSchemaRelativePath)
-  const flavour = adapter.flavour
-
-  const doQuery = createQueryFn(engine, adapter)
-
-  describe('using libquery with Driver Adapters', () => {
-    before(async () => {
-      await engine.connect('trace')
-    })
-
-    after(async () => {
-      await engine.disconnect('trace')
-      await adapter.close()
-    })
-
-    it('create JSON values', async () => {
-      const json = JSON.stringify({
-        foo: 'bar',
-        baz: 1,
-      })
-
-      const created = await doQuery({
-        action: 'createOne',
-        modelName: 'Product',
-        query: {
-          arguments: {
-            data: {
-              properties: json,
-              properties_null: null,
-            },
-          },
-          selection: {
-            properties: true,
-          },
-        },
-      })
-
-      if (flavour !== 'sqlite') {
-        assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json')
-      }
-
-      console.log('[nodejs] created', JSON.stringify(created, null, 2))
-
-      const resultSet = await doQuery({
-        action: 'findMany',
-        modelName: 'Product',
-        query: {
-          selection: {
-            id: true,
-            properties: true,
-            properties_null: true,
-          },
-        },
-      })
-      console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2))
-
-      await doQuery({
-        action: 'deleteMany',
-        modelName: 'Product',
-        query: {
-          arguments: {
-            where: {},
-          },
-          selection: {
-            count: true,
-          },
-        },
-      })
-    })
-
-    it('create with autoincrement', async () => {
-      await doQuery({
-        modelName: 'Author',
-        action: 'deleteMany',
-        query: {
-          arguments: {
-            where: {},
-          },
-          selection: {
-            count: true,
-          },
-        },
-      })
-
-      const author = await doQuery({
-        modelName: 'Author',
-        action: 'createOne',
-        query: {
-          arguments: {
-            data: {
-              firstName: 'Firstname from autoincrement',
-              lastName: 'Lastname from autoincrement',
-              age: 99,
-            },
-          },
-          selection: {
-            id: true,
-            firstName: true,
-            lastName: true,
-          },
-        },
-      })
-      console.log('[nodejs] author', JSON.stringify(author, null, 2))
-    })
-
-    it('create non scalar types', async () => {
-      const create = await doQuery({
-        action: 'createOne',
-        modelName: 'type_test_2',
-        query: {
-          arguments: {
-            data: {},
-          },
-          selection: {
-            id: true,
-            datetime_column: true,
-            datetime_column_null: true,
-          },
-        },
-      })
-
-      console.log('[nodejs] create', JSON.stringify(create, null, 2))
-
-      const resultSet = await doQuery({
-        action: 'findMany',
-        modelName: 'type_test_2',
-        query: {
-          selection: {
-            id: true,
-            datetime_column: true,
-            datetime_column_null: true,
-          },
-          arguments: {
-            where: {},
-          },
-        },
-      })
-
-      console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2))
-
-      await doQuery({
-        action: 'deleteMany',
-        modelName: 'type_test_2',
-        query: {
-          arguments: {
-            where: {},
-          },
-          selection: {
-            count: true,
-          },
-        },
-      })
-    })
-
-    it('create/delete parent and child', async () => {
-      /* Delete all child and parent records */
-
-      // Queries: [
-      //   'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1',
-      //   'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1',
-      //   'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create explicit transaction', async () => { - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - }) - - it('expected error', async () => { - - - await assert.rejects( - async () => { - const result = await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - const result2 = await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' } - }, - selection: { - $scalars: true, - }, - }, - }) - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - }, - (err) => { - assert.match(err.message, /unique/i); - return true; - }, - ); - - }) - - describe('read scalar and non scalar types', () => { - if (['mysql'].includes(flavour)) { - it('mysql', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['postgres'].includes(flavour)) { - it('postgres', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['sqlite'].includes(flavour)) { - it('sqlite', async () => { - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "int_column": true, - "bigint_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "text_column": true, - "datetime_column": true, - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) - }) - } else { - throw new Error(`Missing test for flavour ${flavour}`) - } - }) - - it('write and read back bytes', async () => { - const createResultSet = await doQuery({ - 
action: 'createOne', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - arguments: { - data: { - bytes: { - $type: 'Bytes', - value: 'AQID', - }, - }, - }, - }, - }) - console.log('[nodejs] createOne resultSet:') - console.dir(createResultSet, { depth: Infinity }) - - const findResultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet:') - console.dir(findResultSet, { depth: Infinity }) - }) - }) -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } - - async testFindManyTypeTest() { - await this.testFindManyTypeTestMySQL() - await this.testFindManyTypeTestPostgres() - } - - private async testFindManyTypeTestMySQL() { - if (this.flavour !== 'mysql') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - private async testFindManyTypeTestPostgres() { - if (this.flavour !== 'postgres') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - async createAutoIncrement() { - await this.doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await this.doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - } - - async testCreateAndDeleteChildParent() { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' - // ] - const resultDeleteMany = await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) - } - - async testTransaction() { - const startResponse = await this.engine.startTransaction( - JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), - 'trace', - ) - - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await this.doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - } - - private async doQuery(query: JsonQuery, tx_id?: string) { - const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } - } - return parsedResult - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts deleted file mode 100644 index 7f0a1038ec74..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('libsql', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - const driverAdapter = bindAdapter(adapter) - - if (syncUrl) { - await client.sync() - } - - smokeTestLibquery(driverAdapter, '../../prisma/sqlite/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts deleted file mode 100644 index ac165d29f584..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('neon (HTTP)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const neonConnection = neon(connectionString) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts deleted file mode 100644 index 54765f5961ba..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { PrismaNeon } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon (WebSocket)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts deleted file mode 100644 index 9b79e7284be8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('pg', () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts deleted file mode 100644 index bb7c81805adc..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('planetscale', () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts deleted file mode 100644 index 783eb76759d2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ /dev/null @@ -1,71 +0,0 @@ -import path from 'node:path' -import os from 'node:os' -import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import { Library, QueryEngineInstance } from '../engines/types/Library' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function initQueryEngine( - driver: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, -): QueryEngineInstance { - const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = getLibQueryEnginePath(dirname) - - const schemaPath = path.join(dirname, prismaSchemaRelativePath) - - console.log('[nodejs] read Prisma schema from', schemaPath) - - const libqueryEngine = { exports: {} as unknown as Library } - // @ts-ignore - process.dlopen(libqueryEngine, libQueryEnginePath) - - const QueryEngine = libqueryEngine.exports.QueryEngine - - const queryEngineOptions = { - datamodel: fs.readFileSync(schemaPath, 'utf-8'), - configDir: '.', - engineProtocol: 'json' as const, - logLevel: 'info' as const, - logQueries: false, - env: process.env, - ignoreEnvVarErrors: false, - } - - const logCallback = (...args) => { - console.log(args) - } - - const engine = new QueryEngine(queryEngineOptions, 
logCallback, driver) - - return engine -} - -export function getLibQueryEnginePath(dirname: String) { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' - return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) -} - -export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { - return async function doQuery(query: JsonQuery, tx_id?: string) { - const result = await engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter) - } - return parsedResult - } -} - -export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) { - if (error.error_code === 'P2036') { - const jsError = adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json b/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/version.sh b/query-engine/driver-adapters/js/version.sh deleted file mode 100755 index 8f592c0e197c..000000000000 --- a/query-engine/driver-adapters/js/version.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Usage: `./version.sh x.y.z` will set the `x.y.z` to every package in the monorepo. - -target_version=$1 -package_dirs=$(pnpm -r list -r --depth -1 --json | jq -r '.[] | .path' | tail -n +2) - -# Iterate through each package directory -for package_dir in $package_dirs; do - # Check if the directory exists - if [ -d "$package_dir" ]; then - # Set the target version using pnpm - (cd "$package_dir" && pnpm version "$target_version" --no-git-tag-version --allow-same-version) - fi -done diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/package.json similarity index 55% rename from query-engine/driver-adapters/js/package.json rename to query-engine/driver-adapters/package.json index 2036794f8c02..e137d6a524b2 100644 --- a/query-engine/driver-adapters/js/package.json +++ b/query-engine/driver-adapters/package.json @@ -10,14 +10,16 @@ "license": "Apache-2.0", "scripts": { "build": "pnpm -r run build", - "lint": "pnpm -r run lint" + "lint": "pnpm -r run lint", + "clean": "git clean -nXd -e !query-engine/driver-adapters" }, "keywords": [], "author": "", "devDependencies": { - "@types/node": "^20.5.1", - "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" + "@types/node": "20.8.10", + "tsup": "7.2.0", + "typescript": "5.2.2", + "esbuild": "0.19.5", + "esbuild-register": "3.5.0" } } diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml new file mode 100644 index 000000000000..d37910ea5ae6 --- /dev/null +++ b/query-engine/driver-adapters/pnpm-workspace.yaml @@ -0,0 +1,8 @@ +packages: + - '../../../prisma/packages/adapter-libsql' + - '../../../prisma/packages/adapter-neon' + - '../../../prisma/packages/adapter-pg' + - '../../../prisma/packages/adapter-planetscale' + - '../../../prisma/packages/driver-adapter-utils' + - 
'../../../prisma/packages/debug'
+  - './connector-test-kit-executor'
\ No newline at end of file
diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs
index 2d469a5ab7c3..00061d72de44 100644
--- a/query-engine/driver-adapters/src/conversion.rs
+++ b/query-engine/driver-adapters/src/conversion.rs
@@ -1,14 +1,15 @@
+pub(crate) mod mysql;
+pub(crate) mod postgres;
+pub(crate) mod sqlite;
+
 use napi::bindgen_prelude::{FromNapiValue, ToNapiValue};
 use napi::NapiValue;
-use quaint::ast::Value as QuaintValue;
-use quaint::ast::ValueType as QuaintValueType;
 use serde::Serialize;
 use serde_json::value::Value as JsonValue;

-#[derive(Debug, Serialize)]
+#[derive(Debug, PartialEq, Serialize)]
 #[serde(untagged)]
 pub enum JSArg {
-    RawString(String),
     Value(serde_json::Value),
     Buffer(Vec<u8>),
     Array(Vec<JSArg>),
@@ -33,7 +34,6 @@ impl FromNapiValue for JSArg {
 impl ToNapiValue for JSArg {
     unsafe fn to_napi_value(env: napi::sys::napi_env, value: Self) -> napi::Result<napi::sys::napi_value> {
         match value {
-            JSArg::RawString(s) => ToNapiValue::to_napi_value(env, s),
             JSArg::Value(v) => ToNapiValue::to_napi_value(env, v),
             JSArg::Buffer(bytes) => {
                 ToNapiValue::to_napi_value(env, napi::Env::from_raw(env).create_buffer_with_data(bytes)?.into_raw())
@@ -49,7 +49,7 @@ impl ToNapiValue for JSArg {
             for (index, item) in items.into_iter().enumerate() {
                 let js_value = ToNapiValue::to_napi_value(env.raw(), item)?;
                 // TODO: NapiRaw could be implemented for sys::napi_value directly, there should
-                // be no need for re-wrapping; submit a patch to napi-rs and simplify here.
+                // be no need for re-wrapping; submit a patch to napi-rs and simplify here.
                 array.set(index as u32, napi::JsUnknown::from_raw_unchecked(env.raw(), js_value))?;
             }

@@ -58,36 +58,3 @@ impl ToNapiValue for JSArg {
         }
     }
 }
-
-pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result<Vec<JSArg>> {
-    let mut values = Vec::with_capacity(params.len());
-
-    for qv in params {
-        let res = match &qv.typed {
-            QuaintValueType::Json(s) => match s {
-                Some(ref s) => {
-                    let json_str = serde_json::to_string(s)?;
-                    JSArg::RawString(json_str)
-                }
-                None => JsonValue::Null.into(),
-            },
-            QuaintValueType::Bytes(bytes) => match bytes {
-                Some(bytes) => JSArg::Buffer(bytes.to_vec()),
-                None => JsonValue::Null.into(),
-            },
-            quaint_value @ QuaintValueType::Numeric(bd) => match bd {
-                Some(bd) => match bd.to_string().parse::<f64>() {
-                    Ok(double) => JSArg::from(JsonValue::from(double)),
-                    Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())),
-                },
-                None => JsonValue::Null.into(),
-            },
-            QuaintValueType::Array(Some(items)) => JSArg::Array(conv_params(items)?),
-            quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())),
-        };
-
-        values.push(res);
-    }
-
-    Ok(values)
-}
diff --git a/query-engine/driver-adapters/src/conversion/mysql.rs b/query-engine/driver-adapters/src/conversion/mysql.rs
new file mode 100644
index 000000000000..aab33213431a
--- /dev/null
+++ b/query-engine/driver-adapters/src/conversion/mysql.rs
@@ -0,0 +1,107 @@
+use crate::conversion::JSArg;
+use serde_json::value::Value as JsonValue;
+
+const DATETIME_FORMAT: &str = "%Y-%m-%d %H:%M:%S%.f";
+const DATE_FORMAT: &str = "%Y-%m-%d";
+const TIME_FORMAT: &str = "%H:%M:%S%.f";
+
+#[rustfmt::skip]
+pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result<JSArg> {
+    let res = match &value.typed {
+        quaint::ValueType::Numeric(Some(bd)) => JSArg::Value(JsonValue::String(bd.to_string())),
+        quaint::ValueType::Json(Some(s)) => JSArg::Value(JsonValue::String(serde_json::to_string(s)?)),
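+        // The arms below cover values with no lossless JSON encoding: bytes travel
+        // as a JS Buffer, and date/time values are rendered as strings using the
+        // formats defined above (exercised by the unit tests further down).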
+ quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), + quaint::ValueType::Date(Some(d)) => JSArg::Value(JsonValue::String(d.format(DATE_FORMAT).to_string())), + quaint::ValueType::DateTime(Some(dt)) => JSArg::Value(JsonValue::String(dt.format(DATETIME_FORMAT).to_string())), + quaint::ValueType::Time(Some(t)) => JSArg::Value(JsonValue::String(t.format(TIME_FORMAT).to_string())), + quaint::ValueType::Array(Some(ref items)) => JSArg::Array( + items + .iter() + .map(value_to_js_arg) + .collect::>>()?, + ), + quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), + }; + Ok(res) +} + +#[cfg(test)] +mod test { + use super::*; + use bigdecimal::BigDecimal; + use chrono::*; + use quaint::ValueType; + use std::str::FromStr; + + #[test] + #[rustfmt::skip] + fn test_value_to_js_arg() { + let test_cases = vec![ + ( + ValueType::Numeric(Some(1.into())), + JSArg::Value(JsonValue::String("1".to_string())) + ), + ( + ValueType::Numeric(Some(BigDecimal::from_str("-1.1").unwrap())), + JSArg::Value(JsonValue::String("-1.1".to_string())) + ), + ( + ValueType::Numeric(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Json(Some(serde_json::json!({"a": 1}))), + JSArg::Value(JsonValue::String("{\"a\":1}".to_string())) + ), + ( + ValueType::Json(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Date(Some(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap())), + JSArg::Value(JsonValue::String("2020-02-29".to_string())) + ), + ( + ValueType::Date(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap().with_nanosecond(100).unwrap())), + JSArg::Value(JsonValue::String("2020-01-01 23:13:01.000000100".to_string())) + ), + ( + ValueType::DateTime(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap().with_nanosecond(1200).unwrap())), + JSArg::Value(JsonValue::String("23:13:01.000001200".to_string())) + ), + ( + ValueType::Time(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Array(Some(vec!( + ValueType::Numeric(Some(1.into())).into_value(), + ValueType::Numeric(None).into_value(), + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + ))), + JSArg::Array(vec!( + JSArg::Value(JsonValue::String("1".to_string())), + JSArg::Value(JsonValue::Null), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + )) + ), + ]; + + let mut errors: Vec = vec![]; + for (val, expected) in test_cases { + let actual = value_to_js_arg(&val.clone().into_value()).unwrap(); + if actual != expected { + errors.push(format!("transforming: {:?}, expected: {:?}, actual: {:?}", &val, expected, actual)); + } + } + assert_eq!(errors.len(), 0, "{}", errors.join("\n")); + } +} diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs new file mode 100644 index 000000000000..113be5170a84 --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -0,0 +1,119 @@ +use crate::conversion::JSArg; +use chrono::format::StrftimeItems; +use once_cell::sync::Lazy; +use serde_json::value::Value as JsonValue; + +static TIME_FMT: Lazy = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f")); + +#[rustfmt::skip] +pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { + let res = match (&value.typed, value.native_column_type_name()) { + (quaint::ValueType::DateTime(Some(dt)), Some("DATE")) => JSArg::Value(JsonValue::String(dt.date_naive().to_string())), + 
(quaint::ValueType::DateTime(Some(dt)), Some("TIME")) => JSArg::Value(JsonValue::String(dt.time().to_string())), + (quaint::ValueType::DateTime(Some(dt)), Some("TIMETZ")) => JSArg::Value(JsonValue::String(dt.time().format_with_items(TIME_FMT.clone()).to_string())), + (quaint::ValueType::DateTime(Some(dt)), _) => JSArg::Value(JsonValue::String(dt.naive_utc().to_string())), + (quaint::ValueType::Json(Some(s)), _) => JSArg::Value(JsonValue::String(serde_json::to_string(s)?)), + (quaint::ValueType::Bytes(Some(bytes)), _) => JSArg::Buffer(bytes.to_vec()), + (quaint::ValueType::Numeric(Some(bd)), _) => JSArg::Value(JsonValue::String(bd.to_string())), + (quaint::ValueType::Array(Some(items)), _) => JSArg::Array( + items + .iter() + .map(value_to_js_arg) + .collect::>>()?, + ), + (quaint_value, _) => JSArg::from(JsonValue::from(quaint_value.clone())), + }; + + Ok(res) +} + +#[cfg(test)] +mod test { + use super::*; + use bigdecimal::BigDecimal; + use chrono::*; + use quaint::ValueType; + use std::str::FromStr; + + #[test] + #[rustfmt::skip] + fn test_value_to_js_arg() { + let test_cases: Vec<(quaint::Value, JSArg)> = vec![ + ( + ValueType::Numeric(Some(1.into())).into_value(), + JSArg::Value(JsonValue::String("1".to_string())) + ), + ( + ValueType::Numeric(Some(BigDecimal::from_str("-1.1").unwrap())).into_value(), + JSArg::Value(JsonValue::String("-1.1".to_string())) + ), + ( + ValueType::Numeric(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Json(Some(serde_json::json!({"a": 1}))).into_value(), + JSArg::Value(JsonValue::String("{\"a\":1}".to_string())) + ), + ( + ValueType::Json(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Date(Some(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap())).into_value(), + JSArg::Value(JsonValue::String("2020-02-29".to_string())) + ), + ( + ValueType::Date(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())).into_value().with_native_column_type(Some("DATE")), + JSArg::Value(JsonValue::String("2020-01-01".to_string())) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())).into_value().with_native_column_type(Some("TIME")), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())).into_value().with_native_column_type(Some("TIMETZ")), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::DateTime(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::Time(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Array(Some(vec!( + ValueType::Numeric(Some(1.into())).into_value(), + ValueType::Numeric(None).into_value(), + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + ValueType::Time(None).into_value(), + ))).into_value(), + JSArg::Array(vec!( + JSArg::Value(JsonValue::String("1".to_string())), + JSArg::Value(JsonValue::Null), + JSArg::Value(JsonValue::String("23:13:01".to_string())), + JSArg::Value(JsonValue::Null), + )) + ), + ]; + + let mut errors: Vec = vec![]; + for (val, expected) in test_cases { + let actual = value_to_js_arg(&val).unwrap(); + if actual != expected { + errors.push(format!("transforming: {:?}, expected: {:?}, actual: {:?}", &val, expected, 
actual)); + } + } + assert_eq!(errors.len(), 0, "{}", errors.join("\n")); + } +} diff --git a/query-engine/driver-adapters/src/conversion/sqlite.rs b/query-engine/driver-adapters/src/conversion/sqlite.rs new file mode 100644 index 000000000000..032c16923256 --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/sqlite.rs @@ -0,0 +1,108 @@ +use crate::conversion::JSArg; +use serde_json::value::Value as JsonValue; + +pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { + let res = match &value.typed { + quaint::ValueType::Numeric(Some(bd)) => match bd.to_string().parse::() { + Ok(double) => JSArg::from(JsonValue::from(double)), + Err(_) => JSArg::from(JsonValue::from(value.clone())), + }, + quaint::ValueType::Json(Some(s)) => JSArg::Value(s.to_owned()), + quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), + quaint::ValueType::Array(Some(ref items)) => JSArg::Array( + items + .iter() + .map(value_to_js_arg) + .collect::>>()?, + ), + quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), + }; + + Ok(res) +} + +// unit tests for value_to_js_arg +#[cfg(test)] +mod test { + use super::*; + use bigdecimal::BigDecimal; + use chrono::*; + use quaint::ValueType; + use serde_json::Value; + use std::str::FromStr; + + #[test] + #[rustfmt::skip] + fn test_value_to_js_arg() { + let test_cases = vec![ + ( + // This is different than how mysql or postgres processes integral BigInt values. + ValueType::Numeric(Some(1.into())), + JSArg::Value(Value::Number("1.0".parse().unwrap())) + ), + ( + ValueType::Numeric(Some(BigDecimal::from_str("-1.1").unwrap())), + JSArg::Value(Value::Number("-1.1".parse().unwrap())), + ), + ( + ValueType::Numeric(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Json(Some(serde_json::json!({"a": 1}))), + JSArg::Value(serde_json::json!({"a": 1})), + ), + ( + ValueType::Json(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Date(Some(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap())), + JSArg::Value(Value::String("2020-02-29".to_string())), + ), + ( + ValueType::Date(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())), + JSArg::Value(Value::String("2020-01-01T23:13:01+00:00".to_string())), + ), + ( + ValueType::DateTime(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())), + JSArg::Value(Value::String("23:13:01".to_string())), + ), + ( + ValueType::Time(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Array(Some(vec!( + ValueType::Numeric(Some(1.into())).into_value(), + ValueType::Numeric(None).into_value(), + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + ValueType::Time(None).into_value(), + ))), + JSArg::Array(vec!( + JSArg::Value(Value::Number("1.0".parse().unwrap())), + JSArg::Value(Value::Null), + JSArg::Value(Value::String("23:13:01".to_string())), + JSArg::Value(Value::Null), + )) + ), + ]; + + let mut errors: Vec = vec![]; + for (val, expected) in test_cases { + let actual = value_to_js_arg(&val.clone().into_value()).unwrap(); + if actual != expected { + errors.push(format!("transforming: {:?}, expected: {:?}, actual: {:?}", &val, expected, actual)); + } + } + assert_eq!(errors.len(), 0, "{}", errors.join("\n")); + } +} diff --git a/query-engine/driver-adapters/src/error.rs b/query-engine/driver-adapters/src/error.rs index f2fbb7dd9caf..4f4128088f49 100644 --- a/query-engine/driver-adapters/src/error.rs +++ 
b/query-engine/driver-adapters/src/error.rs @@ -12,7 +12,7 @@ pub(crate) fn into_quaint_error(napi_err: NapiError) -> QuaintError { QuaintError::raw_connector_error(status, reason) } -/// catches a panic thrown during the executuin of an asynchronous closure and transforms it into +/// catches a panic thrown during the execution of an asynchronous closure and transforms it into /// the Error variant of a napi::Result. pub(crate) async fn async_unwinding_panic(fut: F) -> napi::Result where diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index bdcab93a0c55..a708d75c0e32 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -4,6 +4,7 @@ use std::str::FromStr; use crate::async_js_function::AsyncJsFunction; use crate::conversion::JSArg; use crate::transaction::JsTransaction; +use metrics::increment_gauge; use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::threadsafe_function::{ErrorStrategy, ThreadsafeFunction}; use napi::{JsObject, JsString}; @@ -117,9 +118,7 @@ pub enum ColumnType { /// - BOOLEAN (BOOLEAN) -> e.g. `1` Boolean = 5, - /// The following PlanetScale type IDs are mapped into Char: - /// - CHAR (CHAR) -> e.g. `"c"` (String-encoded) - Char = 6, + Character = 6, /// The following PlanetScale type IDs are mapped into Text: /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) @@ -184,7 +183,7 @@ pub enum ColumnType { BooleanArray = 69, /// Char array (CHAR_ARRAY in PostgreSQL) - CharArray = 70, + CharacterArray = 70, /// Text array (TEXT_ARRAY in PostgreSQL) TextArray = 71, @@ -250,6 +249,12 @@ fn js_value_to_quaint( column_type: ColumnType, column_name: &str, ) -> quaint::Result> { + let parse_number_as_i64 = |n: &serde_json::Number| { + n.as_i64().ok_or(conversion_error!( + "number must be an integer in column '{column_name}', got '{n}'" + )) + }; + // Note for the future: it may be worth revisiting how much bloat so many panics with different static // strings add to the compiled artefact, and in case we should come up with a restricted set of panic // messages, or even find a way of removing them altogether. 
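A minimal standalone sketch of the idea behind the new `parse_number_as_i64` helper added above (recast here as a free function rather than the capturing closure, with a plain `String` error standing in for `conversion_error!`; names are illustrative only):

    use serde_json::Number;

    // Both the Int32 and Int64 arms funnel through a single "JSON number -> i64"
    // check, so the offending column name is threaded into the message in one place.
    fn parse_number_as_i64(n: &Number, column_name: &str) -> Result<i64, String> {
        n.as_i64()
            .ok_or_else(|| format!("number must be an integer in column '{column_name}', got '{n}'"))
    }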
@@ -257,35 +262,29 @@ fn js_value_to_quaint( ColumnType::Int32 => match json_value { serde_json::Value::Number(n) => { // n.as_i32() is not implemented, so we need to downcast from i64 instead - n.as_i64() - .ok_or(conversion_error!("number must be an integer")) + parse_number_as_i64(&n) .and_then(|n| -> quaint::Result { n.try_into() - .map_err(|e| conversion_error!("cannot convert {n} to i32: {e}")) + .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}")) }) .map(QuaintValue::int32) } - serde_json::Value::String(s) => s - .parse::() - .map(QuaintValue::int32) - .map_err(|e| conversion_error!("string-encoded number must be an i32, got {s}: {e}")), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int32).map_err(|e| { + conversion_error!("string-encoded number must be an i32 in column '{column_name}', got {s}: {e}") + }), serde_json::Value::Null => Ok(QuaintValue::null_int32()), mismatch => Err(conversion_error!( - "expected an i32 number in column {column_name}, found {mismatch}" + "expected an i32 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Int64 => match json_value { - serde_json::Value::Number(n) => n - .as_i64() - .map(QuaintValue::int64) - .ok_or(conversion_error!("number must be an i64, got {n}")), - serde_json::Value::String(s) => s - .parse::() - .map(QuaintValue::int64) - .map_err(|e| conversion_error!("string-encoded number must be an i64, got {s}: {e}")), + serde_json::Value::Number(n) => parse_number_as_i64(&n).map(QuaintValue::int64), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int64).map_err(|e| { + conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}") + }), serde_json::Value::Null => Ok(QuaintValue::null_int64()), mismatch => Err(conversion_error!( - "expected a string or number in column {column_name}, found {mismatch}" + "expected a string or number in column '{column_name}', found {mismatch}" )), }, ColumnType::Float => match json_value { @@ -293,36 +292,39 @@ fn js_value_to_quaint( // We assume that the JSON value is a valid f32 number, but we check for overflows anyway. 
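 // A checked narrowing cast of roughly this shape (sketch): cast the f64 to f32,
 // then fail if a finite value became infinite instead of silently overflowing
 // (this is what the f64_to_f32 helper used below guards against).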
serde_json::Value::Number(n) => n .as_f64() - .ok_or(conversion_error!("number must be a float, got {n}")) + .ok_or(conversion_error!( + "number must be a float in column '{column_name}', got {n}" + )) .and_then(f64_to_f32) .map(QuaintValue::float), serde_json::Value::Null => Ok(QuaintValue::null_float()), mismatch => Err(conversion_error!( - "expected an f32 number in column {column_name}, found {mismatch}" + "expected an f32 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Double => match json_value { - serde_json::Value::Number(n) => n - .as_f64() - .map(QuaintValue::double) - .ok_or(conversion_error!("number must be a f64, got {n}")), + serde_json::Value::Number(n) => n.as_f64().map(QuaintValue::double).ok_or(conversion_error!( + "number must be a f64 in column '{column_name}', got {n}" + )), serde_json::Value::Null => Ok(QuaintValue::null_double()), mismatch => Err(conversion_error!( - "expected an f64 number in column {column_name}, found {mismatch}" + "expected an f64 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Numeric => match json_value { - serde_json::Value::String(s) => BigDecimal::from_str(&s) - .map(QuaintValue::numeric) - .map_err(|e| conversion_error!("invalid numeric value when parsing {s}: {e}")), + serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { + conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") + }), serde_json::Value::Number(n) => n .as_f64() .and_then(BigDecimal::from_f64) - .ok_or(conversion_error!("number must be an f64, got {n}")) + .ok_or(conversion_error!( + "number must be an f64 in column '{column_name}', got {n}" + )) .map(QuaintValue::numeric), serde_json::Value::Null => Ok(QuaintValue::null_numeric()), mismatch => Err(conversion_error!( - "expected a string-encoded number in column {column_name}, found {mismatch}", + "expected a string-encoded number in column '{column_name}', found {mismatch}", )), }, ColumnType::Boolean => match json_value { @@ -332,62 +334,65 @@ fn js_value_to_quaint( Some(0) => Ok(QuaintValue::boolean(false)), Some(1) => Ok(QuaintValue::boolean(true)), _ => Err(conversion_error!( - "expected number-encoded boolean to be 0 or 1, got {n}" + "expected number-encoded boolean to be 0 or 1 in column '{column_name}', got {n}" )), }, serde_json::Value::String(s) => match s.as_str() { "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), "true" | "TRUE" | "1" => Ok(QuaintValue::boolean(true)), - _ => Err(conversion_error!("expected string-encoded boolean, got {s}")), + _ => Err(conversion_error!( + "expected string-encoded boolean in column '{column_name}', got {s}" + )), }, mismatch => Err(conversion_error!( - "expected a boolean in column {column_name}, found {mismatch}" + "expected a boolean in column '{column_name}', found {mismatch}" )), }, - ColumnType::Char => match json_value { + ColumnType::Character => match json_value { serde_json::Value::String(s) => match s.chars().next() { Some(c) => Ok(QuaintValue::character(c)), None => Ok(QuaintValue::null_character()), }, serde_json::Value::Null => Ok(QuaintValue::null_character()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Text => match json_value { serde_json::Value::String(s) => Ok(QuaintValue::text(s)), serde_json::Value::Null => Ok(QuaintValue::null_text()), mismatch => Err(conversion_error!( - "expected a 
string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Date => match json_value { serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") .map(QuaintValue::date) - .map_err(|_| conversion_error!("expected a date string, got {s}")), + .map_err(|_| conversion_error!("expected a date string in column '{column_name}', got {s}")), serde_json::Value::Null => Ok(QuaintValue::null_date()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Time => match json_value { - serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S") + serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S%.f") .map(QuaintValue::time) - .map_err(|_| conversion_error!("expected a time string, got {s}")), + .map_err(|_| conversion_error!("expected a time string in column '{column_name}', got {s}")), serde_json::Value::Null => Ok(QuaintValue::null_time()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::DateTime => match json_value { + // TODO: change parsing order to prefer RFC3339 serde_json::Value::String(s) => chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") .map(|dt| DateTime::from_utc(dt, Utc)) .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) .map(QuaintValue::datetime) - .map_err(|_| conversion_error!("expected a datetime string, found {s}")), + .map_err(|_| conversion_error!("expected a datetime string in column '{column_name}', found {s}")), serde_json::Value::Null => Ok(QuaintValue::null_datetime()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Json => { @@ -403,7 +408,7 @@ fn js_value_to_quaint( serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), serde_json::Value::Null => Ok(QuaintValue::null_enum()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Bytes => match json_value { @@ -413,19 +418,21 @@ fn js_value_to_quaint( .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) .collect::>>() .map(QuaintValue::bytes) - .ok_or(conversion_error!("elements of the array must be u8")), + .ok_or(conversion_error!( + "elements of the array in column '{column_name}' must be u8" + )), serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( - "expected a string or an array in column {column_name}, found {mismatch}", + "expected a string or an array in column '{column_name}', found {mismatch}", )), }, ColumnType::Uuid => match json_value { serde_json::Value::String(s) => uuid::Uuid::parse_str(&s) .map(QuaintValue::uuid) - .map_err(|_| conversion_error!("Expected a UUID string")), + .map_err(|_| conversion_error!("Expected a UUID string in column '{column_name}'")), serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( - "Expected a UUID string in column {column_name}, found {mismatch}" + "Expected a UUID string in column '{column_name}', found {mismatch}" )), }, ColumnType::UnknownNumber => match json_value { @@ -433,9 
+440,11 @@ fn js_value_to_quaint(
             .as_i64()
             .map(QuaintValue::int64)
             .or(n.as_f64().map(QuaintValue::double))
-            .ok_or(conversion_error!("number must be an i64 or f64, got {n}")),
+            .ok_or(conversion_error!(
+                "number must be an i64 or f64 in column '{column_name}', got {n}"
+            )),
         mismatch => Err(conversion_error!(
-            "expected a either an i64 or a f64 in column {column_name}, found {mismatch}",
+            "expected either an i64 or an f64 in column '{column_name}', found {mismatch}",
         )),
     },
@@ -445,7 +454,7 @@ fn js_value_to_quaint(
         ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name),
         ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name),
         ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name),
-        ColumnType::CharArray => js_array_to_quaint(ColumnType::Char, json_value, column_name),
+        ColumnType::CharacterArray => js_array_to_quaint(ColumnType::Character, json_value, column_name),
         ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name),
         ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name),
         ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name),
@@ -476,7 +485,7 @@ fn js_array_to_quaint(
         )),
         serde_json::Value::Null => Ok(QuaintValue::null_array()),
         mismatch => Err(conversion_error!(
-            "expected an array in column {column_name}, found {mismatch}",
+            "expected an array in column '{column_name}', found {mismatch}",
         )),
     }
 }
@@ -550,6 +559,12 @@ impl DriverProxy {
     pub async fn start_transaction(&self) -> quaint::Result<Box<JsTransaction>> {
         let tx = self.start_transaction.call(()).await?;
+
+        // The decrement for this gauge is done in JsTransaction::commit / JsTransaction::rollback.
+        // Previously it was done in JsTransaction::new, similar to the native Transaction.
+        // However, the correct Dispatcher is lost there and the increment does not register,
+        // so we moved it here instead.
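+        // Note that if the JS call above fails, the `?` returns early and no increment
+        // happens, so the gauge only counts successfully started transactions.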
+        increment_gauge!("prisma_client_queries_active", 1.0);
         Ok(Box::new(tx))
     }
 }
@@ -783,7 +798,7 @@ mod proxy_test {

     #[test]
     fn js_value_char_to_quaint() {
-        let column_type = ColumnType::Char;
+        let column_type = ColumnType::Character;

         // null
         test_null(QuaintValue::null_character(), column_type);
@@ -832,9 +847,14 @@ mod proxy_test {
         let s = "23:59:59";
         let json_value = serde_json::Value::String(s.to_string());
         let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap();
-
         let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap();
         assert_eq!(quaint_value, QuaintValue::time(time));
+
+        let s = "13:02:20.321";
+        let json_value = serde_json::Value::String(s.to_string());
+        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap();
+        let time: NaiveTime = NaiveTime::from_hms_milli_opt(13, 02, 20, 321).unwrap();
+        assert_eq!(quaint_value, QuaintValue::time(time));
     }

     #[test]
@@ -935,7 +955,7 @@ mod proxy_test {

         assert_eq!(
             quaint_value.err().unwrap().to_string(),
-            "Conversion failed: expected an i32 number in column column_name[2], found {}"
+            "Conversion failed: expected an i32 number in column 'column_name[2]', found {}"
         );
     }

@@ -957,7 +977,7 @@ mod proxy_test {

         assert_eq!(
             quaint_value.err().unwrap().to_string(),
-            "Conversion failed: expected a string in column column_name[0], found 10"
+            "Conversion failed: expected a string in column 'column_name[0]', found 10"
         );
     }
 }
diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs
index d8b022d0fa49..ab154eccc139 100644
--- a/query-engine/driver-adapters/src/queryable.rs
+++ b/query-engine/driver-adapters/src/queryable.rs
@@ -10,7 +10,6 @@ use quaint::{
     error::{Error, ErrorKind},
     prelude::{Query as QuaintQuery, Queryable as QuaintQueryable, ResultSet, TransactionCapable},
     visitor::{self, Visitor},
-    Value,
 };
 use tracing::{info_span, Instrument};

@@ -38,8 +37,8 @@ impl JsBaseQueryable {
         Self { proxy, flavour }
     }

-    /// visit a query according to the flavour of the JS connector
-    pub fn visit_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec<quaint::Value<'a>>)> {
+    /// visit a quaint query AST according to the flavour of the JS connector
+    fn visit_quaint_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec<quaint::Value<'a>>)> {
         match self.flavour {
             Flavour::Mysql => visitor::Mysql::build(q),
             Flavour::Postgres => visitor::Postgres::build(q),
@@ -47,39 +46,57 @@ impl JsBaseQueryable {
             _ => unimplemented!("Unsupported flavour for JS connector {:?}", self.flavour),
         }
     }
+
+    async fn build_query(&self, sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result<Query> {
+        let sql: String = sql.to_string();
+
+        let converter = match self.flavour {
+            Flavour::Postgres => conversion::postgres::value_to_js_arg,
+            Flavour::Sqlite => conversion::sqlite::value_to_js_arg,
+            Flavour::Mysql => conversion::mysql::value_to_js_arg,
+            _ => unreachable!("Unsupported flavour for JS connector {:?}", self.flavour),
+        };
+
+        let args = values
+            .iter()
+            .map(converter)
+            .collect::<serde_json::Result<Vec<JSArg>>>()?;
+
+        Ok(Query { sql, args })
+    }
 }

 #[async_trait]
 impl QuaintQueryable for JsBaseQueryable {
     async fn query(&self, q: QuaintQuery<'_>) -> quaint::Result<ResultSet> {
-        let (sql, params) = self.visit_query(q)?;
+        let (sql, params) = self.visit_quaint_query(q)?;
         self.query_raw(&sql, &params).await
     }

-    async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<ResultSet> {
+    async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<ResultSet> {
         metrics::query("js.query_raw",
sql, params, move || async move { self.do_query_raw(sql, params).await }) .await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.query_raw(sql, params).await } async fn execute(&self, q: QuaintQuery<'_>) -> quaint::Result { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.execute_raw(&sql, ¶ms).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { metrics::query("js.execute_raw", sql, params, move || async move { self.do_execute_raw(sql, params).await }) .await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.execute_raw(sql, params).await } @@ -134,16 +151,10 @@ impl JsBaseQueryable { format!(r#"-- Implicit "{}" query via underlying driver"#, stmt) } - async fn build_query(sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result { - let sql: String = sql.to_string(); - let args = conversion::conv_params(values)?; - Ok(Query { sql, args }) - } - - async fn do_query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn do_query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let result_set = self.proxy.query_raw(query).instrument(sql_span).await?; @@ -154,10 +165,10 @@ impl JsBaseQueryable { result_set.try_into() } - async fn do_execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn do_execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let affected_rows = self.proxy.execute_raw(query).instrument(sql_span).await?; @@ -202,11 +213,11 @@ impl QuaintQueryable for JsQueryable { self.inner.query(q).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.query_raw(sql, params).await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.query_raw_typed(sql, params).await } @@ -214,11 +225,11 @@ impl QuaintQueryable for JsQueryable { self.inner.execute(q).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.execute_raw(sql, params).await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> 
-    async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> {
+    async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<u64> {
         self.inner.execute_raw_typed(sql, params).await
     }
diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs
index fc6f52bd2743..ad4ce7cbb546 100644
--- a/query-engine/driver-adapters/src/result.rs
+++ b/query-engine/driver-adapters/src/result.rs
@@ -1,5 +1,5 @@
 use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue};
-use quaint::error::{Error as QuaintError, PostgresError};
+use quaint::error::{Error as QuaintError, ErrorKind, MysqlError, PostgresError, SqliteError};
 use serde::Deserialize;
 
 #[derive(Deserialize)]
@@ -13,18 +13,36 @@ pub struct PostgresErrorDef {
     hint: Option<String>,
 }
 
+#[derive(Deserialize)]
+#[serde(remote = "MysqlError")]
+pub struct MysqlErrorDef {
+    pub code: u16,
+    pub message: String,
+    pub state: String,
+}
+
+#[derive(Deserialize)]
+#[serde(remote = "SqliteError", rename_all = "camelCase")]
+pub struct SqliteErrorDef {
+    pub extended_code: i32,
+    pub message: Option<String>,
+}
+
 #[derive(Deserialize)]
 #[serde(tag = "kind")]
 /// Wrapper for JS-side errors
-/// See driver-adapters/js/adapter-utils/src/types.ts file for example
 pub(crate) enum DriverAdapterError {
     /// Unexpected JS exception
-    GenericJsError {
+    GenericJs {
         id: i32,
     },
-
-    PostgresError(#[serde(with = "PostgresErrorDef")] PostgresError),
-    // in the future, expected errors that map to known user errors with PXXX codes will also go here
+    UnsupportedNativeDataType {
+        #[serde(rename = "type")]
+        native_type: String,
+    },
+    Postgres(#[serde(with = "PostgresErrorDef")] PostgresError),
+    Mysql(#[serde(with = "MysqlErrorDef")] MysqlError),
+    Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError),
 }
 
 impl FromNapiValue for DriverAdapterError {
@@ -38,15 +56,22 @@ impl FromNapiValue for DriverAdapterError {
 impl From<DriverAdapterError> for QuaintError {
     fn from(value: DriverAdapterError) -> Self {
         match value {
-            DriverAdapterError::GenericJsError { id } => QuaintError::external_error(id),
-            DriverAdapterError::PostgresError(e) => e.into(),
+            DriverAdapterError::UnsupportedNativeDataType { native_type } => {
+                QuaintError::builder(ErrorKind::UnsupportedColumnType {
+                    column_type: native_type,
+                })
+                .build()
+            }
+            DriverAdapterError::GenericJs { id } => QuaintError::external_error(id),
+            DriverAdapterError::Postgres(e) => e.into(),
+            DriverAdapterError::Mysql(e) => e.into(),
+            DriverAdapterError::Sqlite(e) => e.into(),
             // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here
         }
     }
 }
 
 /// Wrapper for JS-side result type
-/// See driver-adapters/js/adapter-utils/src/types.ts file for example
 pub(crate) enum JsResult<T>
 where
     T: FromNapiValue,
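The `MysqlErrorDef` and `SqliteErrorDef` mirrors added above use serde's remote-derive pattern: quaint's error structs don't implement `Deserialize`, so a local shadow struct with the same shape derives the impl on their behalf. A minimal, self-contained sketch of the mechanism, using a stand-in foreign type rather than quaint's actual errors:

```rust
use serde::Deserialize;

// Stand-in for a type from another crate that lacks a Deserialize impl.
pub struct ForeignError {
    pub code: u16,
    pub message: String,
}

// Local mirror with identical fields: serde generates deserialization code
// that constructs the *foreign* type directly.
#[derive(Deserialize)]
#[serde(remote = "ForeignError")]
struct ForeignErrorDef {
    code: u16,
    message: String,
}

// The mirror is referenced wherever the foreign type appears as a field.
#[derive(Deserialize)]
struct Envelope(#[serde(with = "ForeignErrorDef")] ForeignError);

fn main() {
    let json = r#"{"code": 1062, "message": "Duplicate entry"}"#;
    let Envelope(err) = serde_json::from_str(json).unwrap();
    assert_eq!(err.code, 1062);
    println!("{}: {}", err.code, err.message);
}
```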
diff --git a/query-engine/driver-adapters/src/transaction.rs b/query-engine/driver-adapters/src/transaction.rs
index 0d26c7f863aa..d35a9019c6bc 100644
--- a/query-engine/driver-adapters/src/transaction.rs
+++ b/query-engine/driver-adapters/src/transaction.rs
@@ -1,5 +1,5 @@
 use async_trait::async_trait;
-use metrics::{decrement_gauge, increment_gauge};
+use metrics::decrement_gauge;
 use napi::{bindgen_prelude::FromNapiValue, JsObject};
 use quaint::{
     connector::{IsolationLevel, Transaction as QuaintTransaction},
@@ -22,8 +22,6 @@ pub(crate) struct JsTransaction {
 
 impl JsTransaction {
     pub(crate) fn new(inner: JsBaseQueryable, tx_proxy: TransactionProxy) -> Self {
-        increment_gauge!("prisma_client_queries_active", 1.0);
-
         Self { inner, tx_proxy }
     }
 
@@ -40,6 +38,7 @@ impl JsTransaction {
 #[async_trait]
 impl QuaintTransaction for JsTransaction {
     async fn commit(&self) -> quaint::Result<()> {
+        // increment of this gauge is done in DriverProxy::startTransaction
         decrement_gauge!("prisma_client_queries_active", 1.0);
 
         let commit_stmt = "COMMIT";
@@ -55,6 +54,7 @@ impl QuaintTransaction for JsTransaction {
     }
 
     async fn rollback(&self) -> quaint::Result<()> {
+        // increment of this gauge is done in DriverProxy::startTransaction
         decrement_gauge!("prisma_client_queries_active", 1.0);
 
         let rollback_stmt = "ROLLBACK";
diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs
index 7f34f84a8612..1965b56cb076 100644
--- a/query-engine/metrics/src/lib.rs
+++ b/query-engine/metrics/src/lib.rs
@@ -89,7 +89,7 @@ static METRIC_RENAMES: Lazy<HashMap<&'static str, (&'static str, &'static str)>>
     (MOBC_POOL_CONNECTIONS_OPEN, ("prisma_pool_connections_open", "The number of pool connections currently open")),
     (MOBC_POOL_CONNECTIONS_BUSY, ("prisma_pool_connections_busy", "The number of pool connections currently executing datasource queries")),
     (MOBC_POOL_CONNECTIONS_IDLE, ("prisma_pool_connections_idle", "The number of pool connections that are not busy running a query")),
-    (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for an free connection")),
+    (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for a free connection")),
     (MOBC_POOL_WAIT_DURATION, ("prisma_client_queries_wait_histogram_ms", "The distribution of the time all datasource queries spent waiting for a free connection")),
     ])
 });
diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml
index 74f9686189fc..187297b7529f 100644
--- a/query-engine/query-engine-node-api/Cargo.toml
+++ b/query-engine/query-engine-node-api/Cargo.toml
@@ -22,7 +22,7 @@ query-connector = { path = "../connectors/query-connector" }
 user-facing-errors = { path = "../../libs/user-facing-errors" }
 psl.workspace = true
 sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" }
-prisma-models = { path = "../prisma-models" }
+query-structure = { path = "../query-structure" }
 driver-adapters = { path = "../driver-adapters" }
 napi.workspace = true
 napi-derive.workspace = true
diff --git a/query-engine/query-engine-wasm/.gitignore b/query-engine/query-engine-wasm/.gitignore
new file mode 100644
index 000000000000..a6f0e4dca125
--- /dev/null
+++ b/query-engine/query-engine-wasm/.gitignore
@@ -0,0 +1,7 @@
+/target
+**/*.rs.bk
+Cargo.lock
+bin/
+pkg/
+wasm-pack.log
+node_modules/
\ No newline at end of file
diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc
new file mode 100644
index 000000000000..6569dfa4f323
--- /dev/null
+++ b/query-engine/query-engine-wasm/.nvmrc
@@ -0,0 +1 @@
+20.8.1
diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml
new file mode 100644
index 000000000000..95ecd228a152
--- /dev/null
+++ b/query-engine/query-engine-wasm/Cargo.toml
@@ -0,0 +1,36 @@
+[package]
+name = "query-engine-wasm"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+doc = false
+crate-type = ["cdylib"]
+name = "query_engine"
+
+[dependencies]
+anyhow = "1"
+async-trait = "0.1"
+user-facing-errors = { path = "../../libs/user-facing-errors" }
+psl.workspace = true
+query-structure = { path = "../query-structure" }
+
+thiserror = "1"
+connection-string.workspace = true
+url = "2"
+serde_json.workspace = true
+serde.workspace = true
+tokio = { version = "1.25", features = ["macros", "sync", "io-util", "time"] }
+futures = "0.3"
+wasm-bindgen = "=0.2.87"
+wasm-bindgen-futures = "0.4"
+serde-wasm-bindgen = "0.5"
+js-sys = "0.3"
+log = "0.4.6"
+wasm-logger = "0.2.0"
+
+tracing = "0.1"
+tracing-subscriber = { version = "0.3" }
+tracing-futures = "0.2"
+tsify = "0.4.5"
+console_error_panic_hook = "0.1.7"
diff --git a/query-engine/query-engine-wasm/README.md b/query-engine/query-engine-wasm/README.md
new file mode 100644
index 000000000000..f5adc7eb2894
--- /dev/null
+++ b/query-engine/query-engine-wasm/README.md
@@ -0,0 +1,40 @@
+# @prisma/query-engine-wasm
+
+**INTERNAL PACKAGE, DO NOT USE**
+
+This is a Wasm-compatible version of the Query Engine library (libquery).
+Currently, it just contains a skeleton of the public API, as some internal crates are still not Wasm-compatible.
+
+The published npm package is internal to Prisma. Its API will break without prior warning.
+
+## Setup
+
+```
+# Install the latest Rust version with `rustup`
+# or update the latest Rust version with `rustup`
+rustup update
+rustup target add wasm32-unknown-unknown
+cargo install wasm-bindgen
+cargo install wasm-pack
+```
+
+## How to Build
+
+From the current folder:
+
+- `./build.sh $OUT_NPM_VERSION`
+
+where `OUT_NPM_VERSION` (e.g. `"0.0.1"`) is the version you want to publish this package under on npm.
+
+## How to Publish
+
+From the current folder:
+
+- `wasm-pack publish --access public`
+
+## How to Test
+
+To try importing the Wasm module, you can run:
+
+- `nvm use`
+- `node --experimental-wasm-modules ./example.js`
diff --git a/query-engine/query-engine-wasm/build.rs b/query-engine/query-engine-wasm/build.rs
new file mode 100644
index 000000000000..2e8fe20c0503
--- /dev/null
+++ b/query-engine/query-engine-wasm/build.rs
@@ -0,0 +1,11 @@
+use std::process::Command;
+
+fn store_git_commit_hash() {
+    let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap();
+    let git_hash = String::from_utf8(output.stdout).unwrap();
+    println!("cargo:rustc-env=GIT_HASH={git_hash}");
+}
+
+fn main() {
+    store_git_commit_hash();
+}
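The `cargo:rustc-env` line printed by `store_git_commit_hash` above is what later lets `functions.rs` embed the commit hash at compile time. A minimal sketch of the consuming side of this standard Cargo mechanism:

```rust
// Sketch: reading a value exported by build.rs via `cargo:rustc-env=GIT_HASH=...`.
// `env!` is expanded at compile time, so a missing GIT_HASH fails the build
// instead of surfacing as a runtime error.
pub fn commit_hash() -> &'static str {
    env!("GIT_HASH")
}
```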
diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh
new file mode 100755
index 000000000000..12d8328305ff
--- /dev/null
+++ b/query-engine/query-engine-wasm/build.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Call this script as `./build.sh <npm_version>`
+
+OUT_VERSION="$1"
+OUT_FOLDER="pkg"
+OUT_JSON="${OUT_FOLDER}/package.json"
+OUT_TARGET="bundler" # Note(jkomyno): I wasn't able to make it work with the `web` target
+OUT_NPM_NAME="@prisma/query-engine-wasm"
+
+wasm-pack build --release --target $OUT_TARGET
+
+sleep 1
+
+# Mark the package as an ES module, set the entry point to the query_engine.js file, and mark the package as public
+printf '%s\n' "$(jq '. + {"type": "module"} + {"main": "./query_engine.js"} + {"private": false}' $OUT_JSON)" > $OUT_JSON
+
+# Add the version
+printf '%s\n' "$(jq --arg version "$OUT_VERSION" '. + {"version": $version}' $OUT_JSON)" > $OUT_JSON
+
+# Add the package name
+printf '%s\n' "$(jq --arg name "$OUT_NPM_NAME" '. + {"name": $name}' $OUT_JSON)" > $OUT_JSON
+
+enable_cf_in_bindings() {
+    # Enable Cloudflare Workers in the generated JS bindings.
+    # The generated bindings are compatible with:
+    # - Node.js
+    # - Cloudflare Workers / Miniflare
+
+    local FILE="$1" # e.g., `query_engine.js`
+    local BG_FILE="${FILE%.js}_bg.js"
+    local OUTPUT_FILE="${OUT_FOLDER}/${FILE}"
+
+    cat <<EOF > "$OUTPUT_FILE"
+import * as imports from "./${BG_FILE}";
+
+// switch between the import syntax for Node.js and for workers (Cloudflare Workers)
+import * as wkmod from "./${BG_FILE%.js}.wasm";
+import * as nodemod from "./${BG_FILE%.js}.wasm";
+if ((typeof process !== 'undefined') && (process.release.name === 'node')) {
+    imports.__wbg_set_wasm(nodemod);
+} else {
+    const instance = new WebAssembly.Instance(wkmod.default, { "./${BG_FILE}": imports });
+    imports.__wbg_set_wasm(instance.exports);
+}
+
+export * from "./${BG_FILE}";
+EOF
+}
+
+enable_cf_in_bindings "query_engine.js"
diff --git a/query-engine/query-engine-wasm/example.js b/query-engine/query-engine-wasm/example.js
new file mode 100644
index 000000000000..bca6d5ba95d7
--- /dev/null
+++ b/query-engine/query-engine-wasm/example.js
@@ -0,0 +1,54 @@
+/**
+ * Run with: `node --experimental-wasm-modules ./example.js`
+ * on Node.js 18+.
+ */
+
+import { Pool } from '@neondatabase/serverless'
+import { PrismaNeon } from '@prisma/adapter-neon'
+import { bindAdapter } from '@prisma/driver-adapter-utils'
+import { init, QueryEngine, getBuildTimeInfo } from './pkg/query_engine.js'
+
+async function main() {
+  // Always initialize the Wasm library before using it.
+  // This sets up the logging and panic hooks.
+  init()
+
+  const connectionString = undefined
+
+  const pool = new Pool({ connectionString })
+  const adapter = new PrismaNeon(pool)
+  const driverAdapter = bindAdapter(adapter)
+
+  console.log('buildTimeInfo', getBuildTimeInfo())
+
+  const options = {
+    datamodel: /* prisma */`
+      datasource db {
+        provider = "postgres"
+        url = env("DATABASE_URL")
+      }
+
+      generator client {
+        provider = "prisma-client-js"
+      }
+
+      model User {
+        id Int @id @default(autoincrement())
+      }
+    `,
+    logLevel: 'info',
+    logQueries: true,
+    datasourceOverrides: {},
+    env: process.env,
+    configDir: '/tmp',
+    ignoreEnvVarErrors: true,
+  }
+  const callback = () => { console.log('log-callback') }
+
+  const queryEngine = new QueryEngine(options, callback, driverAdapter)
+
+  await queryEngine.connect('trace')
+  await queryEngine.disconnect('trace')
+}
+
+main()
diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json
new file mode 100644
index 000000000000..c2d5a7a1162e
--- /dev/null
+++ b/query-engine/query-engine-wasm/package-lock.json
@@ -0,0 +1,165 @@
+{
+  "name": "query-engine-wasm",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "dependencies": {
+        "@neondatabase/serverless": "0.6.0",
+        "@prisma/adapter-neon": "5.5.2",
+        "@prisma/driver-adapter-utils": "5.5.2"
+      }
+    },
+    "node_modules/@neondatabase/serverless": {
+      "version": "0.6.0",
+      "resolved": "https://registry.npmjs.org/@neondatabase/serverless/-/serverless-0.6.0.tgz",
+      "integrity": "sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==",
+      "dependencies": {
+        "@types/pg": "8.6.6"
+      }
+    },
+    "node_modules/@prisma/adapter-neon": {
+      "version": "5.5.2",
+      "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.5.2.tgz",
+      "integrity": "sha512-XcpJ/fgh/sP7mlBFkqjIzEcU/kWnNyiZf19MBP366HF7vXg2UQTbGxmbbeFiohXSJ/rwyu1Qmos7IrKK+QJOgg==",
+      "dependencies": {
+        "@prisma/driver-adapter-utils": "5.5.2",
+        "postgres-array": "^3.0.2"
+      },
"peerDependencies": { + "@neondatabase/serverless": "^0.6.0" + } + }, + "node_modules/@prisma/adapter-neon/node_modules/postgres-array": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.2.tgz", + "integrity": "sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==", + "engines": { + "node": ">=12" + } + }, + "node_modules/@prisma/driver-adapter-utils": { + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.5.2.tgz", + "integrity": "sha512-lRkxjboGcIl2VkJNomZQ9b6vc2qGFnVwjaR/o3cTPGmmSxETx71cYRYcG/NHKrhvKxI6oKNZ/xzyuzPpg1+kJQ==", + "dependencies": { + "debug": "^4.3.4" + } + }, + "node_modules/@types/node": { + "version": "20.8.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.10.tgz", + "integrity": "sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/pg": { + "version": "8.6.6", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", + "integrity": "sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", + "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": 
">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json new file mode 100644 index 000000000000..102db2ce14b5 --- /dev/null +++ b/query-engine/query-engine-wasm/package.json @@ -0,0 +1,9 @@ +{ + "type": "module", + "main": "./example.js", + "dependencies": { + "@neondatabase/serverless": "0.6.0", + "@prisma/adapter-neon": "5.5.2", + "@prisma/driver-adapter-utils": "5.5.2" + } +} diff --git a/query-engine/query-engine-wasm/src/engine.rs b/query-engine/query-engine-wasm/src/engine.rs new file mode 100644 index 000000000000..f9a06fabcf4b --- /dev/null +++ b/query-engine/query-engine-wasm/src/engine.rs @@ -0,0 +1,265 @@ +#![allow(dead_code)] +#![allow(unused_variables)] + +use crate::proxy; +use crate::{ + error::ApiError, + logger::{LogCallback, Logger}, +}; +use js_sys::{Function as JsFunction, Object as JsObject}; +use serde::{Deserialize, Serialize}; +use std::{ + collections::{BTreeMap, HashMap}, + path::PathBuf, + sync::Arc, +}; +use tokio::sync::RwLock; +use tracing_subscriber::filter::LevelFilter; +use tsify::Tsify; +use wasm_bindgen::prelude::wasm_bindgen; + +/// The main query engine used by JS +#[wasm_bindgen] +pub struct QueryEngine { + inner: RwLock, + logger: Logger, +} + +/// The state of the engine. +enum Inner { + /// Not connected, holding all data to form a connection. + Builder(EngineBuilder), + /// A connected engine, holding all data to disconnect and form a new + /// connection. Allows querying when on this state. + Connected(ConnectedEngine), +} + +/// Everything needed to connect to the database and have the core running. +struct EngineBuilder { + schema: Arc, + config_dir: PathBuf, + env: HashMap, +} + +/// Internal structure for querying and reconnecting with the engine. +struct ConnectedEngine { + schema: Arc, + config_dir: PathBuf, + env: HashMap, +} + +/// Returned from the `serverInfo` method in javascript. +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct ServerInfo { + commit: String, + version: String, + primary_connector: Option, +} + +/// Parameters defining the construction of an engine. 
+
+/// Parameters defining the construction of an engine.
+#[derive(Debug, Deserialize, Tsify)]
+#[tsify(from_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct ConstructorOptions {
+    datamodel: String,
+    log_level: String,
+    #[serde(default)]
+    log_queries: bool,
+    #[serde(default)]
+    datasource_overrides: BTreeMap<String, String>,
+    #[serde(default)]
+    env: serde_json::Value,
+    config_dir: PathBuf,
+    #[serde(default)]
+    ignore_env_var_errors: bool,
+    #[serde(default)]
+    engine_protocol: Option<String>,
+}
+
+impl Inner {
+    /// Returns a builder if the engine is not connected
+    fn as_builder(&self) -> crate::Result<&EngineBuilder> {
+        match self {
+            Inner::Builder(ref builder) => Ok(builder),
+            Inner::Connected(_) => Err(ApiError::AlreadyConnected),
+        }
+    }
+
+    /// Returns the engine if connected
+    fn as_engine(&self) -> crate::Result<&ConnectedEngine> {
+        match self {
+            Inner::Builder(_) => Err(ApiError::NotConnected),
+            Inner::Connected(ref engine) => Ok(engine),
+        }
+    }
+}
+
+#[wasm_bindgen]
+impl QueryEngine {
+    /// Parse a validated datamodel and configuration to allow connecting later on.
+    #[wasm_bindgen(constructor)]
+    pub fn new(
+        options: ConstructorOptions,
+        callback: JsFunction,
+        maybe_adapter: Option<JsObject>,
+    ) -> Result<QueryEngine, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::new()`");
+
+        let log_callback = LogCallback(callback);
+        log::info!("Parsed `log_callback`");
+
+        let ConstructorOptions {
+            datamodel,
+            log_level,
+            log_queries,
+            datasource_overrides,
+            env,
+            config_dir,
+            ignore_env_var_errors,
+            engine_protocol,
+        } = options;
+
+        let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env
+        let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect();
+
+        let mut schema = psl::validate(datamodel.into());
+        let config = &mut schema.configuration;
+
+        if let Some(adapter) = maybe_adapter {
+            let js_queryable =
+                proxy::from_wasm(adapter).map_err(|e| ApiError::configuration(e.as_string().unwrap_or_default()))?;
+
+            let provider_name = schema.connector.provider_name();
+            log::info!("Received driver adapter for {provider_name}.");
+        }
+
+        schema
+            .diagnostics
+            .to_result()
+            .map_err(|err| ApiError::conversion(err, schema.db.source()))?;
+
+        config
+            .resolve_datasource_urls_query_engine(
+                &overrides,
+                |key| env.get(key).map(ToString::to_string),
+                ignore_env_var_errors,
+            )
+            .map_err(|err| ApiError::conversion(err, schema.db.source()))?;
+
+        config
+            .validate_that_one_datasource_is_provided()
+            .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?;
+
+        let builder = EngineBuilder {
+            schema: Arc::new(schema),
+            config_dir,
+            env,
+        };
+
+        let log_level = log_level.parse::<LevelFilter>().unwrap();
+        let logger = Logger::new(log_queries, log_level, log_callback);
+
+        Ok(Self {
+            inner: RwLock::new(Inner::Builder(builder)),
+            logger,
+        })
+    }
+
+    /// Connect to the database, allow queries to be run.
+    #[wasm_bindgen]
+    pub async fn connect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::connect()`");
+        Ok(())
+    }
+
+    /// Disconnect and drop the core. Can be reconnected later with `#connect`.
+    #[wasm_bindgen]
+    pub async fn disconnect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::disconnect()`");
+        Ok(())
+    }
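`connect` and `disconnect` above are still stubs. A hedged sketch, with simplified stand-in types, of how the `Inner::Builder` to `Inner::Connected` transition could look under the `RwLock` once the core crates compile to Wasm (assumes tokio with the `rt` and `macros` features; this is not the real implementation):

```rust
use tokio::sync::RwLock;

enum Inner {
    Builder(String),   // stand-in for EngineBuilder
    Connected(String), // stand-in for ConnectedEngine
}

struct Engine {
    inner: RwLock<Inner>,
}

impl Engine {
    async fn connect(&self) -> Result<(), String> {
        // Hold a write lock so concurrent callers observe a consistent state.
        let mut guard = self.inner.write().await;
        let Inner::Builder(schema) = &*guard else {
            return Err("AlreadyConnected".into());
        };
        // ... build the connected engine from the builder data, then swap ...
        let connected = format!("connected({schema})");
        *guard = Inner::Connected(connected);
        Ok(())
    }
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let engine = Engine { inner: RwLock::new(Inner::Builder("schema".into())) };
    assert!(engine.connect().await.is_ok());
    assert!(engine.connect().await.is_err()); // second connect: AlreadyConnected
}
```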
+
+    /// If connected, sends a query to the core and returns the response.
+    #[wasm_bindgen]
+    pub async fn query(
+        &self,
+        body: String,
+        trace: String,
+        tx_id: Option<String>,
+    ) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::query()`");
+        Err(ApiError::configuration("Can't use `query` until `request_handlers` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to start a transaction in the core and returns its ID.
+    #[wasm_bindgen(js_name = startTransaction)]
+    pub async fn start_transaction(&self, input: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::start_transaction()`");
+        Err(ApiError::configuration("Can't use `start_transaction` until `query_core` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to commit a transaction with id `tx_id` in the core.
+    #[wasm_bindgen(js_name = commitTransaction)]
+    pub async fn commit_transaction(&self, tx_id: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::commit_transaction()`");
+        Err(ApiError::configuration("Can't use `commit_transaction` until `query_core` is Wasm-compatible.").into())
+    }
+
+    #[wasm_bindgen]
+    pub async fn dmmf(&self, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::dmmf()`");
+        Err(ApiError::configuration("Can't use `dmmf` until `request_handlers` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to roll back a transaction with id `tx_id` in the core.
+    #[wasm_bindgen(js_name = rollbackTransaction)]
+    pub async fn rollback_transaction(&self, tx_id: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::rollback_transaction()`");
+        Ok("{}".to_owned())
+    }
+
+    /// Loads the query schema. Only available when connected.
+    #[wasm_bindgen(js_name = sdlSchema)]
+    pub async fn sdl_schema(&self) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::sdl_schema()`");
+        Ok("{}".to_owned())
+    }
+
+    #[wasm_bindgen]
+    pub async fn metrics(&self, json_options: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::metrics()`");
+        Err(ApiError::configuration("Metrics is not enabled in Wasm.").into())
+    }
+}
+
+fn stringify_env_values(origin: serde_json::Value) -> crate::Result<HashMap<String, String>> {
+    use serde_json::Value;
+
+    let msg = match origin {
+        Value::Object(map) => {
+            let mut result: HashMap<String, String> = HashMap::new();
+
+            for (key, val) in map.into_iter() {
+                match val {
+                    Value::Null => continue,
+                    Value::String(val) => {
+                        result.insert(key, val);
+                    }
+                    val => {
+                        result.insert(key, val.to_string());
+                    }
+                }
+            }
+
+            return Ok(result);
+        }
+        Value::Null => return Ok(Default::default()),
+        Value::Bool(_) => "Expected an object for the env constructor parameter, got a boolean.",
+        Value::Number(_) => "Expected an object for the env constructor parameter, got a number.",
+        Value::String(_) => "Expected an object for the env constructor parameter, got a string.",
+        Value::Array(_) => "Expected an object for the env constructor parameter, got an array.",
+    };
+
+    Err(ApiError::JsonDecode(msg.to_string()))
+}
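Several methods above return `Err(ApiError::configuration(...).into())` where a `wasm_bindgen::JsError` is expected. That works because `thiserror` derives `std::error::Error` for `ApiError`, and wasm-bindgen ships a blanket `impl<E: std::error::Error> From<E> for JsError`, which turns the Rust error's `Display` output into a thrown JS exception. A minimal illustration with a hypothetical error type:

```rust
use thiserror::Error;
use wasm_bindgen::JsError;

#[derive(Debug, Error)]
enum MyError {
    #[error("engine is not yet connected")]
    NotConnected,
}

fn demo() -> Result<(), JsError> {
    // Converted via wasm-bindgen's `From<E: std::error::Error> for JsError`;
    // on the JS side this surfaces as a rejected Promise / thrown exception.
    Err(MyError::NotConnected.into())
}
```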
diff --git a/query-engine/query-engine-wasm/src/error.rs b/query-engine/query-engine-wasm/src/error.rs
new file mode 100644
index 000000000000..619e96564f6a
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/error.rs
@@ -0,0 +1,93 @@
+use psl::diagnostics::Diagnostics;
+// use query_connector::error::ConnectorError;
+// use query_core::CoreError;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum ApiError {
+    #[error("{:?}", _0)]
+    Conversion(Diagnostics, String),
+
+    #[error("{}", _0)]
+    Configuration(String),
+
+    // #[error("{}", _0)]
+    // Core(CoreError),
+
+    // #[error("{}", _0)]
+    // Connector(ConnectorError),
+
+    #[error("Can't modify an already connected engine.")]
+    AlreadyConnected,
+
+    #[error("Engine is not yet connected.")]
+    NotConnected,
+
+    #[error("{}", _0)]
+    JsonDecode(String),
+}
+
+impl From<ApiError> for user_facing_errors::Error {
+    fn from(err: ApiError) -> Self {
+        use std::fmt::Write as _;
+
+        match err {
+            // ApiError::Connector(ConnectorError {
+            //     user_facing_error: Some(err),
+            //     ..
+            // }) => err.into(),
+            ApiError::Conversion(errors, dml_string) => {
+                let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string);
+                write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap();
+
+                user_facing_errors::Error::from(user_facing_errors::KnownError::new(
+                    user_facing_errors::common::SchemaParserError { full_error },
+                ))
+            }
+            // ApiError::Core(error) => user_facing_errors::Error::from(error),
+            other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()),
+        }
+    }
+}
+
+impl ApiError {
+    pub fn conversion(diagnostics: Diagnostics, dml: impl ToString) -> Self {
+        Self::Conversion(diagnostics, dml.to_string())
+    }
+
+    pub fn configuration(msg: impl ToString) -> Self {
+        Self::Configuration(msg.to_string())
+    }
+}
+
+// impl From<CoreError> for ApiError {
+//     fn from(e: CoreError) -> Self {
+//         match e {
+//             CoreError::ConfigurationError(message) => Self::Configuration(message),
+//             core_error => Self::Core(core_error),
+//         }
+//     }
+// }
+
+// impl From<ConnectorError> for ApiError {
+//     fn from(e: ConnectorError) -> Self {
+//         Self::Connector(e)
+//     }
+// }
+
+impl From<url::ParseError> for ApiError {
+    fn from(e: url::ParseError) -> Self {
+        Self::configuration(format!("Error parsing connection string: {e}"))
+    }
+}
+
+impl From<connection_string::Error> for ApiError {
+    fn from(e: connection_string::Error) -> Self {
+        Self::configuration(format!("Error parsing connection string: {e}"))
+    }
+}
+
+impl From<serde_json::Error> for ApiError {
+    fn from(e: serde_json::Error) -> Self {
+        Self::JsonDecode(format!("{e}"))
+    }
+}
diff --git a/query-engine/query-engine-wasm/src/functions.rs b/query-engine/query-engine-wasm/src/functions.rs
new file mode 100644
index 000000000000..e0f0a93aa5cd
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/functions.rs
@@ -0,0 +1,47 @@
+use crate::error::ApiError;
+use serde::Serialize;
+use tsify::Tsify;
+use wasm_bindgen::prelude::wasm_bindgen;
+
+#[derive(Serialize, Tsify)]
+#[tsify(into_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct Version {
+    pub commit: &'static str,
+    pub version: &'static str,
+}
+
+#[wasm_bindgen(js_name = "getBuildTimeInfo")]
+pub fn version() -> Version {
+    Version {
+        commit: env!("GIT_HASH"),
+        version: env!("CARGO_PKG_VERSION"),
+    }
+}
+
+#[wasm_bindgen]
+pub fn dmmf(datamodel_string: String) -> Result<String, wasm_bindgen::JsError> {
+    let mut schema = psl::validate(datamodel_string.into());
+
+    schema
+        .diagnostics
+        .to_result()
+        .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?;
+
+    Ok("{}".to_string())
+
+    // let query_schema = query_core::schema::build(Arc::new(schema), true);
+    // let dmmf = dmmf::render_dmmf(&query_schema);
+
+    // Ok(serde_json::to_string(&dmmf)?)
+}
+
+#[wasm_bindgen]
+pub fn debug_panic(panic_message: Option<String>) -> Result<(), wasm_bindgen::JsError> {
+    let user_facing = user_facing_errors::Error::from_panic_payload(Box::new(
+        panic_message.unwrap_or_else(|| "query-engine-wasm debug panic".to_string()),
+    ));
+    let message = serde_json::to_string(&user_facing).unwrap();
+
+    Err(wasm_bindgen::JsError::new(&message))
+}
diff --git a/query-engine/query-engine-wasm/src/lib.rs b/query-engine/query-engine-wasm/src/lib.rs
new file mode 100644
index 000000000000..89b519515517
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/lib.rs
@@ -0,0 +1,19 @@
+pub mod engine;
+pub mod error;
+pub mod functions;
+pub mod logger;
+mod proxy;
+
+pub(crate) type Result<T> = std::result::Result<T, error::ApiError>;
+
+use wasm_bindgen::prelude::wasm_bindgen;
+
+/// Function that should be called before any other public function in this module.
+#[wasm_bindgen]
+pub fn init() {
+    // Set up temporary logging for the wasm module.
+    wasm_logger::init(wasm_logger::Config::default());
+
+    // Set up temporary panic hook for the wasm module.
+    std::panic::set_hook(Box::new(console_error_panic_hook::hook));
+}
diff --git a/query-engine/query-engine-wasm/src/logger.rs b/query-engine/query-engine-wasm/src/logger.rs
new file mode 100644
index 000000000000..561c48271b77
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/logger.rs
@@ -0,0 +1,132 @@
+#![allow(dead_code)]
+
+use core::fmt;
+use js_sys::Function as JsFunction;
+use serde_json::Value;
+use std::collections::BTreeMap;
+use tracing::{
+    field::{Field, Visit},
+    level_filters::LevelFilter,
+    Dispatch, Level, Subscriber,
+};
+use tracing_subscriber::{
+    filter::{filter_fn, FilterExt},
+    layer::SubscriberExt,
+    Layer, Registry,
+};
+use wasm_bindgen::JsValue;
+
+pub(crate) struct LogCallback(pub JsFunction);
+
+unsafe impl Send for LogCallback {}
+unsafe impl Sync for LogCallback {}
+
+pub(crate) struct Logger {
+    dispatcher: Dispatch,
+}
+
+impl Logger {
+    /// Creates a new logger using a call layer
+    pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback) -> Self {
+        let is_sql_query = filter_fn(|meta| {
+            meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query")
+        });
+
+        // is a mongodb query?
+        let is_mongo_query = filter_fn(|meta| meta.target() == "mongodb_query_connector::query");
+
+        // We need to filter the messages to send to our callback logging mechanism
+        let filters = if log_queries {
+            // Filter trace query events (for query log) or based in the defined log level
+            is_sql_query.or(is_mongo_query).or(log_level).boxed()
+        } else {
+            // Filter based in the defined log level
+            FilterExt::boxed(log_level)
+        };
+
+        let layer = CallbackLayer::new(log_callback).with_filter(filters);
+
+        Self {
+            dispatcher: Dispatch::new(Registry::default().with(layer)),
+        }
+    }
+
+    pub fn dispatcher(&self) -> Dispatch {
+        self.dispatcher.clone()
+    }
+}
+
+pub struct JsonVisitor<'a> {
+    values: BTreeMap<&'a str, Value>,
+}
+
+impl<'a> JsonVisitor<'a> {
+    pub fn new(level: &Level, target: &str) -> Self {
+        let mut values = BTreeMap::new();
+        values.insert("level", serde_json::Value::from(level.to_string()));
+
+        // NOTE: previous version used module_path, this is not correct and it should be _target_
+        values.insert("module_path", serde_json::Value::from(target));
+
+        JsonVisitor { values }
+    }
+}
+
+impl<'a> Visit for JsonVisitor<'a> {
+    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+        match field.name() {
+            name if name.starts_with("r#") => {
+                self.values
+                    .insert(&name[2..], serde_json::Value::from(format!("{value:?}")));
+            }
+            name => {
+                self.values.insert(name, serde_json::Value::from(format!("{value:?}")));
+            }
+        };
+    }
+
+    fn record_i64(&mut self, field: &Field, value: i64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_u64(&mut self, field: &Field, value: u64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_bool(&mut self, field: &Field, value: bool) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_str(&mut self, field: &Field, value: &str) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+}
+
+impl<'a> ToString for JsonVisitor<'a> {
+    fn to_string(&self) -> String {
+        serde_json::to_string(&self.values).unwrap()
+    }
+}
+
+pub(crate) struct CallbackLayer {
+    callback: LogCallback,
+}
+
+impl CallbackLayer {
+    pub fn new(callback: LogCallback) -> Self {
+        CallbackLayer { callback }
+    }
+}
+
+// A tracing layer for sending logs to a js callback, layers are composable, subscribers are not.
+impl<S: Subscriber> Layer<S> for CallbackLayer {
+    fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) {
+        let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target());
+        event.record(&mut visitor);
+
+        let _ = self
+            .callback
+            .0
+            .call1(&JsValue::NULL, &JsValue::from_str(&visitor.to_string()));
+    }
+}
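A sketch of how the logger above is meant to be driven: install its dispatcher for a scope and emit an event, at which point `CallbackLayer::on_event` runs `JsonVisitor` over the fields and forwards the JSON string to the JS callback. The `JsFunction` only exists at runtime in the host, so this fragment (which assumes the `Logger` type from the file above is in scope) illustrates just the dispatch side:

```rust
use tracing::dispatcher;

fn emit_with(logger: &Logger) {
    let dispatch = logger.dispatcher();
    // Events emitted inside the closure are routed to CallbackLayer.
    dispatcher::with_default(&dispatch, || {
        // Each field recorded here reaches a JsonVisitor::record_* method and
        // becomes a key in the JSON payload handed to the JS callback.
        tracing::info!(query = "SELECT 1", duration_ms = 3_u64, "query finished");
    });
}
```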
diff --git a/query-engine/query-engine-wasm/src/proxy.rs b/query-engine/query-engine-wasm/src/proxy.rs
new file mode 100644
index 000000000000..ad028e218236
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/proxy.rs
@@ -0,0 +1,107 @@
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+// This code will likely live in a separate crate, but for now it's here.
+
+use async_trait::async_trait;
+use js_sys::{Function as JsFunction, JsString, Object as JsObject, Promise as JsPromise, Reflect as JsReflect};
+use serde::{de::DeserializeOwned, Serialize};
+use wasm_bindgen::{JsCast, JsValue};
+
+type Result<T> = std::result::Result<T, JsValue>;
+
+pub struct CommonProxy {
+    /// Execute a query given as SQL, interpolating the given parameters.
+    query_raw: JsFunction,
+
+    /// Execute a query given as SQL, interpolating the given parameters and
+    /// returning the number of affected rows.
+    execute_raw: JsFunction,
+
+    /// Return the flavour for this driver.
+    pub(crate) flavour: String,
+}
+
+impl CommonProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let query_raw = JsReflect::get(driver, &"queryRaw".into())?.dyn_into::<JsFunction>()?;
+        let execute_raw = JsReflect::get(driver, &"executeRaw".into())?.dyn_into::<JsFunction>()?;
+        let flavour: String = JsReflect::get(driver, &"flavour".into())?
+            .dyn_into::<JsString>()?
+            .into();
+
+        let common_proxy = Self {
+            query_raw,
+            execute_raw,
+            flavour,
+        };
+        Ok(common_proxy)
+    }
+}
+
+pub struct DriverProxy {
+    start_transaction: JsFunction,
+}
+
+impl DriverProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let start_transaction = JsReflect::get(driver, &"startTransaction".into())?.dyn_into::<JsFunction>()?;
+
+        let driver_proxy = Self { start_transaction };
+        Ok(driver_proxy)
+    }
+}
+
+pub struct JsQueryable {
+    inner: CommonProxy,
+    driver_proxy: DriverProxy,
+}
+
+impl JsQueryable {
+    pub fn new(inner: CommonProxy, driver_proxy: DriverProxy) -> Self {
+        Self { inner, driver_proxy }
+    }
+}
+
+pub fn from_wasm(driver: JsObject) -> Result<JsQueryable> {
+    let common_proxy = CommonProxy::new(&driver)?;
+    let driver_proxy = DriverProxy::new(&driver)?;
+
+    let js_queryable = JsQueryable::new(common_proxy, driver_proxy);
+    Ok(js_queryable)
+}
+
+#[async_trait(?Send)]
+trait JsAsyncFunc {
+    async fn call1_async<T, R>(&self, arg1: T) -> Result<R>
+    where
+        T: Serialize,
+        R: DeserializeOwned;
+
+    fn call0_sync<R>(&self) -> Result<R>
+    where
+        R: DeserializeOwned;
+}
+
+#[async_trait(?Send)]
+impl JsAsyncFunc for JsFunction {
+    async fn call1_async<T, R>(&self, arg1: T) -> Result<R>
+    where
+        T: Serialize,
+        R: DeserializeOwned,
+    {
+        let arg1 = serde_wasm_bindgen::to_value(&arg1).map_err(|err| js_sys::Error::new(&err.to_string()))?;
+        let promise = self.call1(&JsValue::null(), &arg1)?;
+        let future = wasm_bindgen_futures::JsFuture::from(JsPromise::from(promise));
+        let value = future.await?;
+
+        serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string()))
+    }
+
+    fn call0_sync<R>(&self) -> Result<R>
+    where
+        R: DeserializeOwned,
+    {
+        let value = self.call0(&JsValue::null())?;
+        serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string()))
+    }
+}
diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs
index 75543dc7ee58..f3583df310d7 100644
--- a/query-engine/query-engine/src/server/mod.rs
+++ b/query-engine/query-engine/src/server/mod.rs
@@ -63,7 +63,7 @@ pub(crate) async fn routes(cx: Arc, req: Request) -> Result
     let mut res = match (req.method(), req.uri().path()) {
         (&Method::POST, "/") => request_handler(cx, req).await?,
         (&Method::GET, "/") if cx.enabled_features.contains(Feature::Playground) => playground_handler(),
-        (&Method::GET, "/status") => build_json_response(StatusCode::OK, r#"{"status":"ok"}"#),
+        (&Method::GET, "/status") => build_json_response(StatusCode::OK, &json!({"status": "ok"})),
 
         (&Method::GET, "/sdl") => {
             let schema = render_graphql_schema(cx.query_schema());
edition = "2021" -name = "prisma-models" +name = "query-structure" version = "0.0.0" [dependencies] @@ -10,13 +10,16 @@ prisma-value = { path = "../../libs/prisma-value" } bigdecimal = "0.3" thiserror = "1.0" +getrandom = { version = "0.2" } uuid = { workspace = true, optional = true } -cuid = { version = "1.2", optional = true } +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support", optional = true } nanoid = { version = "0.4.0", optional = true } chrono = { version = "0.4.6", features = ["serde"] } +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.2" +features = ["js"] + [features] -# Support for generating default UUID, CUID, nanoid and datetime values. This -# implies random number generation works, so it won't compile on targets like -# wasm32. +# Support for generating default UUID, CUID, nanoid and datetime values. default_generators = ["uuid/v4", "cuid", "nanoid"] diff --git a/query-engine/prisma-models/src/composite_type.rs b/query-engine/query-structure/src/composite_type.rs similarity index 100% rename from query-engine/prisma-models/src/composite_type.rs rename to query-engine/query-structure/src/composite_type.rs diff --git a/query-engine/prisma-models/src/convert.rs b/query-engine/query-structure/src/convert.rs similarity index 100% rename from query-engine/prisma-models/src/convert.rs rename to query-engine/query-structure/src/convert.rs diff --git a/query-engine/prisma-models/src/default_value.rs b/query-engine/query-structure/src/default_value.rs similarity index 100% rename from query-engine/prisma-models/src/default_value.rs rename to query-engine/query-structure/src/default_value.rs diff --git a/query-engine/prisma-models/src/error.rs b/query-engine/query-structure/src/error.rs similarity index 100% rename from query-engine/prisma-models/src/error.rs rename to query-engine/query-structure/src/error.rs diff --git a/query-engine/prisma-models/src/field/composite.rs b/query-engine/query-structure/src/field/composite.rs similarity index 100% rename from query-engine/prisma-models/src/field/composite.rs rename to query-engine/query-structure/src/field/composite.rs diff --git a/query-engine/prisma-models/src/field/mod.rs b/query-engine/query-structure/src/field/mod.rs similarity index 100% rename from query-engine/prisma-models/src/field/mod.rs rename to query-engine/query-structure/src/field/mod.rs diff --git a/query-engine/prisma-models/src/field/relation.rs b/query-engine/query-structure/src/field/relation.rs similarity index 100% rename from query-engine/prisma-models/src/field/relation.rs rename to query-engine/query-structure/src/field/relation.rs diff --git a/query-engine/prisma-models/src/field/scalar.rs b/query-engine/query-structure/src/field/scalar.rs similarity index 98% rename from query-engine/prisma-models/src/field/scalar.rs rename to query-engine/query-structure/src/field/scalar.rs index 92039da53663..b8ef8ab204e2 100644 --- a/query-engine/prisma-models/src/field/scalar.rs +++ b/query-engine/query-structure/src/field/scalar.rs @@ -91,7 +91,7 @@ impl ScalarField { match scalar_field_type { ScalarFieldType::CompositeType(_) => { - unreachable!("Cannot convert a composite type to a type identifier. 
diff --git a/query-engine/prisma-models/src/field_selection.rs b/query-engine/query-structure/src/field_selection.rs
similarity index 100%
rename from query-engine/prisma-models/src/field_selection.rs
rename to query-engine/query-structure/src/field_selection.rs
diff --git a/query-engine/prisma-models/src/fields.rs b/query-engine/query-structure/src/fields.rs
similarity index 100%
rename from query-engine/prisma-models/src/fields.rs
rename to query-engine/query-structure/src/fields.rs
diff --git a/query-engine/connectors/query-connector/src/compare.rs b/query-engine/query-structure/src/filter/compare.rs
similarity index 99%
rename from query-engine/connectors/query-connector/src/compare.rs
rename to query-engine/query-structure/src/filter/compare.rs
index 783d847939c4..7757965050ad 100644
--- a/query-engine/connectors/query-connector/src/compare.rs
+++ b/query-engine/query-structure/src/filter/compare.rs
@@ -1,7 +1,7 @@
 use super::*;
 use crate::filter::Filter;
-use prisma_models::PrismaValue;
+use prisma_value::PrismaValue;
 
 /// Comparing methods for scalar fields.
 pub trait ScalarCompare {
diff --git a/query-engine/connectors/query-connector/src/filter/composite.rs b/query-engine/query-structure/src/filter/composite.rs
similarity index 95%
rename from query-engine/connectors/query-connector/src/filter/composite.rs
rename to query-engine/query-structure/src/filter/composite.rs
index cb113e9c0355..d231421c325f 100644
--- a/query-engine/connectors/query-connector/src/filter/composite.rs
+++ b/query-engine/query-structure/src/filter/composite.rs
@@ -1,7 +1,5 @@
-use crate::compare::CompositeCompare;
-use crate::filter::Filter;
-use prisma_models::{CompositeFieldRef, PrismaValue};
-// use std::sync::Arc;
+use crate::{filter::Filter, CompositeCompare, CompositeFieldRef};
+use prisma_value::PrismaValue;
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CompositeFilter {
diff --git a/query-engine/connectors/query-connector/src/filter/into_filter.rs b/query-engine/query-structure/src/filter/into_filter.rs
similarity index 93%
rename from query-engine/connectors/query-connector/src/filter/into_filter.rs
rename to query-engine/query-structure/src/filter/into_filter.rs
index d2c32782795e..b180b3b80c49 100644
--- a/query-engine/connectors/query-connector/src/filter/into_filter.rs
+++ b/query-engine/query-structure/src/filter/into_filter.rs
@@ -1,6 +1,7 @@
 use super::*;
+
 use crate::ScalarCompare;
-use prisma_models::{SelectedField, SelectionResult};
+use crate::{SelectedField, SelectionResult};
 
 pub trait IntoFilter {
     fn filter(self) -> Filter;
diff --git a/query-engine/connectors/query-connector/src/filter/json.rs b/query-engine/query-structure/src/filter/json.rs
similarity index 98%
rename from query-engine/connectors/query-connector/src/filter/json.rs
rename to query-engine/query-structure/src/filter/json.rs
index c1cae9b81872..b0452106d19f 100644
--- a/query-engine/connectors/query-connector/src/filter/json.rs
+++ b/query-engine/query-structure/src/filter/json.rs
@@ -1,6 +1,5 @@
 use super::scalar::*;
-use crate::{Filter, JsonCompare, ScalarFilter};
-use prisma_models::ScalarFieldRef;
+use crate::{Filter, JsonCompare, ScalarFieldRef, ScalarFilter};
 
 #[derive(Debug, Clone, Eq, Hash, PartialEq)]
 pub enum JsonTargetType {
diff --git a/query-engine/connectors/query-connector/src/filter/list.rs b/query-engine/query-structure/src/filter/list.rs
similarity index 96%
rename from query-engine/connectors/query-connector/src/filter/list.rs
rename to query-engine/query-structure/src/filter/list.rs
index ab1cf913880b..e71064c67091 100644
--- a/query-engine/connectors/query-connector/src/filter/list.rs
+++ b/query-engine/query-structure/src/filter/list.rs
@@ -1,6 +1,5 @@
 use super::*;
-use crate::compare::ScalarListCompare;
-use prisma_models::{ScalarField, ScalarFieldRef};
+use crate::{ScalarField, ScalarFieldRef, ScalarListCompare}; 
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ScalarListFilter {
diff --git a/query-engine/connectors/query-connector/src/filter/mod.rs b/query-engine/query-structure/src/filter/mod.rs
similarity index 99%
rename from query-engine/connectors/query-connector/src/filter/mod.rs
rename to query-engine/query-structure/src/filter/mod.rs
index 856fb5207f95..a05d3e9c4095 100644
--- a/query-engine/connectors/query-connector/src/filter/mod.rs
+++ b/query-engine/query-structure/src/filter/mod.rs
@@ -5,6 +5,7 @@
 //! [RelationCompare](/query-connector/trait.RelationCompare.html).
 //! [CompositeCompare](/query-connector/trait.RelationCompare.html).
 
+mod compare;
 mod composite;
 mod into_filter;
 mod json;
@@ -12,14 +13,16 @@ mod list;
 mod relation;
 mod scalar;
 
+pub use compare::*;
 pub use composite::*;
 pub use into_filter::*;
 pub use json::*;
 pub use list::*;
-use prisma_models::ScalarFieldRef;
 pub use relation::*;
 pub use scalar::*;
 
+use crate::ScalarFieldRef;
+
 #[derive(Debug, Clone, Eq, Hash, PartialEq)]
 pub enum Filter {
     And(Vec<Filter>),
diff --git a/query-engine/connectors/query-connector/src/filter/relation.rs b/query-engine/query-structure/src/filter/relation.rs
similarity index 97%
rename from query-engine/connectors/query-connector/src/filter/relation.rs
rename to query-engine/query-structure/src/filter/relation.rs
index 500e406e407f..d806ba2dec1e 100644
--- a/query-engine/connectors/query-connector/src/filter/relation.rs
+++ b/query-engine/query-structure/src/filter/relation.rs
@@ -1,6 +1,4 @@
-use crate::compare::RelationCompare;
-use crate::filter::Filter;
-use prisma_models::RelationField;
+use crate::{filter::Filter, RelationCompare, RelationField};
 
 #[derive(Clone, PartialEq, Eq, Hash)]
 pub struct RelationFilter {
diff --git a/query-engine/connectors/query-connector/src/filter/scalar/compare.rs b/query-engine/query-structure/src/filter/scalar/compare.rs
similarity index 99%
rename from query-engine/connectors/query-connector/src/filter/scalar/compare.rs
rename to query-engine/query-structure/src/filter/scalar/compare.rs
index f93798441027..efbbb370f664 100644
--- a/query-engine/connectors/query-connector/src/filter/scalar/compare.rs
+++ b/query-engine/query-structure/src/filter/scalar/compare.rs
@@ -1,6 +1,5 @@
 use super::*;
 use crate::*;
-use prisma_models::*;
 
 impl ScalarCompare for ScalarFieldRef {
     /// Field is in a given value
diff --git a/query-engine/connectors/query-connector/src/filter/scalar/condition/mod.rs b/query-engine/query-structure/src/filter/scalar/condition/mod.rs
similarity index 99%
rename from query-engine/connectors/query-connector/src/filter/scalar/condition/mod.rs
rename to query-engine/query-structure/src/filter/scalar/condition/mod.rs
index 4845fab126f1..ff32d3d52219 100644
--- a/query-engine/connectors/query-connector/src/filter/scalar/condition/mod.rs
+++ b/query-engine/query-structure/src/filter/scalar/condition/mod.rs
@@ -4,7 +4,6 @@ pub use value::{ConditionListValue, ConditionValue};
 
 use super::*;
 use crate::*;
-use prisma_models::*;
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum ScalarCondition {
diff --git a/query-engine/connectors/query-connector/src/filter/scalar/condition/value.rs b/query-engine/query-structure/src/filter/scalar/condition/value.rs
similarity index 97%
rename from query-engine/connectors/query-connector/src/filter/scalar/condition/value.rs
rename to query-engine/query-structure/src/filter/scalar/condition/value.rs
index a93a8e304adb..5a1b7b7aecb7 100644
--- a/query-engine/connectors/query-connector/src/filter/scalar/condition/value.rs
+++ b/query-engine/query-structure/src/filter/scalar/condition/value.rs
@@ -1,4 +1,5 @@
-use prisma_models::{PrismaListValue, PrismaValue, ScalarFieldRef};
+use crate::field::*;
+use prisma_value::{PrismaListValue, PrismaValue};
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum ConditionValue {
diff --git a/query-engine/connectors/query-connector/src/filter/scalar/mod.rs b/query-engine/query-structure/src/filter/scalar/mod.rs
similarity index 99%
rename from query-engine/connectors/query-connector/src/filter/scalar/mod.rs
rename to query-engine/query-structure/src/filter/scalar/mod.rs
index a2179b758b26..adc2cc6de320 100644
--- a/query-engine/connectors/query-connector/src/filter/scalar/mod.rs
+++ b/query-engine/query-structure/src/filter/scalar/mod.rs
@@ -6,7 +6,7 @@ pub use compare::*;
 pub use condition::*;
 pub use projection::*;
 
-use prisma_models::*;
+use crate::*;
 
 use std::collections::BTreeSet;
diff --git a/query-engine/connectors/query-connector/src/filter/scalar/projection.rs b/query-engine/query-structure/src/filter/scalar/projection.rs
similarity index 97%
rename from query-engine/connectors/query-connector/src/filter/scalar/projection.rs
rename to query-engine/query-structure/src/filter/scalar/projection.rs
index 63170be55e5a..799ab0859cb9 100644
--- a/query-engine/connectors/query-connector/src/filter/scalar/projection.rs
+++ b/query-engine/query-structure/src/filter/scalar/projection.rs
@@ -1,4 +1,4 @@
-use prisma_models::ScalarFieldRef;
+use crate::field::ScalarFieldRef;
 
 #[derive(Clone, PartialEq, Eq, Hash)]
 pub enum ScalarProjection {
diff --git a/query-engine/prisma-models/src/internal_data_model.rs b/query-engine/query-structure/src/internal_data_model.rs
similarity index 100%
rename from query-engine/prisma-models/src/internal_data_model.rs
rename to query-engine/query-structure/src/internal_data_model.rs
diff --git a/query-engine/prisma-models/src/internal_enum.rs b/query-engine/query-structure/src/internal_enum.rs
similarity index 100%
rename from query-engine/prisma-models/src/internal_enum.rs
rename to query-engine/query-structure/src/internal_enum.rs
diff --git a/query-engine/prisma-models/src/lib.rs b/query-engine/query-structure/src/lib.rs
similarity index 91%
rename from query-engine/prisma-models/src/lib.rs
rename to query-engine/query-structure/src/lib.rs
index 87bdacfb8ce0..25519a6d856c 100644
--- a/query-engine/prisma-models/src/lib.rs
+++ b/query-engine/query-structure/src/lib.rs
@@ -13,11 +13,13 @@ mod order_by;
 mod parent_container;
 mod prisma_value_ext;
 mod projections;
+mod query_arguments;
 mod record;
 mod relation;
 mod selection_result;
 mod zipper;
 
+pub mod filter;
 pub mod prelude;
 
 pub use self::{default_value::*, native_type_instance::*, zipper::*};
@@ -27,11 +29,13 @@ pub use error::*;
 pub use field::*;
 pub use field_selection::*;
 pub use fields::*;
+pub use filter::*;
 pub use internal_data_model::*;
 pub use internal_enum::*;
 pub use model::*;
 pub use order_by::*;
 pub use projections::*;
+pub use query_arguments::*;
 pub use record::*;
 pub use relation::*;
 pub use selection_result::*;
diff --git a/query-engine/prisma-models/src/model.rs b/query-engine/query-structure/src/model.rs
similarity index 100%
rename from query-engine/prisma-models/src/model.rs
rename to query-engine/query-structure/src/model.rs
diff --git a/query-engine/prisma-models/src/native_type_instance.rs b/query-engine/query-structure/src/native_type_instance.rs
similarity index 100%
rename from query-engine/prisma-models/src/native_type_instance.rs
rename to query-engine/query-structure/src/native_type_instance.rs
diff --git a/query-engine/prisma-models/src/order_by.rs b/query-engine/query-structure/src/order_by.rs
similarity index 100%
rename from query-engine/prisma-models/src/order_by.rs
rename to query-engine/query-structure/src/order_by.rs
diff --git a/query-engine/prisma-models/src/parent_container.rs b/query-engine/query-structure/src/parent_container.rs
similarity index 100%
rename from query-engine/prisma-models/src/parent_container.rs
rename to query-engine/query-structure/src/parent_container.rs
diff --git a/query-engine/prisma-models/src/prelude.rs b/query-engine/query-structure/src/prelude.rs
similarity index 100%
rename from query-engine/prisma-models/src/prelude.rs
rename to query-engine/query-structure/src/prelude.rs
diff --git a/query-engine/prisma-models/src/prisma_value_ext.rs b/query-engine/query-structure/src/prisma_value_ext.rs
similarity index 100%
rename from query-engine/prisma-models/src/prisma_value_ext.rs
rename to query-engine/query-structure/src/prisma_value_ext.rs
diff --git a/query-engine/prisma-models/src/projections/mod.rs b/query-engine/query-structure/src/projections/mod.rs
similarity index 100%
rename from query-engine/prisma-models/src/projections/mod.rs
rename to query-engine/query-structure/src/projections/mod.rs
diff --git a/query-engine/prisma-models/src/projections/model_projection.rs b/query-engine/query-structure/src/projections/model_projection.rs
similarity index 100%
rename from query-engine/prisma-models/src/projections/model_projection.rs
rename to query-engine/query-structure/src/projections/model_projection.rs
diff --git a/query-engine/connectors/query-connector/src/query_arguments.rs b/query-engine/query-structure/src/query_arguments.rs
similarity index 99%
rename from query-engine/connectors/query-connector/src/query_arguments.rs
rename to query-engine/query-structure/src/query_arguments.rs
index cb9e5509734d..f9c222d80dbe 100644
--- a/query-engine/connectors/query-connector/src/query_arguments.rs
+++ b/query-engine/query-structure/src/query_arguments.rs
@@ -1,5 +1,4 @@
-use crate::filter::Filter;
-use prisma_models::*;
+use crate::*;
 
 /// `QueryArguments` define various constraints queried data should fulfill:
 /// - `cursor`, `take`, `skip` page through the data.
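The doc comment above summarizes `QueryArguments` pagination: `cursor` seeks to a record, `skip` drops rows from there, and `take` limits the result, with negative values taking from the end of the window. A tiny in-memory model of those semantics (hypothetical types, not the connector's actual SQL translation):

```rust
#[derive(Debug)]
struct PageArgs {
    cursor: Option<u64>, // seek to the record with this id
    skip: Option<usize>, // records to drop after seeking (1 excludes the cursor row)
    take: Option<i64>,   // limit; a negative value takes from the end
}

fn paginate(ids: &[u64], args: &PageArgs) -> Vec<u64> {
    // Seek: position the window at the cursor record, if any.
    let start = args
        .cursor
        .and_then(|c| ids.iter().position(|&id| id == c))
        .unwrap_or(0);

    // Skip: drop records after seeking.
    let mut window: Vec<u64> = ids[start..].iter().skip(args.skip.unwrap_or(0)).copied().collect();

    // Take: truncate forwards, or keep the tail for negative values.
    match args.take {
        Some(n) if n >= 0 => window.truncate(n as usize),
        Some(n) => {
            let keep = n.unsigned_abs() as usize;
            let len = window.len();
            window = window.split_off(len.saturating_sub(keep));
        }
        None => {}
    }
    window
}

fn main() {
    let ids = [1, 2, 3, 4, 5, 6];
    let args = PageArgs { cursor: Some(3), skip: Some(1), take: Some(2) };
    assert_eq!(paginate(&ids, &args), vec![4, 5]);
}
```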
diff --git a/query-engine/prisma-models/src/record.rs b/query-engine/query-structure/src/record.rs
similarity index 100%
rename from query-engine/prisma-models/src/record.rs
rename to query-engine/query-structure/src/record.rs
diff --git a/query-engine/prisma-models/src/relation.rs b/query-engine/query-structure/src/relation.rs
similarity index 100%
rename from query-engine/prisma-models/src/relation.rs
rename to query-engine/query-structure/src/relation.rs
diff --git a/query-engine/prisma-models/src/selection_result.rs b/query-engine/query-structure/src/selection_result.rs
similarity index 100%
rename from query-engine/prisma-models/src/selection_result.rs
rename to query-engine/query-structure/src/selection_result.rs
diff --git a/query-engine/prisma-models/src/zipper.rs b/query-engine/query-structure/src/zipper.rs
similarity index 100%
rename from query-engine/prisma-models/src/zipper.rs
rename to query-engine/query-structure/src/zipper.rs
diff --git a/query-engine/prisma-models/tests/datamodel_converter_tests.rs b/query-engine/query-structure/tests/datamodel_converter_tests.rs
similarity index 67%
rename from query-engine/prisma-models/tests/datamodel_converter_tests.rs
rename to query-engine/query-structure/tests/datamodel_converter_tests.rs
index 0a45c80ed163..c7ef83e0617f 100644
--- a/query-engine/prisma-models/tests/datamodel_converter_tests.rs
+++ b/query-engine/query-structure/tests/datamodel_converter_tests.rs
@@ -1,6 +1,6 @@
 #![allow(non_snake_case)]
 
-use prisma_models::*;
+use query_structure::*;
 use std::sync::Arc;
 
 #[test]
@@ -38,31 +38,159 @@ fn converting_enums() {
     }
 }
 
+// region: composite
 #[test]
-fn converting_composite_types() {
+fn converting_composite_types_compound() {
     let res = psl::parse_schema(
         r#"
-        datasource db {
-            provider = "mongodb"
-            url = "mongodb://localhost:27017/hello"
-        }
+            datasource db {
+              provider = "mongodb"
+              url = "mongodb://localhost:27017/hello"
+            }
 
-        model MyModel {
-            id String @id @default(auto()) @map("_id") @db.ObjectId
-            attribute Attribute
+            model Post {
+              id         String      @id @default(auto()) @map("_id") @db.ObjectId
+              author     User        @relation(fields: [authorId], references: [id])
+              authorId   String      @db.ObjectId
+              attributes Attribute[]
+
+              @@index([authorId, attributes])
+            }
+
+            type Attribute {
+              name  String
+              value String
+              group String
+            }
+
+            model User {
+              id   String @id @default(auto()) @map("_id") @db.ObjectId
+              Post Post[]
+            }
+        "#,
+    );
 
-            @@unique([attribute], name: "composite_index")
-        }
+    assert!(res.is_ok());
+}
 
-        type Attribute {
-            name String
-            value String
-            group String
-        }
+#[test]
+fn converting_composite_types_compound_unique() {
+    let res = psl::parse_schema(
+        r#"
+            datasource db {
+              provider = "mongodb"
+              url = "mongodb://localhost:27017/hello"
+            }
+
+            model Post {
+              id         String      @id @default(auto()) @map("_id") @db.ObjectId
+              author     User        @relation(fields: [authorId], references: [id])
+              authorId   String      @db.ObjectId
+              attributes Attribute[]
+
+              @@unique([authorId, attributes])
+              //       ^^^^^^^^^^^^^^^^^^^^^^
+              // Prisma does not currently support composite types in compound unique indices...
+            }
+
+            type Attribute {
+              name  String
+              value String
+              group String
+            }
+
+            model User {
+              id   String @id @default(auto()) @map("_id") @db.ObjectId
+              Post Post[]
+            }
         "#,
     );
-    assert!(res.unwrap_err().contains("Indexes can only contain scalar attributes. Please remove \"attribute\" from the argument list of the indexes."));
Please remove \"attribute\" from the argument list of the indexes.")); + + assert!(res + .unwrap_err() + .contains(r#"Prisma does not currently support composite types in compound unique indices, please remove "attributes" from the index. See https://pris.ly/d/mongodb-composite-compound-indices for more details"#)); +} + +#[test] +fn converting_composite_types_nested() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + type TheatersLocation { + address TheatersLocationAddress + geo TheatersLocationGeo + } + + type TheatersLocationAddress { + city String + state String + street1 String + street2 String? + zipcode String + } + + type TheatersLocationGeo { + coordinates Float[] + type String + } + + model theaters { + id String @id @default(auto()) @map("_id") @db.ObjectId + location TheatersLocation + theaterId Int + + @@index([location.geo], map: "geo index") + } + "#, + ); + + assert!(res.is_ok()); +} + +#[test] +fn converting_composite_types_nested_scalar() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + type TheatersLocation { + address TheatersLocationAddress + geo TheatersLocationGeo + } + + type TheatersLocationAddress { + city String + state String + street1 String + street2 String? + zipcode String + } + + type TheatersLocationGeo { + coordinates Float[] + type String + } + + model theaters { + id String @id @default(auto()) @map("_id") @db.ObjectId + location TheatersLocation + theaterId Int + + @@index([location.geo.type], map: "geo index") + } + "#, + ); + + assert!(res.is_ok()); } +// endregion #[test] fn models_with_only_scalar_fields() { @@ -292,7 +420,7 @@ fn duplicate_relation_name() { fn convert(datamodel: &str) -> InternalDataModel { let schema = psl::parse_schema(datamodel).unwrap(); - prisma_models::convert(Arc::new(schema)) + query_structure::convert(Arc::new(schema)) } trait DatamodelAssertions { diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index f5fb433b13ba..ad838c5999b7 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure" } query-core = { path = "../core" } user-facing-errors = { path = "../../libs/user-facing-errors" } psl.workspace = true diff --git a/query-engine/request-handlers/src/handler.rs b/query-engine/request-handlers/src/handler.rs index df25616c2201..cd5d887718f0 100644 --- a/query-engine/request-handlers/src/handler.rs +++ b/query-engine/request-handlers/src/handler.rs @@ -2,7 +2,6 @@ use super::GQLResponse; use crate::{GQLError, PrismaResponse, RequestBody}; use futures::FutureExt; use indexmap::IndexMap; -use prisma_models::{parse_datetime, stringify_datetime, PrismaValue}; use query_core::{ constants::custom_types, protocol::EngineProtocol, @@ -11,6 +10,7 @@ use query_core::{ ArgumentValue, ArgumentValueObject, BatchDocument, BatchDocumentTransaction, CompactedDocument, Operation, QueryDocument, QueryExecutor, TxId, }; +use query_structure::{parse_datetime, stringify_datetime, PrismaValue}; use std::{collections::HashMap, fmt, panic::AssertUnwindSafe}; type ArgsToResult = (HashMap, IndexMap); diff --git a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs 
b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs index 208705268c1e..09ceeae20c0e 100644 --- a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs +++ b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs @@ -1,12 +1,12 @@ use crate::{FieldQuery, HandlerError, JsonSingleQuery, SelectionSet}; use bigdecimal::{BigDecimal, FromPrimitive}; use indexmap::IndexMap; -use prisma_models::{decode_bytes, parse_datetime, prelude::ParentContainer, Field}; use query_core::{ constants::custom_types, schema::{ObjectType, OutputField, QuerySchema}, ArgumentValue, Operation, Selection, }; +use query_structure::{decode_bytes, parse_datetime, prelude::ParentContainer, Field}; use serde_json::Value as JsonValue; use std::str::FromStr; diff --git a/query-engine/schema/Cargo.toml b/query-engine/schema/Cargo.toml index 0d4c06ad944e..12664344572d 100644 --- a/query-engine/schema/Cargo.toml +++ b/query-engine/schema/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure" } psl.workspace = true rustc-hash = "1.1.0" once_cell = "1" diff --git a/query-engine/schema/README.md b/query-engine/schema/README.md index 4952270b0cd5..9522bdd563a0 100644 --- a/query-engine/schema/README.md +++ b/query-engine/schema/README.md @@ -1,6 +1,6 @@ # schema -This crate contains the logic responsible for building a query schema from a Prisma datamodel (presented as a `prisma_models::InternalDataModel`). +This crate contains the logic responsible for building a query schema from a Prisma datamodel (presented as a `query_structure::InternalDataModel`). ## Benchmarks diff --git a/query-engine/schema/src/build.rs b/query-engine/schema/src/build.rs index 6fa03c3fabbc..3c589989f21e 100644 --- a/query-engine/schema/src/build.rs +++ b/query-engine/schema/src/build.rs @@ -15,8 +15,8 @@ pub(crate) use output_types::{mutation_type, query_type}; use self::{enum_types::*, utils::*}; use crate::*; -use prisma_models::{ast, Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; use psl::{datamodel_connector::ConnectorCapability, PreviewFeatures}; +use query_structure::{ast, Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; pub fn build(schema: Arc, enable_raw_queries: bool) -> QuerySchema { let preview_features = schema.configuration.preview_features(); @@ -29,6 +29,6 @@ pub fn build_with_features( enable_raw_queries: bool, ) -> QuerySchema { let connector = schema.connector; - let internal_data_model = prisma_models::convert(schema); + let internal_data_model = query_structure::convert(schema); QuerySchema::new(enable_raw_queries, connector, preview_features, internal_data_model) } diff --git a/query-engine/schema/src/build/enum_types.rs b/query-engine/schema/src/build/enum_types.rs index 715a161b08b9..48490ccc914c 100644 --- a/query-engine/schema/src/build/enum_types.rs +++ b/query-engine/schema/src/build/enum_types.rs @@ -1,7 +1,7 @@ use super::*; use crate::EnumType; use constants::{filters, itx, json_null, ordering}; -use prisma_models::prelude::ParentContainer; +use query_structure::prelude::ParentContainer; pub(crate) fn sort_order_enum() -> EnumType { let ident = Identifier::new_prisma(IdentifierType::SortOrder); diff --git a/query-engine/schema/src/build/input_types/fields/arguments.rs b/query-engine/schema/src/build/input_types/fields/arguments.rs index 33400b7d0002..58393083f7fc 100644 --- 
a/query-engine/schema/src/build/input_types/fields/arguments.rs +++ b/query-engine/schema/src/build/input_types/fields/arguments.rs @@ -3,7 +3,7 @@ use constants::args; use input_types::objects::order_by_objects::OrderByOptions; use mutations::create_one; use objects::*; -use prisma_models::{prelude::ParentContainer, CompositeFieldRef}; +use query_structure::{prelude::ParentContainer, CompositeFieldRef}; /// Builds "where" argument. pub(crate) fn where_argument<'a>(ctx: &'a QuerySchema, model: &Model) -> InputField<'a> { diff --git a/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs b/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs index 5f00ee47c652..d952cb0d18d2 100644 --- a/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs +++ b/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs @@ -1,6 +1,6 @@ use super::*; use constants::*; -use prisma_models::CompositeFieldRef; +use query_structure::CompositeFieldRef; pub(crate) struct CreateDataInputFieldMapper { unchecked: bool, diff --git a/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs b/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs index 91ae15322687..b8269875527e 100644 --- a/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs +++ b/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs @@ -5,7 +5,7 @@ pub(crate) use create::*; pub(crate) use update::*; use super::*; -use prisma_models::prelude::*; +use query_structure::prelude::*; // Todo: This isn't final, this is only the first draft to get structure into the // wild cross-dependency waste that was the create/update inputs. diff --git a/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs b/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs index a9b0395d2e00..e6f051b70586 100644 --- a/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs +++ b/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs @@ -1,6 +1,6 @@ use super::*; use constants::*; -use prisma_models::CompositeFieldRef; +use query_structure::CompositeFieldRef; pub(crate) struct UpdateDataInputFieldMapper { unchecked: bool, diff --git a/query-engine/schema/src/build/input_types/fields/field_filter_types.rs b/query-engine/schema/src/build/input_types/fields/field_filter_types.rs index af2c77d006b4..84e6faa749ea 100644 --- a/query-engine/schema/src/build/input_types/fields/field_filter_types.rs +++ b/query-engine/schema/src/build/input_types/fields/field_filter_types.rs @@ -1,7 +1,7 @@ use super::{field_ref_type::WithFieldRefInputExt, objects::*, *}; use constants::{aggregations, filters}; -use prisma_models::{CompositeFieldRef, DefaultKind, NativeTypeInstance, PrismaValue}; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{CompositeFieldRef, DefaultKind, NativeTypeInstance, PrismaValue}; /// Builds filter types for the given model field. 
pub(crate) fn get_field_filter_types( diff --git a/query-engine/schema/src/build/input_types/mod.rs b/query-engine/schema/src/build/input_types/mod.rs index 98c8caa84a38..14ff37722d6d 100644 --- a/query-engine/schema/src/build/input_types/mod.rs +++ b/query-engine/schema/src/build/input_types/mod.rs @@ -3,7 +3,7 @@ pub(crate) mod objects; use super::*; use fields::*; -use prisma_models::ScalarFieldRef; +use query_structure::ScalarFieldRef; fn map_scalar_input_type_for_field<'a>(ctx: &'a QuerySchema, field: &ScalarFieldRef) -> InputType<'a> { map_scalar_input_type(ctx, field.type_identifier(), field.is_list()) diff --git a/query-engine/schema/src/build/input_types/objects/filter_objects.rs b/query-engine/schema/src/build/input_types/objects/filter_objects.rs index 0ea555f77724..6ae66ca4219b 100644 --- a/query-engine/schema/src/build/input_types/objects/filter_objects.rs +++ b/query-engine/schema/src/build/input_types/objects/filter_objects.rs @@ -1,6 +1,6 @@ use super::*; use constants::filters; -use prisma_models::{prelude::ParentContainer, CompositeFieldRef}; +use query_structure::{prelude::ParentContainer, CompositeFieldRef}; pub(crate) fn scalar_filter_object_type( ctx: &'_ QuerySchema, diff --git a/query-engine/schema/src/build/input_types/objects/order_by_objects.rs b/query-engine/schema/src/build/input_types/objects/order_by_objects.rs index e5112c5c71f5..b36670f053d2 100644 --- a/query-engine/schema/src/build/input_types/objects/order_by_objects.rs +++ b/query-engine/schema/src/build/input_types/objects/order_by_objects.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use super::*; use constants::{aggregations, ordering}; use output_types::aggregation; -use prisma_models::prelude::ParentContainer; +use query_structure::prelude::ParentContainer; #[derive(Debug, Default, Clone, Copy)] pub(crate) struct OrderByOptions { diff --git a/query-engine/schema/src/build/mutations/create_many.rs b/query-engine/schema/src/build/mutations/create_many.rs index c96314e90a1a..9ef94df26240 100644 --- a/query-engine/schema/src/build/mutations/create_many.rs +++ b/query-engine/schema/src/build/mutations/create_many.rs @@ -3,8 +3,8 @@ use crate::{Identifier, IdentifierType, InputField, InputType, OutputField, Outp use constants::*; use input_types::{fields::data_input_mapper::*, list_union_type}; use output_types::objects; -use prisma_models::{Model, RelationFieldRef}; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{Model, RelationFieldRef}; /// Builds a create many mutation field (e.g. createManyUsers) for given model. pub(crate) fn create_many(ctx: &'_ QuerySchema, model: Model) -> OutputField<'_> { diff --git a/query-engine/schema/src/build/mutations/create_one.rs b/query-engine/schema/src/build/mutations/create_one.rs index 0452e4885011..11699c7cce19 100644 --- a/query-engine/schema/src/build/mutations/create_one.rs +++ b/query-engine/schema/src/build/mutations/create_one.rs @@ -5,7 +5,7 @@ use crate::{ use constants::*; use input_types::fields::data_input_mapper::*; use output_types::objects; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Model, RelationFieldRef}; /// Builds a create mutation field (e.g. createUser) for given model. 
pub(crate) fn create_one(ctx: &QuerySchema, model: Model) -> OutputField<'_> { diff --git a/query-engine/schema/src/build/output_types/aggregation/mod.rs b/query-engine/schema/src/build/output_types/aggregation/mod.rs index 30db9edca672..4b3043c6c804 100644 --- a/query-engine/schema/src/build/output_types/aggregation/mod.rs +++ b/query-engine/schema/src/build/output_types/aggregation/mod.rs @@ -1,5 +1,5 @@ use super::*; -use prisma_models::{prelude::ParentContainer, ScalarField}; +use query_structure::{prelude::ParentContainer, ScalarField}; pub(crate) mod group_by; pub(crate) mod plain; diff --git a/query-engine/schema/src/build/output_types/field.rs b/query-engine/schema/src/build/output_types/field.rs index 2fb5bce366df..29924c9d98c1 100644 --- a/query-engine/schema/src/build/output_types/field.rs +++ b/query-engine/schema/src/build/output_types/field.rs @@ -1,6 +1,6 @@ use super::*; use input_types::fields::arguments; -use prisma_models::{CompositeFieldRef, ScalarFieldRef}; +use query_structure::{CompositeFieldRef, ScalarFieldRef}; pub(crate) fn map_output_field(ctx: &'_ QuerySchema, model_field: ModelField) -> OutputField<'_> { let cloned_model_field = model_field.clone(); diff --git a/query-engine/schema/src/build/output_types/mutation_type.rs b/query-engine/schema/src/build/output_types/mutation_type.rs index 6aff5185de74..b0202360acb3 100644 --- a/query-engine/schema/src/build/output_types/mutation_type.rs +++ b/query-engine/schema/src/build/output_types/mutation_type.rs @@ -1,8 +1,8 @@ use super::*; use input_types::fields::arguments; use mutations::{create_many, create_one}; -use prisma_models::{DefaultKind, PrismaValue}; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{DefaultKind, PrismaValue}; /// Builds the root `Mutation` type. pub(crate) fn mutation_fields(ctx: &QuerySchema) -> Vec { diff --git a/query-engine/schema/src/build/output_types/objects/composite.rs b/query-engine/schema/src/build/output_types/objects/composite.rs index f52d25de3c10..ca64cdaaaa55 100644 --- a/query-engine/schema/src/build/output_types/objects/composite.rs +++ b/query-engine/schema/src/build/output_types/objects/composite.rs @@ -1,7 +1,7 @@ #![allow(clippy::unnecessary_to_owned)] use super::*; -use prisma_models::CompositeType; +use query_structure::CompositeType; pub(crate) fn composite_object_type(ctx: &'_ QuerySchema, composite: CompositeType) -> ObjectType<'_> { ObjectType::new(Identifier::new_model(composite.name().to_owned()), move || { diff --git a/query-engine/schema/src/build/utils.rs b/query-engine/schema/src/build/utils.rs index 4eeafcb23c82..d7ee3106d230 100644 --- a/query-engine/schema/src/build/utils.rs +++ b/query-engine/schema/src/build/utils.rs @@ -1,6 +1,6 @@ use super::*; use once_cell::sync::Lazy; -use prisma_models::{walkers, DefaultKind}; +use query_structure::{walkers, DefaultKind}; use std::borrow::Cow; /// Input object type convenience wrapper function. 
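Every hunk in the `schema` crate above is the same mechanical edit: `use prisma_models::…` becomes `use query_structure::…` with no behavioral change, and this PR updates each call site directly. For comparison, a hypothetical compatibility facade (not something this diff ships) could have kept the old crate name compiling during a staged migration:

```rust
// Hypothetical prisma-models/src/lib.rs facade -- NOT part of this PR.
// It re-exports the renamed crate so downstream `use prisma_models::*;`
// keeps compiling while imports are migrated incrementally.
pub use query_structure::*;

// Individual items can additionally be shadowed with deprecated aliases
// (explicit items take precedence over glob re-exports) to nudge
// consumers toward the new path:
#[deprecated(note = "import `query_structure::InternalDataModel` instead")]
pub type InternalDataModel = query_structure::InternalDataModel;
```

Updating every call site at once, as done here, avoids carrying such a shim, at the cost of migrating the whole workspace in a single commit.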
diff --git a/query-engine/schema/src/enum_type.rs b/query-engine/schema/src/enum_type.rs index b9225b30cd17..7529e95d27da 100644 --- a/query-engine/schema/src/enum_type.rs +++ b/query-engine/schema/src/enum_type.rs @@ -1,5 +1,5 @@ use super::*; -use prisma_models::{InternalEnum, PrismaValue, ScalarFieldRef}; +use query_structure::{InternalEnum, PrismaValue, ScalarFieldRef}; #[derive(Debug, Clone, PartialEq)] pub enum EnumType { diff --git a/query-engine/schema/src/identifier_type.rs b/query-engine/schema/src/identifier_type.rs index 0d24efc0cc58..825a8dd741c9 100644 --- a/query-engine/schema/src/identifier_type.rs +++ b/query-engine/schema/src/identifier_type.rs @@ -1,5 +1,5 @@ use crate::{capitalize, constants::ordering, scalar_filter_name}; -use prisma_models::{ast::FieldArity, prelude::*, *}; +use query_structure::{ast::FieldArity, prelude::*, *}; /// Enum used to represent unique schema type names. /// It helps deferring the allocation + formatting of strings diff --git a/query-engine/schema/src/input_types.rs b/query-engine/schema/src/input_types.rs index 176a31a60a16..3a6c0610f600 100644 --- a/query-engine/schema/src/input_types.rs +++ b/query-engine/schema/src/input_types.rs @@ -1,7 +1,7 @@ use super::*; use fmt::Debug; use once_cell::sync::Lazy; -use prisma_models::{prelude::ParentContainer, DefaultKind}; +use query_structure::{prelude::ParentContainer, DefaultKind}; use std::{borrow::Cow, boxed::Box, fmt}; type InputObjectFields<'a> = diff --git a/query-engine/schema/src/output_types.rs b/query-engine/schema/src/output_types.rs index 7aa949f79083..32956d01d50b 100644 --- a/query-engine/schema/src/output_types.rs +++ b/query-engine/schema/src/output_types.rs @@ -1,7 +1,7 @@ use super::*; use fmt::Debug; use once_cell::sync::Lazy; -use prisma_models::ast::ModelId; +use query_structure::ast::ModelId; use std::{borrow::Cow, fmt}; #[derive(Debug, Clone)] diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index f48572db32fb..0324896aea07 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -1,9 +1,9 @@ use crate::{IdentifierType, ObjectType, OutputField}; -use prisma_models::{ast, InternalDataModel}; use psl::{ datamodel_connector::{Connector, ConnectorCapabilities, ConnectorCapability, RelationMode}, PreviewFeature, PreviewFeatures, }; +use query_structure::{ast, InternalDataModel}; use std::{collections::HashMap, fmt}; #[derive(Clone, Debug, Hash, Eq, PartialEq)] diff --git a/renovate.json b/renovate.json index 28cc31716a6f..4d8e7d2511d0 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,7 @@ { + "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "config:base" + "config:recommended" ], "cargo": { "enabled": false @@ -13,17 +14,47 @@ "before 5am every weekday", "every weekend" ], + "rangeStrategy": "pin", "separateMinorPatch": true, + "configMigration": true, "packageRules": [ { - "matchFiles": ["docker-compose.yml"], - "matchUpdateTypes": ["minor", "major"], + "matchFileNames": [ + "docker-compose.yml" + ], + "matchUpdateTypes": [ + "minor", + "major" + ], "enabled": false }, { "groupName": "Weekly vitess docker image version update", - "packageNames": ["vitess/vttestserver"], - "schedule": ["before 7am on Wednesday"] + "matchPackageNames": [ + "vitess/vttestserver" + ], + "schedule": [ + "before 7am on Wednesday" + ] + }, + { + "groupName": "Prisma Driver Adapters", + "matchPackageNames": [ + "@prisma/driver-adapter-utils" + ], + "matchPackagePrefixes": [ + 
"@prisma/adapter" + ], + "schedule": [ + "at any time" + ] + }, + { + "matchPackageNames": [ + "node", + "pnpm" + ], + "enabled": false } ] } diff --git a/schema-engine/cli/tests/cli_tests.rs b/schema-engine/cli/tests/cli_tests.rs index 18866f9b1c0a..bec62a2d3a31 100644 --- a/schema-engine/cli/tests/cli_tests.rs +++ b/schema-engine/cli/tests/cli_tests.rs @@ -183,6 +183,17 @@ fn test_create_database_mssql(api: TestApi) { assert!(output.status.success()); } +#[test_connector(tags(Sqlite))] +fn test_sqlite_url(api: TestApi) { + let base_dir = tempfile::tempdir().unwrap(); + let sqlite_path = base_dir.path().join("test.db"); + let url = format!("{}", sqlite_path.to_string_lossy()); + let output = api.run(&["--datasource", &url, "can-connect-to-database"]); + assert!(!output.status.success()); + let message = String::from_utf8(output.stderr).unwrap(); + assert!(message.contains("The provided database string is invalid. The scheme is not recognized in database URL.")); +} + #[test_connector(tags(Sqlite))] fn test_create_sqlite_database(api: TestApi) { let base_dir = tempfile::tempdir().unwrap(); diff --git a/schema-engine/core/src/lib.rs b/schema-engine/core/src/lib.rs index f4288f4305bc..92329a429663 100644 --- a/schema-engine/core/src/lib.rs +++ b/schema-engine/core/src/lib.rs @@ -89,9 +89,7 @@ fn connector_for_connection_string( let connector = MongoDbSchemaConnector::new(params); Ok(Box::new(connector)) } - Some(other) => Err(CoreError::url_parse_error(format!( - "`{other}` is not a known connection URL scheme. Prisma cannot determine the connector." - ))), + Some(_other) => Err(CoreError::url_parse_error("The scheme is not recognized")), None => Err(CoreError::user_facing(InvalidConnectionString { details: String::new(), })),