diff --git a/.buildkite/engineer b/.buildkite/engineer index 0b1adc2d8011..5de99cea5390 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -1,5 +1,47 @@ #!/usr/bin/env bash +set -e + +if [[ -z "$2" ]]; then + printf "Error: the name of the pipeline must be provided.\nExample: './engineer pipeline test'" 1>&2 + exit 1 +else + echo "We are in the $2 pipeline." +fi + +# Checks what's the diff with the previous commit +# This is used to detect if the previous commit was empty +GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .) + +# Checks what's the diff with the previous commit, +# excluding some paths that do not need a run, +# because they do not affect tests running in Buildkite. +GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!query-engine/query-engine-wasm' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) + +# $2 is either "test" or "build", depending on the pipeline +# Example: ./.buildkite/engineer pipeline test +# We only want to check for changes and skip in the test pipeline. +if [[ "$2" == "test" ]]; then + # If GIT_DIFF is empty then the previous commit was empty + # We assume it's intended and we continue with the run + # Example use: to get a new engine hash built with identical code + if [ -z "${GIT_DIFF}" ]; then + echo "The previous commit is empty, this run will continue..." + else + # Checking if GIT_DIFF_WITH_IGNORED_PATHS is empty + # If it's empty then it's most likely that there are changes but they are in ignored paths. + # So we do not start Buildkite + if [ -z "${GIT_DIFF_WITH_IGNORED_PATHS}" ]; then + echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." + exit 0 + else + # Note that printf works better for displaying line returns in CI + printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF_WITH_IGNORED_PATHS}" + fi + fi +fi + +# Check OS if [[ "$OSTYPE" == "linux-gnu" ]]; then OS=linux-amzn elif [[ "$OSTYPE" == "darwin"* ]]; then @@ -12,8 +54,7 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). - set -e - curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.59/latest/$OS/engineer.gz" --output engineer.gz + curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.60/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer @@ -22,6 +63,5 @@ if ! type "engineer" &> /dev/null; then rm -rf ./engineer else # Already installed on the system - set -e engineer "$@" fi diff --git a/.envrc b/.envrc index 3bd875aed813..48b1254c1700 100644 --- a/.envrc +++ b/.envrc @@ -22,6 +22,12 @@ export QE_LOG_LEVEL=debug # Set it to "trace" to enable query-graph debugging lo # export PRISMA_RENDER_DOT_FILE=1 # Uncomment to enable rendering a dot file of the Query Graph from an executed query. # export FMT_SQL=1 # Uncomment it to enable logging formatted SQL queries +### Uncomment to run driver adapters tests. See query-engine-driver-adapters.yml workflow for how tests run in CI. 
+# export EXTERNAL_TEST_EXECUTOR="$(pwd)/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +# export DRIVER_ADAPTER=pg # Set to pg, neon or planetscale +# export PRISMA_DISABLE_QUAINT_EXECUTORS=1 # Disable quaint executors for driver adapters +# export DRIVER_ADAPTER_URL_OVERRIDE ="postgres://USER:PASSWORD@DATABASExxxx" # Override the database url for the driver adapter tests + # Mongo image requires additional wait time on arm arch for some reason. if uname -a | grep -q 'arm64'; then export INIT_WAIT_SEC="10" @@ -36,7 +42,9 @@ fi # Set up env vars and build inputs from flake.nix automatically for nix users. # If you don't use nix, you can safely ignore this. -if command -v nix &> /dev/null +# You can set the DISABLE_NIX environment variable if you're in an environment +# where nix is pre-installed (e.g. gitpod) but you don't want to use it. +if command -v nix &> /dev/null && [ -z ${DISABLE_NIX+x} ] then if nix flake metadata > /dev/null; then if type nix_direnv_watch_file &> /dev/null; then @@ -48,5 +56,6 @@ fi # Source the gitignored .envrc.local if it exists. if test -f .envrc.local; then + watch_file .envrc.local source .envrc.local fi diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index bc2feb1b7b8d..4dbfa4855fc9 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -5,6 +5,8 @@ on: - main pull_request: paths-ignore: + - '.github/**' + - '!.github/workflows/benchmark.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -17,7 +19,7 @@ concurrency: jobs: benchmark: - name: "Run benchmarks on Linux" + name: 'Run benchmarks on Linux' runs-on: ubuntu-latest steps: diff --git a/.github/workflows/build-wasm.yml b/.github/workflows/build-wasm.yml index 4d715d57afd7..7969cd2dd462 100644 --- a/.github/workflows/build-wasm.yml +++ b/.github/workflows/build-wasm.yml @@ -5,15 +5,17 @@ on: - main pull_request: paths-ignore: - - ".buildkite/**" - - "*.md" - - "LICENSE" - - "CODEOWNERS" - - "renovate.json" + - '.github/**' + - '!.github/workflows/build-wasm.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' jobs: build: - name: "prisma-schema-wasm build ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" + name: 'prisma-schema-wasm build ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}' runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml index 713fb1a86505..d9f81f47772b 100644 --- a/.github/workflows/compilation.yml +++ b/.github/workflows/compilation.yml @@ -1,7 +1,9 @@ -name: "Release binary compilation test" +name: 'Release binary compilation test' on: pull_request: paths-ignore: + - '.github/**' + - '!.github/workflows/compilation.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -14,7 +16,7 @@ concurrency: jobs: test-crate-compilation: - name: "Compile top level crates on Linux" + name: 'Compile top level crates on Linux' strategy: fail-fast: false runs-on: ubuntu-latest @@ -22,17 +24,17 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - - run: "cargo clean && cargo build --release -p schema-engine-cli" - name: "Compile Migration Engine" + - run: 'cargo clean && cargo build --release -p schema-engine-cli' + name: 'Compile Migration Engine' - - run: "cargo clean && cargo build --release -p prisma-fmt" - name: "Compile prisma-fmt" + - run: 'cargo clean && cargo build --release -p prisma-fmt' + name: 'Compile prisma-fmt' - - 
run: "cargo clean && cargo build --release -p query-engine" - name: "Compile Query Engine Binary" + - run: 'cargo clean && cargo build --release -p query-engine' + name: 'Compile Query Engine Binary' - - run: "cargo clean && cargo build --release -p query-engine-node-api" - name: "Compile Query Engine Library" + - run: 'cargo clean && cargo build --release -p query-engine-node-api' + name: 'Compile Query Engine Library' - - name: "Check that Cargo.lock did not change" - run: "git diff --exit-code" + - name: 'Check that Cargo.lock did not change' + run: 'git diff --exit-code' diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 107d842eef6b..50b635544b91 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -5,6 +5,8 @@ on: - main pull_request: paths-ignore: + - '.github/**' + - '!.github/workflows/formatting.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -19,7 +21,7 @@ jobs: clippy: runs-on: ubuntu-latest env: - RUSTFLAGS: "-Dwarnings" + RUSTFLAGS: '-Dwarnings' steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index e166c05e5841..30ecd68a2152 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -1,6 +1,7 @@ name: Build and publish @prisma/prisma-schema-wasm +run-name: npm - release @prisma/prisma-schema-wasm@${{ github.event.inputs.enginesWrapperVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} -concurrency: build-prisma-schema-wasm +concurrency: publish-prisma-schema-wasm on: # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo @@ -12,7 +13,7 @@ on: required: true npmDistTag: required: true - default: "latest" + default: 'latest' jobs: build: @@ -21,7 +22,7 @@ jobs: steps: - name: Print input env: - THE_INPUT: "${{ toJson(github.event.inputs) }}" + THE_INPUT: '${{ toJson(github.event.inputs) }}' run: | echo $THE_INPUT @@ -30,29 +31,22 @@ jobs: ref: ${{ github.event.inputs.enginesHash }} - uses: cachix/install-nix-action@v23 - # - # Build - # - - - run: nix build .#prisma-schema-wasm - - # - # Publish - # + - name: Build + run: nix build .#prisma-schema-wasm - uses: actions/setup-node@v3 with: - node-version: "14.x" + node-version: '20.x' - - name: Set up NPM token + # This is needed to be done manually because of `PACKAGE_DIR` used later + - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - - run: | + - name: Update version in package.json & Publish @prisma/prisma-schema-wasm + run: | + # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - # # Failure handlers # @@ -60,11 +54,10 @@ jobs: - name: Set current job url in SLACK_FOOTER env var if: ${{ failure() }} run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 env: - SLACK_TITLE: "prisma-schema-wasm publishing failed :x:" - SLACK_COLOR: "#FF0000" + SLACK_TITLE: 'prisma-schema-wasm 
publishing failed :x:' + SLACK_COLOR: '#FF0000' SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml index d4a840728272..7b49e80a7bd0 100644 --- a/.github/workflows/quaint.yml +++ b/.github/workflows/quaint.yml @@ -17,14 +17,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=pooled" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=pooled" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=pooled" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - - "--doc --features=all" + - "--lib --no-default-features --features=mssql --features=pooled" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" TEST_MYSQL8: "mysql://root:prisma@localhost:3307/prisma" diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml index 163ee3ad0141..5ebcd79cec4c 100644 --- a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -5,6 +5,8 @@ on: - main pull_request: paths-ignore: + - '.github/**' + - '!.github/workflows/query-engine-black-box.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -17,27 +19,27 @@ concurrency: jobs: rust-tests: - name: "Test query-engine as a black-box" + name: 'Test query-engine as a black-box' strategy: fail-fast: false matrix: database: - - name: "postgres15" + - name: 'postgres15' single_threaded: false - connector: "postgres" - version: "15" + connector: 'postgres' + version: '15' env: - LOG_LEVEL: "info" - LOG_QUERIES: "y" - RUST_LOG_FORMAT: "devel" - RUST_BACKTRACE: "1" - CLICOLOR_FORCE: "1" - CLOSED_TX_CLEANUP: "2" - SIMPLE_TEST_MODE: "1" - QUERY_BATCH_SIZE: "10" - TEST_RUNNER: "direct" + LOG_LEVEL: 'info' + LOG_QUERIES: 'y' + RUST_LOG_FORMAT: 'devel' + RUST_BACKTRACE: '1' + CLICOLOR_FORCE: '1' + CLOSED_TX_CLEANUP: '2' + SIMPLE_TEST_MODE: '1' + QUERY_BATCH_SIZE: '10' + TEST_RUNNER: 'direct' TEST_CONNECTOR: ${{ matrix.database.connector }} TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} @@ -46,21 +48,25 @@ jobs: - uses: actions/checkout@v4 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: "Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})" + - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})' run: make start-${{ 
matrix.database.name }} - uses: dtolnay/rust-toolchain@stable - - run: export WORKSPACE_ROOT=$(pwd) && cargo build --package query-engine + - run: export WORKSPACE_ROOT=$(pwd) && cargo build --package query-engine env: CLICOLOR_FORCE: 1 - - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package black-box-tests -- --test-threads=1 + - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package black-box-tests -- --test-threads=1 env: CLICOLOR_FORCE: 1 diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml new file mode 100644 index 000000000000..f3a3badfb804 --- /dev/null +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -0,0 +1,95 @@ +name: Driver Adapters +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/query-engine-driver-adapters.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust-query-engine-tests: + name: 'Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}' + + strategy: + fail-fast: false + matrix: + adapter: + - name: 'pg' + setup_task: 'dev-pg-postgres13' + - name: 'neon:ws' + setup_task: 'dev-neon-ws-postgres13' + - name: 'libsql' + setup_task: 'dev-libsql-sqlite' + - name: 'planetscale' + setup_task: 'dev-planetscale-vitess8' + node_version: ['18'] + env: + LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter + LOG_QUERIES: 'y' + RUST_LOG: 'info' + RUST_LOG_FORMAT: 'devel' + RUST_BACKTRACE: '1' + CLICOLOR_FORCE: '1' + CLOSED_TX_CLEANUP: '2' + SIMPLE_TEST_MODE: '1' + QUERY_BATCH_SIZE: '10' + WORKSPACE_ROOT: ${{ github.workspace }} + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: 'Setup Node.js' + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node_version }} + + - name: 'Setup pnpm' + uses: pnpm/action-setup@v2 + with: + version: 8 + + - name: 'Get pnpm store directory' + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV + + - name: 'Login to Docker Hub' + uses: docker/login-action@v3 + continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Extract Branch Name + id: extract-branch + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n "$branch" ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi + + - run: make ${{ matrix.adapter.setup_task }} + + - uses: dtolnay/rust-toolchain@stable + + - name: 'Run tests' + run: cargo test --package query-engine-tests -- --test-threads=1 diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 6ed522c94f13..762c3da4a50a 100644 --- a/.github/workflows/query-engine.yml +++ b/.github/workflows/query-engine.yml @@ -5,6 +5,8 @@ on: - main pull_request: paths-ignore: + - '.github/**' + - '!.github/workflows/query-engine.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -17,56 +19,56 @@ concurrency: jobs: rust-query-engine-tests: - name: "Test ${{ 
matrix.database.name }} (${{ matrix.engine_protocol }}) on Linux" + name: 'Test ${{ matrix.database.name }} (${{ matrix.engine_protocol }}) on Linux' strategy: fail-fast: false matrix: database: - - name: "vitess_5_7" + - name: 'vitess_5_7' single_threaded: true - connector: "vitess" - version: "5.7" - - name: "vitess_8_0" + connector: 'vitess' + version: '5.7' + - name: 'vitess_8_0' single_threaded: true - connector: "vitess" - version: "8.0" - - name: "postgres15" + connector: 'vitess' + version: '8.0' + - name: 'postgres15' single_threaded: true - connector: "postgres" - version: "15" - - name: "mssql_2022" + connector: 'postgres' + version: '15' + - name: 'mssql_2022' single_threaded: false - connector: "sqlserver" - version: "2022" - - name: "mongodb_4_2" + connector: 'sqlserver' + version: '2022' + - name: 'mongodb_4_2' single_threaded: true - connector: "mongodb" - version: "4.2" - - name: "cockroach_23_1" + connector: 'mongodb' + version: '4.2' + - name: 'cockroach_23_1' single_threaded: false - connector: "cockroachdb" - version: "23.1" - - name: "cockroach_22_2" + connector: 'cockroachdb' + version: '23.1' + - name: 'cockroach_22_2' single_threaded: false - connector: "cockroachdb" - version: "22.2" - - name: "cockroach_22_1_0" + connector: 'cockroachdb' + version: '22.2' + - name: 'cockroach_22_1_0' single_threaded: false - connector: "cockroachdb" - version: "22.1" + connector: 'cockroachdb' + version: '22.1' engine_protocol: [graphql, json] env: - LOG_LEVEL: "info" - LOG_QUERIES: "y" - RUST_LOG_FORMAT: "devel" - RUST_BACKTRACE: "1" - CLICOLOR_FORCE: "1" - CLOSED_TX_CLEANUP: "2" - SIMPLE_TEST_MODE: "1" - QUERY_BATCH_SIZE: "10" - TEST_RUNNER: "direct" + LOG_LEVEL: 'info' + LOG_QUERIES: 'y' + RUST_LOG_FORMAT: 'devel' + RUST_BACKTRACE: '1' + CLICOLOR_FORCE: '1' + CLOSED_TX_CLEANUP: '2' + SIMPLE_TEST_MODE: '1' + QUERY_BATCH_SIZE: '10' + TEST_RUNNER: 'direct' TEST_CONNECTOR: ${{ matrix.database.connector }} TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} PRISMA_ENGINE_PROTOCOL: ${{ matrix.engine_protocol }} @@ -76,13 +78,17 @@ jobs: - uses: actions/checkout@v4 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: "Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})" + - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})' run: make start-${{ matrix.database.name }} - uses: dtolnay/rust-toolchain@stable diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml index 20c263dc17d0..03d23317bbd0 100644 --- a/.github/workflows/schema-engine.yml +++ b/.github/workflows/schema-engine.yml @@ -6,6 +6,8 @@ on: pull_request: paths-ignore: # Generic + - '.github/**' + - '!.github/workflows/schema-engine.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -20,30 +22,30 @@ concurrency: jobs: test-mongodb-schema-connector: - name: "Test ${{ matrix.database.name }} on Linux" + name: 'Test ${{ matrix.database.name }} on Linux' strategy: fail-fast: false matrix: database: - - name: "mongodb42" - url: "mongodb://prisma:prisma@localhost:27016/?authSource=admin&retryWrites=true" - - name: "mongodb44" - url: "mongodb://prisma:prisma@localhost:27017/?authSource=admin&retryWrites=true" - - name: 
"mongodb5" - url: "mongodb://prisma:prisma@localhost:27018/?authSource=admin&retryWrites=true" + - name: 'mongodb42' + url: 'mongodb://prisma:prisma@localhost:27016/?authSource=admin&retryWrites=true' + - name: 'mongodb44' + url: 'mongodb://prisma:prisma@localhost:27017/?authSource=admin&retryWrites=true' + - name: 'mongodb5' + url: 'mongodb://prisma:prisma@localhost:27018/?authSource=admin&retryWrites=true' runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 continue-on-error: true with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: "Start ${{ matrix.database.name }}" + - name: 'Start ${{ matrix.database.name }}' run: make start-${{ matrix.database.name }}-single - run: cargo test -p mongodb-schema-connector @@ -52,54 +54,54 @@ jobs: TEST_DATABASE_URL: ${{ matrix.database.url }} test-linux: - name: "Test ${{ matrix.database.name }} on Linux" + name: 'Test ${{ matrix.database.name }} on Linux' strategy: fail-fast: false matrix: database: - name: mssql_2017 - url: "sqlserver://localhost:1434;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED" + url: 'sqlserver://localhost:1434;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED' - name: mssql_2019 - url: "sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED" + url: 'sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED' - name: mysql_5_6 - url: "mysql://root:prisma@localhost:3309" + url: 'mysql://root:prisma@localhost:3309' - name: mysql_5_7 - url: "mysql://root:prisma@localhost:3306" + url: 'mysql://root:prisma@localhost:3306' - name: mysql_8 - url: "mysql://root:prisma@localhost:3307" + url: 'mysql://root:prisma@localhost:3307' - name: mysql_mariadb - url: "mysql://root:prisma@localhost:3308" + url: 'mysql://root:prisma@localhost:3308' - name: postgres9 - url: "postgresql://postgres:prisma@localhost:5431" + url: 'postgresql://postgres:prisma@localhost:5431' - name: postgres10 - url: "postgresql://postgres:prisma@localhost:5432" + url: 'postgresql://postgres:prisma@localhost:5432' - name: postgres11 - url: "postgresql://postgres:prisma@localhost:5433" + url: 'postgresql://postgres:prisma@localhost:5433' - name: postgres12 - url: "postgresql://postgres:prisma@localhost:5434" + url: 'postgresql://postgres:prisma@localhost:5434' - name: postgres13 - url: "postgresql://postgres:prisma@localhost:5435" + url: 'postgresql://postgres:prisma@localhost:5435' - name: postgres14 - url: "postgresql://postgres:prisma@localhost:5437" + url: 'postgresql://postgres:prisma@localhost:5437' - name: postgres15 - url: "postgresql://postgres:prisma@localhost:5438" + url: 'postgresql://postgres:prisma@localhost:5438' - name: cockroach_23_1 - url: "postgresql://prisma@localhost:26260" + url: 'postgresql://prisma@localhost:26260' - name: cockroach_22_2 - url: "postgresql://prisma@localhost:26259" + url: 'postgresql://prisma@localhost:26259' - name: cockroach_22_1_0 - url: "postgresql://prisma@localhost:26257" + url: 'postgresql://prisma@localhost:26257' - name: sqlite url: sqlite - name: vitess_5_7 - url: "mysql://root:prisma@localhost:33577/test" - shadow_database_url: 
"mysql://root:prisma@localhost:33578/shadow" + url: 'mysql://root:prisma@localhost:33577/test' + shadow_database_url: 'mysql://root:prisma@localhost:33578/shadow' is_vitess: true single_threaded: true - name: vitess_8_0 - url: "mysql://root:prisma@localhost:33807/test" - shadow_database_url: "mysql://root:prisma@localhost:33808/shadow" + url: 'mysql://root:prisma@localhost:33807/test' + shadow_database_url: 'mysql://root:prisma@localhost:33808/shadow' is_vitess: true single_threaded: true @@ -109,13 +111,17 @@ jobs: - uses: dtolnay/rust-toolchain@stable - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 continue-on-error: true + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: "Start ${{ matrix.database.name }}" + - name: 'Start ${{ matrix.database.name }}' run: make start-${{ matrix.database.name }} - run: cargo test -p sql-introspection-tests @@ -196,9 +202,9 @@ jobs: matrix: db: - name: mysql - url: "mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60" + url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60' - name: mariadb - url: "mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60" + url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60' rust: - stable os: @@ -206,7 +212,7 @@ jobs: runs-on: ${{ matrix.os }} - name: "Test ${{ matrix.db.name }} on Windows" + name: 'Test ${{ matrix.db.name }} on Windows' steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 96c681075d1d..b852499205e9 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -5,6 +5,8 @@ on: - main pull_request: paths-ignore: + - '.github/**' + - '!.github/workflows/unit-tests.yml' - '.buildkite/**' - '*.md' - 'LICENSE' @@ -28,15 +30,16 @@ jobs: - uses: dtolnay/rust-toolchain@stable - run: | - cargo test --workspace \ - --exclude=query-engine \ - --exclude=query-engine-node-api \ - --exclude=black-box-tests \ - --exclude=query-engine-tests \ - --exclude=sql-migration-tests \ - --exclude=schema-engine-cli \ - --exclude=sql-schema-describer \ - --exclude=sql-introspection-tests \ - --exclude=mongodb-schema-connector + cargo test --workspace \ + --exclude=quaint \ + --exclude=query-engine \ + --exclude=query-engine-node-api \ + --exclude=black-box-tests \ + --exclude=query-engine-tests \ + --exclude=sql-migration-tests \ + --exclude=schema-engine-cli \ + --exclude=sql-schema-describer \ + --exclude=sql-introspection-tests \ + --exclude=mongodb-schema-connector env: CLICOLOR_FORCE: 1 diff --git a/.gitignore b/.gitignore index 43e03e31867d..75c06e9ce68b 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,7 @@ dmmf.json graph.dot prisma-schema-wasm/nodejs + +# Ignore pnpm-lock.yaml +query-engine/driver-adapters/pnpm-lock.yaml +package-lock.json diff --git a/Cargo.lock b/Cargo.lock index c9fb7e1e1498..35eff530999a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -321,8 +321,9 @@ version = "0.1.0" dependencies = [ "anyhow", "enumflags2", - "indoc", + "indoc 2.0.3", "insta", + "query-engine-metrics", "query-engine-tests", "query-tests-setup", "reqwest", @@ -432,7 +433,7 @@ dependencies = [ "connection-string", "either", "enumflags2", - "indoc", + "indoc 2.0.3", "lsp-types", "once_cell", "psl-core", @@ -671,6 
+672,16 @@ dependencies = [ "windows-sys 0.45.0", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -859,29 +870,26 @@ dependencies = [ [[package]] name = "cuid" version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51294db11d38eb763c92936c5c88425d0090e27dce21dd15748134af9e53e739" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "base36", "cuid-util", "cuid2", - "hostname", "num", "once_cell", "rand 0.8.5", + "sha3", ] [[package]] name = "cuid-util" version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea2bfe0336ff1b7ca74819b2df8dfae9afea358aff6b1688baa5c181d8c3713" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" [[package]] name = "cuid2" version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47d99cacd52fd67db7490ad051c8c1973fb75520174d69aabbae08c534c9d0e8" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "cuid-util", "num", @@ -889,14 +897,38 @@ dependencies = [ "sha3", ] +[[package]] +name = "darling" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d706e75d87e35569db781a9b5e2416cff1236a47ed380831f959382ccd5f858" +dependencies = [ + "darling_core 0.10.2", + "darling_macro 0.10.2", +] + [[package]] name = "darling" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.13.4", + "darling_macro 0.13.4", +] + +[[package]] +name = "darling_core" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.9.3", + "syn 1.0.109", ] [[package]] @@ -913,13 +945,24 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "darling_macro" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" +dependencies = [ + "darling_core 0.10.2", + "quote", + "syn 1.0.109", +] + [[package]] name = "darling_macro" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ - "darling_core", + "darling_core 0.13.4", "quote", "syn 1.0.109", ] @@ -949,7 +992,7 @@ version = "0.1.0" dependencies = [ "base64 0.13.1", "expect-test", - "indoc", + "indoc 2.0.3", "once_cell", "psl", "regex", @@ -990,7 +1033,7 @@ name = "diagnostics" version = "0.1.0" dependencies = [ "colored", - "indoc", + "indoc 2.0.3", "pest", ] @@ -1035,7 +1078,7 @@ dependencies = [ "expect-test", "flate2", "indexmap 1.9.3", - "indoc", + "indoc 2.0.3", "itertools", "pretty_assertions", "prisma-models", @@ -1067,6 +1110,7 @@ dependencies = [ "tokio", "tracing", "tracing-core", + "uuid", ] 
[[package]] @@ -1375,6 +1419,12 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + [[package]] name = "funty" version = "2.0.0" @@ -1504,8 +1554,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -1520,6 +1572,19 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "graphql-parser" version = "0.3.0" @@ -1820,12 +1885,35 @@ dependencies = [ "hashbrown 0.14.0", ] +[[package]] +name = "indoc" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + [[package]] name = "indoc" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", + "unindent", +] + [[package]] name = "insta" version = "1.21.2" @@ -2308,9 +2396,9 @@ dependencies = [ [[package]] name = "mobc" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc79c4a77e312fee9c7bd4b957c12ad1196db73c4a81e5c0b13f02083c4f7f2f" +checksum = "0bdeff49b387edef305eccfe166af3e1483bb57902dbf369dddc42dc824df23b" dependencies = [ "async-trait", "futures-channel", @@ -2424,11 +2512,11 @@ dependencies = [ "enumflags2", "expect-test", "futures", - "indoc", + "indoc 2.0.3", "mongodb", "mongodb-client", "mongodb-schema-describer", - "names", + "names 0.12.0", "once_cell", "psl", "regex", @@ -2525,6 +2613,15 @@ dependencies = [ "uuid", ] +[[package]] +name = "names" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" +dependencies = [ + "rand 0.3.23", +] + [[package]] name = "names" version = "0.12.0" @@ -2970,6 +3067,12 @@ dependencies = [ "schema-ast", ] +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + [[package]] name = "pbkdf2" version = "0.11.0" @@ -3151,7 +3254,7 @@ dependencies = [ [[package]] name = "postgres-native-tls" version = "0.5.0" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" 
dependencies = [ "native-tls", "tokio", @@ -3162,7 +3265,7 @@ dependencies = [ [[package]] name = "postgres-protocol" version = "0.6.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "base64 0.13.1", "byteorder", @@ -3179,7 +3282,7 @@ dependencies = [ [[package]] name = "postgres-types" version = "0.2.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "bit-vec", "bytes", @@ -3232,7 +3335,7 @@ dependencies = [ "dmmf", "enumflags2", "expect-test", - "indoc", + "indoc 2.0.3", "log", "lsp-types", "once_cell", @@ -3249,6 +3352,7 @@ dependencies = [ "bigdecimal", "chrono", "cuid", + "getrandom 0.2.10", "itertools", "nanoid", "prisma-value", @@ -3313,6 +3417,12 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + [[package]] name = "proc-macro2" version = "1.0.66" @@ -3384,7 +3494,7 @@ dependencies = [ "dissimilar", "either", "expect-test", - "indoc", + "indoc 2.0.3", "psl-core", ] @@ -3396,7 +3506,7 @@ dependencies = [ "chrono", "diagnostics", "enumflags2", - "indoc", + "indoc 2.0.3", "itertools", "lsp-types", "once_cell", @@ -3461,16 +3571,23 @@ dependencies = [ "either", "futures", "hex", + "indoc 0.3.6", "lru-cache", "metrics 0.18.1", "mobc", "mysql_async", + "names 0.11.0", "native-tls", "num_cpus", + "once_cell", + "paste", "percent-encoding", "postgres-native-tls", "postgres-types", + "quaint-test-macros", + "quaint-test-setup", "rusqlite", + "serde", "serde_json", "sqlformat", "thiserror", @@ -3484,6 +3601,30 @@ dependencies = [ "uuid", ] +[[package]] +name = "quaint-test-macros" +version = "0.1.0" +dependencies = [ + "darling 0.10.2", + "once_cell", + "proc-macro2", + "quaint-test-setup", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "quaint-test-setup" +version = "0.1.0" +dependencies = [ + "async-trait", + "bitflags 1.3.2", + "names 0.11.0", + "once_cell", + "quaint", + "tokio", +] + [[package]] name = "quanta" version = "0.9.3" @@ -3565,7 +3706,7 @@ dependencies = [ "enumflags2", "graphql-parser", "hyper", - "indoc", + "indoc 2.0.3", "mongodb-query-connector", "opentelemetry", "opentelemetry-otlp", @@ -3650,7 +3791,7 @@ dependencies = [ "colored", "enumflags2", "futures", - "indoc", + "indoc 2.0.3", "insta", "once_cell", "prisma-value", @@ -3666,11 +3807,40 @@ dependencies = [ "uuid", ] +[[package]] +name = "query-engine-wasm" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "connection-string", + "console_error_panic_hook", + "futures", + "js-sys", + "log", + "prisma-models", + "psl", + "serde", + "serde-wasm-bindgen", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "tracing-subscriber", + "tsify", + "url", + "user-facing-errors", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-logger", +] + [[package]] name = "query-test-macros" version = "0.1.0" dependencies = [ - "darling", + "darling 0.13.4", "proc-macro2", "quote", "syn 1.0.109", @@ -3685,8 +3855,9 @@ dependencies = [ "enumflags2", "hyper", "indexmap 1.9.3", - 
"indoc", + "indoc 2.0.3", "itertools", + "jsonrpc-core", "nom", "once_cell", "parse-hyperlinks", @@ -3701,6 +3872,7 @@ dependencies = [ "request-handlers", "serde", "serde_json", + "sql-query-connector", "strip-ansi-escapes", "thiserror", "tokio", @@ -3742,6 +3914,29 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +dependencies = [ + "libc", + "rand 0.4.6", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + [[package]] name = "rand" version = "0.7.3" @@ -3786,6 +3981,21 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + [[package]] name = "rand_core" version = "0.5.1" @@ -3844,6 +4054,15 @@ dependencies = [ "num_cpus", ] +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -4262,7 +4481,7 @@ dependencies = [ "base64 0.13.1", "connection-string", "expect-test", - "indoc", + "indoc 2.0.3", "jsonrpc-core", "quaint", "schema-connector", @@ -4366,6 +4585,17 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3b143e2833c57ab9ad3ea280d21fd34e285a42837aeb0ee301f4f41890fa00e" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + [[package]] name = "serde_bytes" version = "0.11.12" @@ -4386,6 +4616,17 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "serde_derive_internals" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + [[package]] name = "serde_json" version = "1.0.104" @@ -4437,7 +4678,7 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ - "darling", + "darling 0.13.4", "proc-macro2", "quote", "syn 1.0.109", @@ -4620,7 +4861,7 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" name = "sql-ddl" version = "0.1.0" dependencies = [ - "indoc", + "indoc 2.0.3", ] [[package]] @@ -4631,7 +4872,7 @@ dependencies = [ "connection-string", "enumflags2", "expect-test", - "indoc", + "indoc 2.0.3", "pretty_assertions", "psl", "quaint", @@ -4657,7 +4898,7 @@ dependencies = [ "connection-string", "enumflags2", "expect-test", - "indoc", + "indoc 2.0.3", "jsonrpc-core", "once_cell", "pretty_assertions", @@ -4718,7 +4959,7 @@ dependencies = [ 
"datamodel-renderer", "either", "enumflags2", - "indoc", + "indoc 2.0.3", "once_cell", "prisma-value", "psl", @@ -4749,7 +4990,7 @@ dependencies = [ "enumflags2", "expect-test", "indexmap 1.9.3", - "indoc", + "indoc 2.0.3", "once_cell", "pretty_assertions", "prisma-value", @@ -4816,6 +5057,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +[[package]] +name = "strsim" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" + [[package]] name = "strsim" version = "0.10.0" @@ -5159,7 +5406,7 @@ dependencies = [ [[package]] name = "tokio-postgres" version = "0.7.7" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "async-trait", "byteorder", @@ -5490,6 +5737,31 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +[[package]] +name = "tsify" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b26cf145f2f3b9ff84e182c448eaf05468e247f148cf3d2a7d67d78ff023a0" +dependencies = [ + "gloo-utils", + "serde", + "serde_json", + "tsify-macros", + "wasm-bindgen", +] + +[[package]] +name = "tsify-macros" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a94b0f0954b3e59bfc2c246b4c8574390d94a4ad4ad246aaf2fb07d7dfd3b47" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.28", +] + [[package]] name = "twox-hash" version = "1.6.3" @@ -5569,6 +5841,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "unindent" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" + [[package]] name = "unreachable" version = "1.0.0" @@ -5610,7 +5888,7 @@ name = "user-facing-errors" version = "0.1.0" dependencies = [ "backtrace", - "indoc", + "indoc 2.0.3", "itertools", "quaint", "serde", @@ -5731,9 +6009,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5741,16 +6019,16 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -5768,9 +6046,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.87" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5778,22 +6056,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-logger" diff --git a/Cargo.toml b/Cargo.toml index 02e1f7373d04..4a3cd1450caf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,48 +24,50 @@ members = [ "query-engine/prisma-models", "query-engine/query-engine", "query-engine/query-engine-node-api", + "query-engine/query-engine-wasm", "query-engine/request-handlers", "query-engine/schema", "libs/*", "prisma-fmt", "prisma-schema-wasm", "psl/*", -] - -# All path dependencies residing in the workspace directory automatically become members. -# The following shouldn't be considered members but embedded dependencies. -exclude = [ "quaint", - "quaint/test-macros", - "quaint/test-setup" ] [workspace.dependencies] psl = { path = "./psl/psl" } serde_json = { version = "1", features = ["float_roundtrip", "preserve_order"] } serde = { version = "1", features = ["derive"] } -tokio = { version = "1.25", features = ["rt-multi-thread", "macros", "sync", "io-std", "io-util", "parking_lot", "time"] } +tokio = { version = "1.25", features = [ + "rt-multi-thread", + "macros", + "sync", + "io-std", + "io-util", + "parking_lot", + "time", +] } user-facing-errors = { path = "./libs/user-facing-errors" } uuid = { version = "1", features = ["serde"] } indoc = "2.0.1" connection-string = "0.2" -napi = { version = "2.12.4", default-features = false, features = ["napi8", "tokio_rt", "serde-json"] } +napi = { version = "2.12.4", default-features = false, features = [ + "napi8", + "tokio_rt", + "serde-json", +] } napi-derive = "2.12.4" [workspace.dependencies.quaint] path = "quaint" features = [ - "bigdecimal", - "chrono", "expose-drivers", "fmt-sql", - "json", "mssql", "mysql", "pooled", "postgresql", "sqlite", - "uuid", ] [profile.dev.package.backtrace] @@ -80,7 +82,7 @@ strip = "symbols" [profile.release] lto = "fat" codegen-units = 1 -opt-level = 's' # Optimize for size. +opt-level = 's' # Optimize for size. 
[profile.profiling] inherits = "release" diff --git a/Makefile b/Makefile index 4645b32328e6..a30a32ca1871 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -44,7 +45,13 @@ release: ################# test-qe: +ifndef DRIVER_ADAPTER cargo test --package query-engine-tests +else + @echo "Executing query engine tests with $(DRIVER_ADAPTER) driver adapter"; \ + # Add your actual command for the "test-driver-adapter" task here + $(MAKE) test-driver-adapter-$(DRIVER_ADAPTER); +endif test-qe-verbose: cargo test --package query-engine-tests -- --nocapture @@ -67,7 +74,7 @@ test-qe-black-box: build-qe ########################### all-dbs-up: - docker compose -f docker-compose.yml up -d --remove-orphans + docker compose -f docker-compose.yml up --wait -d --remove-orphans all-dbs-down: docker compose -f docker-compose.yml down -v --remove-orphans @@ -77,145 +84,171 @@ start-sqlite: dev-sqlite: cp $(CONFIG_PATH)/sqlite $(CONFIG_FILE) +dev-libsql-sqlite: build-qe-napi build-connector-kit-js + cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) + +test-libsql-sqlite: dev-libsql-sqlite test-qe-st + +test-driver-adapter-libsql: test-libsql-sqlite + start-postgres9: - docker compose -f docker-compose.yml up -d --remove-orphans postgres9 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 dev-postgres9: start-postgres9 cp $(CONFIG_PATH)/postgres9 $(CONFIG_FILE) start-postgres10: - docker compose -f docker-compose.yml up -d --remove-orphans postgres10 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres10 dev-postgres10: start-postgres10 cp $(CONFIG_PATH)/postgres10 $(CONFIG_FILE) start-postgres11: - docker compose -f docker-compose.yml up -d --remove-orphans postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres11 dev-postgres11: start-postgres11 cp $(CONFIG_PATH)/postgres11 $(CONFIG_FILE) start-postgres12: - docker compose -f docker-compose.yml up -d --remove-orphans postgres12 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres12 dev-postgres12: start-postgres12 cp $(CONFIG_PATH)/postgres12 $(CONFIG_FILE) start-postgres13: - docker compose -f docker-compose.yml up -d --remove-orphans postgres13 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres13 dev-postgres13: start-postgres13 cp $(CONFIG_PATH)/postgres13 $(CONFIG_FILE) +start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13 + +dev-pg-postgres13: start-pg-postgres13 + cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) + +test-pg-postgres13: dev-pg-postgres13 test-qe-st + +test-driver-adapter-pg: test-pg-postgres13 + +start-neon-postgres13: build-qe-napi build-connector-kit-js + docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 + +dev-neon-ws-postgres13: start-neon-postgres13 + cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) + +test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st + +test-driver-adapter-neon: test-neon-ws-postgres13 + start-postgres14: - docker compose -f docker-compose.yml up -d --remove-orphans postgres14 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 dev-postgres14: start-postgres14 cp $(CONFIG_PATH)/postgres14 $(CONFIG_FILE) start-postgres15: - 
docker compose -f docker-compose.yml up -d --remove-orphans postgres15 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres15 dev-postgres15: start-postgres15 cp $(CONFIG_PATH)/postgres15 $(CONFIG_FILE) start-cockroach_23_1: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_23_1 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_23_1 dev-cockroach_23_1: start-cockroach_23_1 cp $(CONFIG_PATH)/cockroach_23_1 $(CONFIG_FILE) start-cockroach_22_2: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_2 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_2 dev-cockroach_22_2: start-cockroach_22_2 cp $(CONFIG_PATH)/cockroach_22_2 $(CONFIG_FILE) start-cockroach_22_1_0: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_1_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_1_0 dev-cockroach_22_1_0: start-cockroach_22_1_0 cp $(CONFIG_PATH)/cockroach_22_1 $(CONFIG_FILE) start-cockroach_21_2_0_patched: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_21_2_0_patched + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_21_2_0_patched dev-cockroach_21_2_0_patched: start-cockroach_21_2_0_patched cp $(CONFIG_PATH)/cockroach_21_2_0_patched $(CONFIG_FILE) dev-pgbouncer: - docker compose -f docker-compose.yml up -d --remove-orphans pgbouncer postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans pgbouncer postgres11 start-mysql_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-7 dev-mysql: start-mysql_5_7 cp $(CONFIG_PATH)/mysql57 $(CONFIG_FILE) start-mysql_5_6: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-6 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-6 dev-mysql_5_6: start-mysql_5_6 cp $(CONFIG_PATH)/mysql56 $(CONFIG_FILE) start-mysql_8: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-8-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-8-0 dev-mysql8: start-mysql_8 cp $(CONFIG_PATH)/mysql8 $(CONFIG_FILE) start-mysql_mariadb: - docker compose -f docker-compose.yml up -d --remove-orphans mariadb-10-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mariadb-10-0 dev-mariadb: start-mysql_mariadb cp $(CONFIG_PATH)/mariadb $(CONFIG_FILE) start-mssql_2019: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2019 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2019 dev-mssql2019: start-mssql_2019 cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2022: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2022 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2022 dev-mssql2022: start-mssql_2022 cp $(CONFIG_PATH)/sqlserver2022 $(CONFIG_FILE) start-mssql_edge: - docker compose -f docker-compose.yml up -d --remove-orphans azure-edge + docker compose -f docker-compose.yml up --wait -d --remove-orphans azure-edge dev-mssql_edge: start-mssql_edge cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2017: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2017 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2017 dev-mssql2017: start-mssql_2017 cp $(CONFIG_PATH)/sqlserver2017 $(CONFIG_FILE) 
start-mongodb42-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42-single start-mongodb44-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44-single start-mongodb4-single: start-mongodb44-single start-mongodb5-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5-single start-mongodb_4_2: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42 start-mongodb_4_4: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44 dev-mongodb_4_4: start-mongodb_4_4 cp $(CONFIG_PATH)/mongodb44 $(CONFIG_FILE) start-mongodb_5: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5 dev-mongodb_5: start-mongodb_5 cp $(CONFIG_PATH)/mongodb5 $(CONFIG_FILE) @@ -224,21 +257,54 @@ dev-mongodb_4_2: start-mongodb_4_2 cp $(CONFIG_PATH)/mongodb42 $(CONFIG_FILE) start-vitess_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 dev-vitess_5_7: start-vitess_5_7 cp $(CONFIG_PATH)/vitess_5_7 $(CONFIG_FILE) start-vitess_8_0: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) +start-planetscale-vitess8: build-qe-napi build-connector-kit-js + docker compose -f docker-compose.yml up -d --remove-orphans planetscale-vitess8 + +dev-planetscale-vitess8: start-planetscale-vitess8 + cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) + +test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st + +test-driver-adapter-planetscale: test-planetscale-vitess8 + ###################### # Local dev commands # ###################### +build-qe-napi: + cargo build --package query-engine-node-api + +build-connector-kit-js: build-driver-adapters + cd query-engine/driver-adapters && pnpm i && pnpm build + +build-driver-adapters: ensure-prisma-present + @echo "Building driver adapters..." + @cd ../prisma && pnpm --filter "*adapter*" i + @echo "Driver adapters build completed."; + +ensure-prisma-present: + @if [ -d ../prisma ]; then \ + cd "$(realpath ../prisma)" && git fetch origin main; \ + LOCAL_CHANGES=$$(git diff --name-only HEAD origin/main -- 'packages/*adapter*'); \ + if [ -n "$$LOCAL_CHANGES" ]; then \ + echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ + fi \ + else \ + echo "git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) ../prisma"; \ + git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ + fi; + # Quick schema validation of whatever you have in the dev_datamodel.prisma file. 
validate: cargo run --bin test-cli -- validate-datamodel dev_datamodel.prisma @@ -273,7 +339,7 @@ use-local-query-engine: cp target/release/query-engine $(PRISMA2_BINARY_PATH)/query-engine-darwin show-metrics: - docker compose -f docker-compose.yml up -d --remove-orphans grafana prometheus + docker compose -f docker-compose.yml up --wait -d --remove-orphans grafana prometheus ## OpenTelemetry otel: diff --git a/README.md b/README.md index 6fd072072757..49c7c1a8ab39 100644 --- a/README.md +++ b/README.md @@ -203,6 +203,7 @@ integration tests. - Alternatively: Load the defined environment in `./.envrc` manually in your shell. **Setup:** + There are helper `make` commands to set up a test environment for a specific database connector you want to test. The commands set up a container (if needed) and write the `.test_config` file, which is picked up by the integration @@ -234,6 +235,31 @@ Other variables may or may not be useful. Run `cargo test` in the repository root. +### Testing driver adapters + +Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README. + +**ℹ️ Important note on developing features that require changes to both the query engine and driver adapters code** + +As explained in [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters), running `DRIVER_ADAPTER=$adapter make qe-test` +will ensure you have prisma checked out in your filesystem in the same directory as prisma-engines. This is needed because the driver adapters code is symlinked in prisma-engines. + +When working on a feature or bugfix spanning adapters code and query-engine code, you will need to open sibling PRs in `prisma/prisma` and `prisma/prisma-engines` respectively. +Locally, each time you run `DRIVER_ADAPTER=$adapter make qe-test`, tests will run using the driver adapters built from the source code in your working copy of prisma/prisma. + +In CI, though, we need to specify which branch of prisma/prisma to use for tests, because there is no working copy of prisma/prisma before tests run. +The CI job clones the prisma/prisma `main` branch by default, which doesn't include your local changes. To test the changes in integration, we can tell CI to use the branch of prisma/prisma containing +the adapter changes. To do so, use a simple convention in commit messages, like this: + +``` +git commit -m "DRIVER_ADAPTERS_BRANCH=prisma-branch-with-changes-in-adapters [...]" +``` + +GitHub Actions will then pick up the branch name, use it to clone that branch of prisma/prisma, and build the driver adapters code from there. + +When it's time to merge the sibling PRs, merge the prisma/prisma PR first, so that when the engines PR is merged, the adapters code is already on the prisma/prisma `main` branch. + + ## Parallel rust-analyzer builds When rust-analzyer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer.
It is possible to avoid this by setting a different build path for diff --git a/docker-compose.yml b/docker-compose.yml index 5f637f7d10a6..c0d4f179e0a4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,163 +1,197 @@ -version: "3" +version: '3' services: cockroach_23_1: image: prismagraphql/cockroachdb-custom:23.1 + restart: unless-stopped command: | start-single-node --insecure ports: - - "26260:26257" + - '26260:26257' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases cockroach_22_1_0: image: prismagraphql/cockroachdb-custom:22.1.0 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26257:26257" + - '26257:26257' networks: - databases cockroach_21_2_0_patched: image: prismagraphql/cockroachdb-custom:21.2.0-patched - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26258:26257" + - '26258:26257' networks: - databases pgbouncer: image: brainsam/pgbouncer:latest - restart: always + restart: unless-stopped environment: - DB_HOST: "postgres11" - DB_PORT: "5432" - DB_USER: "postgres" - DB_PASSWORD: "prisma" - POOL_MODE: "transaction" - MAX_CLIENT_CONN: "1000" + DB_HOST: 'postgres11' + DB_PORT: '5432' + DB_USER: 'postgres' + DB_PASSWORD: 'prisma' + POOL_MODE: 'transaction' + MAX_CLIENT_CONN: '1000' networks: - databases ports: - - "6432:6432" + - '6432:6432' postgres9: image: postgres:9.6 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5431:5432" + - '5431:5432' networks: - databases postgres10: image: postgres:10 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5432:5432" + - '5432:5432' networks: - databases postgres11: image: postgres:11 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5433:5432" + - '5433:5432' networks: - databases postgres12: image: postgres:12 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5434:5432" + - '5434:5432' networks: - databases postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5435:5432" + - '5435:5432' networks: - databases + neon-postgres13: + image: ghcr.io/neondatabase/wsproxy:latest + restart: unless-stopped + environment: + # the port of the postgres13 within the databases network + APPEND_PORT: 'postgres13:5432' + ALLOW_ADDR_REGEX: '.*' + LOG_TRAFFIC: 'true' + LOG_CONN_INFO: 'true' + ports: + - '5488:80' + depends_on: + - postgres13 + networks: + - databases + + planetscale-vitess8: + build: ./docker/planetscale_proxy + environment: + MYSQL_HOST: 'vitess-test-8_0' + MYSQL_PORT: 33807 + MYSQL_DATABASE: 'test' + ports: + - '8085:8085' + depends_on: + - vitess-test-8_0 + restart: unless-stopped + healthcheck: + test: ['CMD', 'nc', '-z', '127.0.0.1', '8085'] + interval: 5s + timeout: 2s + retries: 20 + postgres14: image: postgres:14 - restart: 
always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5437:5432" + - '5437:5432' networks: - databases postgres15: image: postgres:15 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5438:5432" + - '5438:5432' networks: - databases mysql-5-6: image: mysql:5.6.50 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3309:3306" + - '3309:3306' networks: - databases tmpfs: /var/lib/mysql mysql-5-7: - image: mysql:5.7.32 + image: mysql:5.7.44 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql @@ -165,219 +199,246 @@ services: mysql-8-0: image: mysql:8.0.28 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb-10-0: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb vitess-test-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33577:33577 environment: PORT: 33574 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-test-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33807:33807 environment: PORT: 33804 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33578:33577 environment: PORT: 33574 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: 
'0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33808:33807 environment: PORT: 33804 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] + interval: 5s + timeout: 2s + retries: 20 mssql-2017: image: mcr.microsoft.com/mssql/server:2017-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1434:1433" + - '1434:1433' networks: - databases - + mssql-2019: image: mcr.microsoft.com/mssql/server:2019-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mssql-2022: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1435:1433" + - '1435:1433' networks: - databases azure-edge: image: mcr.microsoft.com/azure-sql-edge - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - MSSQL_SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + MSSQL_SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mongo42: image: prismagraphql/mongo-single-replica:4.2.17-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27016 INIT_WAIT_SEC: $INIT_WAIT_SEC networks: - databases ports: - - "27016:27016" + - '27016:27016' mongo44: image: prismagraphql/mongo-single-replica:4.4.3-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo42-single: image: mongo:4.2 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27016:27017" + - '27016:27017' networks: - databases mongo44-single: image: mongo:4.4 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo5: image: prismagraphql/mongo-single-replica:5.0.3 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - 
MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27018 INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27018" + - '27018:27018' networks: - databases mongo5-single: image: mongo:5 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27017" + - '27018:27017' networks: - databases mongo-express: image: mongo-express - restart: always + restart: unless-stopped ports: - 8081:8081 environment: - ME_CONFIG_MONGODB_ADMINUSERNAME: "prisma" - ME_CONFIG_MONGODB_ADMINPASSWORD: "prisma" + ME_CONFIG_MONGODB_ADMINUSERNAME: 'prisma' + ME_CONFIG_MONGODB_ADMINPASSWORD: 'prisma' ME_CONFIG_MONGODB_URL: mongodb://prisma:prisma@mongo4-single:27017/ networks: - databases otel: image: jaegertracing/all-in-one:1.35 + restart: unless-stopped environment: - COLLECTOR_OTLP_ENABLED: "true" - COLLECTOR_ZIPKIN_HOST_PORT: ":9411" + COLLECTOR_OTLP_ENABLED: 'true' + COLLECTOR_ZIPKIN_HOST_PORT: ':9411' ports: - 6831:6831/udp - 6832:6832/udp @@ -392,6 +453,7 @@ services: prometheus: image: prom/prometheus + restart: unless-stopped volumes: - ${PWD}/metrics/prometheus:/prometheus-data command: --config.file=/prometheus-data/prometheus.yml diff --git a/docker/planetscale_proxy/Dockerfile b/docker/planetscale_proxy/Dockerfile new file mode 100644 index 000000000000..2411894d88f0 --- /dev/null +++ b/docker/planetscale_proxy/Dockerfile @@ -0,0 +1,15 @@ +FROM golang:1 + +RUN apt update && apt install netcat-openbsd -y +RUN cd /go/src && git clone https://github.com/prisma/planetscale-proxy.git +RUN cd /go/src/planetscale-proxy && go install . 
+ +ENTRYPOINT /go/bin/ps-http-sim \ + -http-addr=0.0.0.0 \ + -http-port=8085 \ + -mysql-addr=$MYSQL_HOST \ + -mysql-port=$MYSQL_PORT \ + -mysql-idle-timeout=1200s \ + -mysql-no-pass \ + -mysql-max-rows=1000 \ + -mysql-dbname=$MYSQL_DATABASE diff --git a/flake.lock b/flake.lock index 725613da576b..c2750d0435ed 100644 --- a/flake.lock +++ b/flake.lock @@ -14,11 +14,11 @@ ] }, "locked": { - "lastModified": 1693163878, - "narHash": "sha256-HXuyMUVaRSoIA602jfFuYGXt6AMZ+WUxuvLq8iJmYTA=", + "lastModified": 1696384830, + "narHash": "sha256-j8ZsVqzmj5sOm5MW9cqwQJUZELFFwOislDmqDDEMl6k=", "owner": "ipetkov", "repo": "crane", - "rev": "43db881168bc65b568d36ceb614a0fc8b276191b", + "rev": "f2143cd27f8bd09ee4f0121336c65015a2a0a19c", "type": "github" }, "original": { @@ -30,11 +30,11 @@ "flake-compat": { "flake": false, "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "lastModified": 1696267196, + "narHash": "sha256-AAQ/2sD+0D18bb8hKuEEVpHUYD1GmO2Uh/taFamn6XQ=", "owner": "edolstra", "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "rev": "4f910c9827911b1ec2bf26b5a062cd09f8d89f85", "type": "github" }, "original": { @@ -50,11 +50,11 @@ ] }, "locked": { - "lastModified": 1688466019, - "narHash": "sha256-VeM2akYrBYMsb4W/MmBo1zmaMfgbL4cH3Pu8PGyIwJ0=", + "lastModified": 1696343447, + "narHash": "sha256-B2xAZKLkkeRFG5XcHHSXXcP7To9Xzr59KXeZiRf4vdQ=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "8e8d955c22df93dbe24f19ea04f47a74adbdc5ec", + "rev": "c9afaba3dfa4085dbd2ccb38dfade5141e33d9d4", "type": "github" }, "original": { @@ -70,11 +70,11 @@ ] }, "locked": { - "lastModified": 1689068808, - "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", + "lastModified": 1694529238, + "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", "owner": "numtide", "repo": "flake-utils", - "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", + "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", "type": "github" }, "original": { @@ -90,11 +90,11 @@ ] }, "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", + "lastModified": 1694102001, + "narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=", "owner": "hercules-ci", "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", + "rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6", "type": "github" }, "original": { @@ -105,11 +105,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1689192006, - "narHash": "sha256-QM0f0d8oPphOTYJebsHioR9+FzJcy1QNIzREyubB91U=", + "lastModified": 1696193975, + "narHash": "sha256-mnQjUcYgp9Guu3RNVAB2Srr1TqKcPpRXmJf4LJk6KRY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "2de8efefb6ce7f5e4e75bdf57376a96555986841", + "rev": "fdd898f8f79e8d2f99ed2ab6b3751811ef683242", "type": "github" }, "original": { @@ -139,11 +139,11 @@ ] }, "locked": { - "lastModified": 1693361441, - "narHash": "sha256-TRFdMQj9wSKMduNqe/1xF8TzcPWEdcn/hKWcVcZ5fO8=", + "lastModified": 1696558324, + "narHash": "sha256-TnnP4LGwDB8ZGE7h2n4nA9Faee8xPkMdNcyrzJ57cbw=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "1fb2aa49635e9f30b6fa211ab7c454f7175e1ba3", + "rev": "fdb37574a04df04aaa8cf7708f94a9309caebe2b", "type": "github" }, "original": { diff --git a/nix/all-engines.nix b/nix/all-engines.nix index 9235060463a7..0e6a1c05b236 100644 --- a/nix/all-engines.nix +++ b/nix/all-engines.nix @@ -1,4 +1,4 @@ -{ pkgs, flakeInputs, lib, self', ... 
}: +{ pkgs, flakeInputs, lib, self', rustToolchain, ... }: let stdenv = pkgs.clangStdenv; @@ -15,7 +15,7 @@ let src = srcPath; name = "prisma-engines-source"; }; - craneLib = flakeInputs.crane.mkLib pkgs; + craneLib = (flakeInputs.crane.mkLib pkgs).overrideToolchain rustToolchain.default; deps = craneLib.vendorCargoDeps { inherit src; }; libSuffix = stdenv.hostPlatform.extensions.sharedLibrary; in @@ -34,6 +34,7 @@ in ] ++ lib.optionals stdenv.isDarwin [ perl # required to build openssl darwin.apple_sdk.frameworks.Security + iconv ]; configurePhase = '' @@ -53,13 +54,15 @@ in cp target/release/prisma-fmt $out/bin/ cp target/release/libquery_engine${libSuffix} $out/lib/libquery_engine.node ''; + + dontStrip = true; }; packages.test-cli = lib.makeOverridable ({ profile }: stdenv.mkDerivation { name = "test-cli"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = "cargo build --profile=${profile} --bin=test-cli"; @@ -76,7 +79,7 @@ in ({ profile }: stdenv.mkDerivation { name = "query-engine-bin"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = "cargo build --profile=${profile} --bin=query-engine"; @@ -96,7 +99,7 @@ in ({ profile }: stdenv.mkDerivation { name = "query-engine-bin-and-lib"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = '' cargo build --profile=${profile} --bin=query-engine diff --git a/nix/args.nix b/nix/args.nix index d3a2e54dbc8a..2254b7f5b138 100644 --- a/nix/args.nix +++ b/nix/args.nix @@ -4,10 +4,10 @@ let overlays = [ flakeInputs.rust-overlay.overlays.default - (self: super: - let toolchain = super.rust-bin.stable.latest; in - { cargo = toolchain.minimal; rustc = toolchain.minimal; rustToolchain = toolchain; }) ]; - in - { pkgs = import flakeInputs.nixpkgs { inherit system overlays; }; }; + in rec + { + pkgs = import flakeInputs.nixpkgs { inherit system overlays; }; + rustToolchain = pkgs.rust-bin.stable.latest; + }; } diff --git a/nix/shell.nix b/nix/shell.nix index c30ca9080d47..94661c972d01 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -1,7 +1,8 @@ -{ self', pkgs, ... }: +{ self', pkgs, rustToolchain, ... 
}: let - devToolchain = pkgs.rustToolchain.default.override { extensions = [ "rust-analyzer" "rust-src" ]; }; + devToolchain = rustToolchain.default.override { extensions = [ "rust-analyzer" "rust-src" ]; }; + nodejs = pkgs.nodejs_latest; in { devShells.default = pkgs.mkShell { @@ -9,9 +10,9 @@ in devToolchain pkgs.llvmPackages_latest.bintools - pkgs.nodejs - pkgs.nodePackages.typescript-language-server - pkgs.nodePackages.pnpm + nodejs + nodejs.pkgs.typescript-language-server + nodejs.pkgs.pnpm ]; inputsFrom = [ self'.packages.prisma-engines ]; shellHook = pkgs.lib.optionalString pkgs.stdenv.isLinux diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs index b9dbdc58067d..27fdeddad159 100644 --- a/prisma-fmt/src/code_actions.rs +++ b/prisma-fmt/src/code_actions.rs @@ -48,7 +48,9 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec validated_schema.db.source(), config, model, - ) + ); + + multi_schema::add_schema_to_schemas(&mut actions, &params, validated_schema.db.source(), config, model); } if matches!(datasource, Some(ds) if ds.active_provider == "mongodb") { @@ -99,6 +101,16 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec complete_relation.referencing_field(), ); } + + if validated_schema.relation_mode().uses_foreign_keys() { + relation_mode::replace_set_default_mysql( + &mut actions, + &params, + validated_schema.db.source(), + complete_relation, + config, + ) + } } } @@ -195,6 +207,25 @@ fn format_field_attribute(attribute: &str) -> String { format!(" {attribute}\n") } +fn format_block_property( + property: &str, + value: &str, + indentation: IndentationType, + newline: NewlineType, + has_properties: bool, +) -> String { + let separator = if has_properties { newline.as_ref() } else { "" }; + + // * (soph) I don't super like needing to prefix this with ')' but + // * it would require further updating how we parse spans + // todo: update so that we have concepts for: + // todo: - The entire url span + // todo: - The url arg span :: currently, url_span only represents this.
+ let formatted_attribute = format!(r#"){separator}{indentation}{property} = ["{value}"]"#); + + formatted_attribute +} + fn format_block_attribute( attribute: &str, indentation: IndentationType, diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs index de117a867b06..0e47a008a910 100644 --- a/prisma-fmt/src/code_actions/multi_schema.rs +++ b/prisma-fmt/src/code_actions/multi_schema.rs @@ -1,5 +1,6 @@ use lsp_types::{CodeAction, CodeActionKind, CodeActionOrCommand, CodeActionParams}; use psl::{ + diagnostics::Span, parser_database::walkers::{EnumWalker, ModelWalker}, schema_ast::ast::WithSpan, Configuration, @@ -108,3 +109,70 @@ pub(super) fn add_schema_block_attribute_enum( actions.push(CodeActionOrCommand::CodeAction(action)) } + +pub(super) fn add_schema_to_schemas( + actions: &mut Vec, + params: &CodeActionParams, + schema: &str, + config: &Configuration, + model: ModelWalker<'_>, +) { + let datasource = match config.datasources.first() { + Some(ds) => ds, + None => return, + }; + + let span_diagnostics = + match super::diagnostics_for_span(schema, ¶ms.context.diagnostics, model.ast_model().span()) { + Some(sd) => sd, + None => return, + }; + + let diagnostics = match super::filter_diagnostics(span_diagnostics, "This schema is not defined in the datasource.") + { + Some(value) => value, + None => return, + }; + + let edit = match datasource.schemas_span { + Some(span) => { + let formatted_attribute = format!(r#"", "{}""#, model.schema_name().unwrap()); + super::create_text_edit( + schema, + formatted_attribute, + true, + // todo: update spans so that we can just append to the end of the _inside_ of the array. Instead of needing to re-append the `]` or taking the span end -1 + Span::new(span.start, span.end - 1), + params, + ) + } + None => { + let has_properties = datasource.provider_defined() + || datasource.url_defined() + || datasource.direct_url_defined() + || datasource.shadow_url_defined() + || datasource.relation_mode_defined() + || datasource.schemas_defined(); + + let formatted_attribute = super::format_block_property( + "schemas", + model.schema_name().unwrap(), + model.indentation(), + model.newline(), + has_properties, + ); + + super::create_text_edit(schema, formatted_attribute, true, datasource.url_span, params) + } + }; + + let action = CodeAction { + title: String::from("Add schema to schemas"), + kind: Some(CodeActionKind::QUICKFIX), + edit: Some(edit), + diagnostics: Some(diagnostics), + ..Default::default() + }; + + actions.push(CodeActionOrCommand::CodeAction(action)) +} diff --git a/prisma-fmt/src/code_actions/relation_mode.rs b/prisma-fmt/src/code_actions/relation_mode.rs index 28d9018220e7..751fb956073b 100644 --- a/prisma-fmt/src/code_actions/relation_mode.rs +++ b/prisma-fmt/src/code_actions/relation_mode.rs @@ -1,5 +1,5 @@ use lsp_types::{CodeAction, CodeActionKind, CodeActionOrCommand}; -use psl::schema_ast::ast::SourceConfig; +use psl::{parser_database::walkers::CompleteInlineRelationWalker, schema_ast::ast::SourceConfig, Configuration}; pub(crate) fn edit_referential_integrity( actions: &mut Vec, @@ -35,3 +35,51 @@ pub(crate) fn edit_referential_integrity( actions.push(CodeActionOrCommand::CodeAction(action)) } + +pub(crate) fn replace_set_default_mysql( + actions: &mut Vec, + params: &lsp_types::CodeActionParams, + schema: &str, + relation: CompleteInlineRelationWalker<'_>, + config: &Configuration, +) { + let datasource = match config.datasources.first() { + Some(ds) => ds, + None => return, + }; + + if 
datasource.active_connector.provider_name() != "mysql" { + return; + } + + let span = match relation.on_update_span() { + Some(span) => span, + None => return, + }; + + let span_diagnostics = match super::diagnostics_for_span(schema, ¶ms.context.diagnostics, span) { + Some(sd) => sd, + None => return, + }; + + let diagnostics = match + super::filter_diagnostics( + span_diagnostics, + "MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors.") { + Some(value) => value, + None => return, + }; + + let edit = super::create_text_edit(schema, "NoAction".to_owned(), false, span, params); + + let action = CodeAction { + title: r#"Replace SetDefault with NoAction"#.to_owned(), + + kind: Some(CodeActionKind::QUICKFIX), + edit: Some(edit), + diagnostics: Some(diagnostics), + ..Default::default() + }; + + actions.push(CodeActionOrCommand::CodeAction(action)) +} diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json new file mode 100644 index 000000000000..0e3f2348b54a --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json @@ -0,0 +1,41 @@ +[ + { + "title": "Add schema to schemas", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { + "line": 15, + "character": 13 + }, + "end": { + "line": 15, + "character": 19 + } + }, + "severity": 1, + "message": "This schema is not defined in the datasource. Read more on `@@schema` at https://pris.ly/d/multi-schema" + } + ], + "edit": { + "changes": { + "file:///path/to/schema.prisma": [ + { + "range": { + "start": { + "line": 8, + "character": 27 + }, + "end": { + "line": 8, + "character": 28 + } + }, + "newText": "\", \"base\"" + } + ] + } + } + } +] \ No newline at end of file diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma new file mode 100644 index 000000000000..fc555c0756f5 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma @@ -0,0 +1,17 @@ +generator client { + provider = "prisma-client-js" + previewFeatures = ["multiSchema"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + schemas = ["a", "b"] + relationMode = "prisma" +} + +model A { + id Int @id + + @@schema("base") +} diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json new file mode 100644 index 000000000000..64f2acdb8098 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json @@ -0,0 +1,41 @@ +[ + { + "title": "Add schema to schemas", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { + "line": 14, + "character": 13 + }, + "end": { + "line": 14, + "character": 19 + } + }, + "severity": 1, + "message": "This schema is not defined in the datasource. 
Read more on `@@schema` at https://pris.ly/d/multi-schema" + } + ], + "edit": { + "changes": { + "file:///path/to/schema.prisma": [ + { + "range": { + "start": { + "line": 7, + "character": 37 + }, + "end": { + "line": 7, + "character": 38 + } + }, + "newText": ")\n schemas = [\"base\"]" + } + ] + } + } + } +] \ No newline at end of file diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma new file mode 100644 index 000000000000..aba6b13dc237 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma @@ -0,0 +1,16 @@ +generator client { + provider = "prisma-client-js" + previewFeatures = ["multiSchema"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + relationMode = "prisma" +} + +model A { + id Int @id + + @@schema("base") +} diff --git a/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json new file mode 100644 index 000000000000..d31f54355c36 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json @@ -0,0 +1,41 @@ +[ + { + "title": "Replace SetDefault with NoAction", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { + "line": 14, + "character": 62 + }, + "end": { + "line": 14, + "character": 82 + } + }, + "severity": 2, + "message": "MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors. Read more at https://pris.ly/d/mysql-set-default " + } + ], + "edit": { + "changes": { + "file:///path/to/schema.prisma": [ + { + "range": { + "start": { + "line": 14, + "character": 72 + }, + "end": { + "line": 14, + "character": 82 + } + }, + "newText": "NoAction" + } + ] + } + } + } +] \ No newline at end of file diff --git a/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma new file mode 100644 index 000000000000..b13952553002 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma @@ -0,0 +1,29 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "mysql" + url = env("DATABASE_URL") + relationMode = "foreignKeys" +} + +/// multi line +/// commennttt +model Foo { + id Int @id + bar Bar @relation(fields: [bar_id], references: [id], onUpdate: SetDefault) + bar_id Int @unique + t Test +} + +model Bar { + id Int @id + foo Foo? +} + +// This is a test enum. +enum Test { + TestUno + TestDue +} diff --git a/prisma-fmt/tests/code_actions/tests.rs b/prisma-fmt/tests/code_actions/tests.rs index 41035ed65d93..dbd5ff44e96b 100644 --- a/prisma-fmt/tests/code_actions/tests.rs +++ b/prisma-fmt/tests/code_actions/tests.rs @@ -25,9 +25,12 @@ scenarios! 
{ one_to_one_referencing_side_misses_unique_compound_field_indentation_four_spaces relation_mode_prisma_missing_index relation_mode_referential_integrity + relation_mode_mysql_foreign_keys_set_default multi_schema_one_model multi_schema_one_model_one_enum multi_schema_two_models + multi_schema_add_to_existing_schemas + multi_schema_add_to_nonexisting_schemas mongodb_at_map mongodb_at_map_with_validation_errors } diff --git a/prisma-schema-wasm/Cargo.toml b/prisma-schema-wasm/Cargo.toml index 6387aeedfbba..248c726c9ba4 100644 --- a/prisma-schema-wasm/Cargo.toml +++ b/prisma-schema-wasm/Cargo.toml @@ -7,6 +7,6 @@ edition = "2021" crate-type = ["cdylib"] [dependencies] -wasm-bindgen = "=0.2.84" +wasm-bindgen = "=0.2.87" wasm-logger = { version = "0.2.0", optional = true } prisma-fmt = { path = "../prisma-fmt" } diff --git a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs index 1c698a644b58..5456deb59df6 100644 --- a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs +++ b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs @@ -57,7 +57,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector MultiSchema | FilteredInlineChildNestedToOneDisconnect | InsertReturning | - UpdateReturning + UpdateReturning | + RowIn }); const SCALAR_TYPE_DEFAULTS: &[(ScalarType, CockroachType)] = &[ diff --git a/psl/builtin-connectors/src/mysql_datamodel_connector.rs b/psl/builtin-connectors/src/mysql_datamodel_connector.rs index d4688438d299..39995fb5d48d 100644 --- a/psl/builtin-connectors/src/mysql_datamodel_connector.rs +++ b/psl/builtin-connectors/src/mysql_datamodel_connector.rs @@ -58,7 +58,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector SupportsTxIsolationReadUncommitted | SupportsTxIsolationReadCommitted | SupportsTxIsolationRepeatableRead | - SupportsTxIsolationSerializable + SupportsTxIsolationSerializable | + RowIn }); const CONSTRAINT_SCOPES: &[ConstraintScope] = &[ConstraintScope::GlobalForeignKey, ConstraintScope::ModelKeyIndex]; diff --git a/psl/builtin-connectors/src/postgres_datamodel_connector.rs b/psl/builtin-connectors/src/postgres_datamodel_connector.rs index 6cd160c40670..8fac79165c58 100644 --- a/psl/builtin-connectors/src/postgres_datamodel_connector.rs +++ b/psl/builtin-connectors/src/postgres_datamodel_connector.rs @@ -64,7 +64,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector SupportsTxIsolationSerializable | NativeUpsert | InsertReturning | - UpdateReturning + UpdateReturning | + RowIn }); pub struct PostgresDatamodelConnector; diff --git a/psl/builtin-connectors/src/sqlite_datamodel_connector.rs b/psl/builtin-connectors/src/sqlite_datamodel_connector.rs index 6b66a6c524ca..d5e6041f9b43 100644 --- a/psl/builtin-connectors/src/sqlite_datamodel_connector.rs +++ b/psl/builtin-connectors/src/sqlite_datamodel_connector.rs @@ -24,7 +24,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector OrderByNullsFirstLast | SupportsTxIsolationSerializable | NativeUpsert | - FilteredInlineChildNestedToOneDisconnect + FilteredInlineChildNestedToOneDisconnect | + RowIn // InsertReturning - While SQLite does support RETURNING, it does not return column information on the way back from the database. // This column type information is necessary in order to preserve consistency for some data types such as int, where values could overflow. 
// Since we care to stay consistent with reads, it is not enabled. diff --git a/psl/parser-database/src/context.rs b/psl/parser-database/src/context.rs index 54c06ddd9a42..450146953024 100644 --- a/psl/parser-database/src/context.rs +++ b/psl/parser-database/src/context.rs @@ -117,7 +117,7 @@ impl<'db> Context<'db> { /// - When you are done validating an attribute set, you must call /// `validate_visited_attributes()`. Otherwise, Context will helpfully panic. pub(super) fn visit_attributes(&mut self, ast_attributes: ast::AttributeContainer) { - if !self.attributes.attributes.is_empty() || !self.attributes.unused_attributes.is_empty() { + if self.attributes.attributes.is_some() || !self.attributes.unused_attributes.is_empty() { panic!( "`ctx.visit_attributes() called with {:?} while the Context is still validating previous attribute set on {:?}`", ast_attributes, @@ -125,9 +125,7 @@ impl<'db> Context<'db> { ); } - self.attributes.attributes.clear(); - self.attributes.unused_attributes.clear(); - self.attributes.extend_attributes(ast_attributes, self.ast); + self.attributes.set_attributes(ast_attributes, self.ast); } /// Look for an optional attribute with a name of the form @@ -139,8 +137,8 @@ impl<'db> Context<'db> { /// with a default that can be first, but with native types, arguments are /// purely positional. pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, ast::AttributeId)> { - let attrs = - iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, attr)| attr.name.name.contains('.')); + let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.ast) + .filter(|(_, attr)| attr.name.name.contains('.')); let mut native_type_attr = None; let diagnostics = &mut self.diagnostics; @@ -173,7 +171,8 @@ impl<'db> Context<'db> { /// is defined. #[must_use] pub(crate) fn visit_optional_single_attr(&mut self, name: &'static str) -> bool { - let mut attrs = iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, a)| a.name.name == name); + let mut attrs = + iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name); let (first_idx, first) = match attrs.next() { Some(first) => first, None => return false, @@ -182,7 +181,7 @@ impl<'db> Context<'db> { if attrs.next().is_some() { for (idx, attr) in - iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, a)| a.name.name == name) + iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name) { diagnostics.push_error(DatamodelError::new_duplicate_attribute_error( &attr.name.name, @@ -206,7 +205,7 @@ impl<'db> Context<'db> { let mut has_valid_attribute = false; while !has_valid_attribute { - let first_attr = iter_attributes(&self.attributes.attributes, self.ast) + let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.ast) .filter(|(_, attr)| attr.name.name == name) .find(|(attr_id, _)| self.attributes.unused_attributes.contains(attr_id)); let (attr_id, attr) = if let Some(first_attr) = first_attr { @@ -297,7 +296,8 @@ impl<'db> Context<'db> { attribute.span, )) } - self.attributes.attributes.clear(); + + self.attributes.attributes = None; self.attributes.unused_attributes.clear(); } @@ -430,11 +430,11 @@ impl<'db> Context<'db> { // Implementation detail. Used for arguments validation. 
fn iter_attributes<'a, 'ast: 'a>( - attrs: &'a [ast::AttributeContainer], + attrs: Option<&'a ast::AttributeContainer>, ast: &'ast ast::SchemaAst, ) -> impl Iterator + 'a { attrs - .iter() + .into_iter() .flat_map(move |container| ast[*container].iter().enumerate().map(|a| (a, *container))) .map(|((idx, attr), container)| (ast::AttributeId::new_in_container(container, idx), attr)) } diff --git a/psl/parser-database/src/context/attributes.rs b/psl/parser-database/src/context/attributes.rs index 39655decf8b4..9f35f5cc3644 100644 --- a/psl/parser-database/src/context/attributes.rs +++ b/psl/parser-database/src/context/attributes.rs @@ -4,7 +4,7 @@ use crate::interner::StringId; #[derive(Default, Debug)] pub(super) struct AttributesValidationState { /// The attributes list being validated. - pub(super) attributes: Vec, + pub(super) attributes: Option, pub(super) unused_attributes: HashSet, // the _remaining_ attributes /// The attribute being validated. @@ -13,10 +13,11 @@ pub(super) struct AttributesValidationState { } impl AttributesValidationState { - pub(super) fn extend_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) { + pub(super) fn set_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) { let attribute_ids = (0..ast[attributes].len()).map(|idx| ast::AttributeId::new_in_container(attributes, idx)); + self.unused_attributes.clear(); self.unused_attributes.extend(attribute_ids); - self.attributes.push(attributes); + self.attributes = Some(attributes); } } diff --git a/psl/parser-database/src/walkers/relation/inline/complete.rs b/psl/parser-database/src/walkers/relation/inline/complete.rs index 1c5536e948a6..3f7b1b67dc60 100644 --- a/psl/parser-database/src/walkers/relation/inline/complete.rs +++ b/psl/parser-database/src/walkers/relation/inline/complete.rs @@ -2,6 +2,7 @@ use crate::{ walkers::{ModelWalker, RelationFieldId, RelationFieldWalker, ScalarFieldWalker}, ParserDatabase, ReferentialAction, }; +use diagnostics::Span; use schema_ast::ast; /// Represents a relation that has fields and references defined in one of the @@ -65,6 +66,10 @@ impl<'db> CompleteInlineRelationWalker<'db> { .unwrap_or(Cascade) } + pub fn on_update_span(self) -> Option { + self.referencing_field().attributes().on_update.map(|(_, span)| span) + } + /// Prisma allows setting the relation field as optional, even if one of the /// underlying scalar fields is required. For the purpose of referential /// actions, we count the relation field required if any of the underlying diff --git a/psl/parser-database/src/walkers/relation_field.rs b/psl/parser-database/src/walkers/relation_field.rs index 8e376a8c3ef1..b96380f03bf6 100644 --- a/psl/parser-database/src/walkers/relation_field.rs +++ b/psl/parser-database/src/walkers/relation_field.rs @@ -164,7 +164,7 @@ impl<'db> RelationFieldWalker<'db> { } /// The relation name. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialOrd)] pub enum RelationName<'db> { /// A relation name specified in the AST. 
Explicit(&'db str), @@ -185,17 +185,6 @@ impl<'db> PartialEq for RelationName<'db> { impl<'db> Eq for RelationName<'db> {} -impl<'db> PartialOrd for RelationName<'db> { - fn partial_cmp(&self, other: &Self) -> Option { - match (self, other) { - (Self::Explicit(l0), Self::Explicit(r0)) => l0.partial_cmp(r0), - (Self::Generated(l0), Self::Generated(r0)) => l0.partial_cmp(r0), - (Self::Explicit(l0), Self::Generated(r0)) => l0.partial_cmp(&r0.as_str()), - (Self::Generated(l0), Self::Explicit(r0)) => l0.as_str().partial_cmp(*r0), - } - } -} - impl<'db> Ord for RelationName<'db> { fn cmp(&self, other: &Self) -> std::cmp::Ordering { match (self, other) { diff --git a/psl/psl-core/src/common/preview_features.rs b/psl/psl-core/src/common/preview_features.rs index 93ddde63291c..544bf5b99164 100644 --- a/psl/psl-core/src/common/preview_features.rs +++ b/psl/psl-core/src/common/preview_features.rs @@ -45,6 +45,7 @@ features!( DataProxy, Deno, Distinct, + DriverAdapters, ExtendedIndexes, ExtendedWhereUnique, FieldReference, @@ -64,7 +65,6 @@ features!( NamedConstraints, NApi, NativeTypes, - DriverAdapters, OrderByAggregateGroup, OrderByNulls, OrderByRelation, @@ -82,6 +82,7 @@ features!( pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { active: enumflags2::make_bitflags!(PreviewFeature::{ Deno + | DriverAdapters | FullTextIndex | FullTextSearch | Metrics @@ -123,9 +124,7 @@ pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { | TransactionApi | UncheckedScalarInputs }), - hidden: enumflags2::make_bitflags!(PreviewFeature::{ - DriverAdapters - }), + hidden: enumflags2::BitFlags::EMPTY, }; #[derive(Debug)] diff --git a/psl/psl-core/src/datamodel_connector/capabilities.rs b/psl/psl-core/src/datamodel_connector/capabilities.rs index 7bfee8c02916..1b3f557e6285 100644 --- a/psl/psl-core/src/datamodel_connector/capabilities.rs +++ b/psl/psl-core/src/datamodel_connector/capabilities.rs @@ -103,6 +103,7 @@ capabilities!( NativeUpsert, InsertReturning, UpdateReturning, + RowIn, // Connector supports (a, b) IN (c, d) expression. ); /// Contains all capabilities that the connector is able to serve. 
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations.rs b/psl/psl-core/src/validate/validation_pipeline/validations.rs index 5814317ee48a..4040844bb767 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations.rs @@ -123,6 +123,7 @@ pub(super) fn validate(ctx: &mut Context<'_>) { indexes::supports_clustering_setting(index, ctx); indexes::clustering_can_be_defined_only_once(index, ctx); indexes::opclasses_are_not_allowed_with_other_than_normal_indices(index, ctx); + indexes::composite_types_are_not_allowed_in_index(index, ctx); for field_attribute in index.scalar_field_attributes() { let span = index.ast_attribute().span; diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs index 7d16ce78414d..5f3288264016 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs @@ -386,6 +386,23 @@ pub(crate) fn opclasses_are_not_allowed_with_other_than_normal_indices(index: In } } +pub(crate) fn composite_types_are_not_allowed_in_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) { + for field in index.fields() { + if field.scalar_field_type().as_composite_type().is_some() { + let message = format!( + "Indexes can only contain scalar attributes. Please remove {:?} from the argument list of the indexes.", + field.name() + ); + ctx.push_error(DatamodelError::new_attribute_validation_error( + &message, + index.attribute_name(), + index.ast_attribute().span, + )); + return; + } + } +} + pub(super) fn unique_client_name_does_not_clash_with_field(index: IndexWalker<'_>, ctx: &mut Context<'_>) { if !index.is_unique() { return; diff --git a/psl/psl/tests/config/generators.rs b/psl/psl/tests/config/generators.rs index 193ad28889ed..f10a9bda3eae 100644 --- a/psl/psl/tests/config/generators.rs +++ b/psl/psl/tests/config/generators.rs @@ -258,7 +258,7 @@ fn nice_error_for_unknown_generator_preview_feature() { .unwrap_err(); let expectation = expect![[r#" - error: The preview feature "foo" is not known. Expected one of: deno, fullTextIndex, fullTextSearch, metrics, multiSchema, postgresqlExtensions, tracing, views + error: The preview feature "foo" is not known. 
Expected one of: deno, driverAdapters, fullTextIndex, fullTextSearch, metrics, multiSchema, postgresqlExtensions, tracing, views --> schema.prisma:3  |   2 |  provider = "prisma-client-js" diff --git a/quaint/.github/workflows/test.yml b/quaint/.github/workflows/test.yml index a067743f714e..85d480919c41 100644 --- a/quaint/.github/workflows/test.yml +++ b/quaint/.github/workflows/test.yml @@ -46,13 +46,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=pooled" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=pooled" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=pooled" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=pooled" - "--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/quaint/.gitignore b/quaint/.gitignore index d14afc776cbd..10667362c4a3 100644 --- a/quaint/.gitignore +++ b/quaint/.gitignore @@ -1,9 +1,8 @@ **/target **/*.rs.bk -Cargo.lock .direnv/ .vscode/ docker-compose.override.yml db/ -!db/.gitkeep \ No newline at end of file +!db/.gitkeep diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index ccda5e087360..b699518d0910 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -29,18 +29,7 @@ docs = [] # way to access database-specific methods when you need extra control. 
expose-drivers = [] -all = [ - "chrono", - "json", - "mssql", - "mysql", - "pooled", - "postgresql", - "serde-support", - "sqlite", - "uuid", - "bigdecimal", -] +all = ["mssql", "mysql", "pooled", "postgresql", "sqlite"] vendored-openssl = [ "postgres-native-tls/vendored-openssl", @@ -59,13 +48,10 @@ postgresql = [ "byteorder", ] -json = ["serde_json", "base64"] -mssql = ["tiberius", "uuid", "chrono", "tokio-util", "tokio/time", "tokio/net", "either"] +mssql = ["tiberius", "tokio-util", "tokio/time", "tokio/net", "either"] mysql = ["mysql_async", "tokio/time", "lru-cache"] pooled = ["mobc"] -serde-support = ["serde", "chrono/serde"] sqlite = ["rusqlite", "tokio/sync"] -bigdecimal = ["bigdecimal_"] fmt-sql = ["sqlformat"] [dependencies] @@ -82,16 +68,17 @@ url = "2.1" hex = "0.4" either = { version = "1.6", optional = true } -base64 = { version = "0.12.3", optional = true } -chrono = { version = "0.4", optional = true, default-features = false } +base64 = { version = "0.12.3" } +chrono = { version = "0.4", default-features = false, features = ["serde"] } lru-cache = { version = "0.1", optional = true } -serde_json = { version = "1.0.48", optional = true, features = ["float_roundtrip"] } +serde_json = { version = "1.0.48", features = ["float_roundtrip"] } native-tls = { version = "0.2", optional = true } bit-vec = { version = "0.6.1", optional = true } bytes = { version = "1.0", optional = true } mobc = { version = "0.8", optional = true } serde = { version = "1.0", optional = true } sqlformat = { version = "0.2.0", optional = true } +uuid = { version = "1", features = ["v4"] } [dev-dependencies] once_cell = "1.3" @@ -99,9 +86,8 @@ indoc = "0.3" names = "0.11" paste = "1.0" serde = { version = "1.0", features = ["derive"] } -test-macros = { path = "test-macros" } -test-setup = { path = "test-setup" } -uuid = { version = "1", features = ["v4"] } +quaint-test-macros = { path = "quaint-test-macros" } +quaint-test-setup = { path = "quaint-test-setup" } tokio = { version = "1.0", features = ["rt-multi-thread", "macros", "time"] } [dependencies.byteorder] @@ -128,25 +114,36 @@ features = ["sql-browser-tokio", "chrono", "bigdecimal"] version = "0.11.2" optional = true default-features = false -features = ["sql-browser-tokio", "vendored-openssl", "chrono", "bigdecimal", "tds73", "winauth"] +features = [ + "sql-browser-tokio", + "vendored-openssl", + "chrono", + "bigdecimal", + "tds73", + "winauth", +] -[dependencies.bigdecimal_] +[dependencies.bigdecimal] version = "0.3" -optional = true -package = "bigdecimal" - -[dependencies.uuid] -version = "1" -optional = true [dependencies.tokio-postgres] -features = ["with-uuid-1", "with-chrono-0_4", "with-serde_json-1", "with-bit-vec-0_6"] +features = [ + "with-uuid-1", + "with-chrono-0_4", + "with-serde_json-1", + "with-bit-vec-0_6", +] git = "https://github.com/prisma/rust-postgres" branch = "pgbouncer-mode" optional = true [dependencies.postgres-types] -features = ["with-uuid-1", "with-chrono-0_4", "with-serde_json-1", "with-bit-vec-0_6"] +features = [ + "with-uuid-1", + "with-chrono-0_4", + "with-serde_json-1", + "with-bit-vec-0_6", +] git = "https://github.com/prisma/rust-postgres" branch = "pgbouncer-mode" optional = true diff --git a/quaint/README.md b/quaint/README.md index b7abfaefaf6c..92033db269b1 100644 --- a/quaint/README.md +++ b/quaint/README.md @@ -13,11 +13,6 @@ Quaint is an abstraction over certain SQL databases. 
It provides: - Pooling with [mobc](https://crates.io/crates/mobc) - Async/await and Futures 0.3 -### Documentation - -- [Released](https://docs.rs/quaint) -- [Main](https://prisma.github.io/quaint/quaint/index.html) - ### Feature flags - `mysql`: Support for MySQL databases. @@ -25,11 +20,6 @@ Quaint is an abstraction over certain SQL databases. It provides: - `sqlite`: Support for SQLite databases. - `mssql`: Support for Microsoft SQL Server databases. - `pooled`: A connection pool in `pooled::Quaint`. -- `json`: JSON type support with `serde_json` crate. -- `uuid`: UUID type support with `uuid` crate. -- `chrono`: DateTime type support with `chrono` crate. -- `serde-support`: Deserialize support from result set with `serde` crate. -- `bigdecimal`: Numeric values can be read as `BigDecimal`. - `vendored-openssl`: Statically links against a vendored OpenSSL library on non-Windows or non-Apple platforms. - `fmt-sql`: Enables logging SQL queries _formatted_. The `FMT_SQL` env var must be present for the formatting to be enabled. @@ -51,7 +41,7 @@ choice. ```sh > cargo build --features all - ``` +``` ### Testing @@ -77,11 +67,11 @@ The `FMT_SQL` environment variable can be used to log _formatted_ SQL queries. B This requires the rust nightly channel: ```sh -> cargo +nightly rustdoc --all-features +> cargo rustdoc --all-features ``` -Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html` +Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html`. ## Security -If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint) +If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint). 
diff --git a/quaint/docker-compose.yml b/quaint/docker-compose.yml index ec3c06faa289..47f1a3456a6e 100644 --- a/quaint/docker-compose.yml +++ b/quaint/docker-compose.yml @@ -1,14 +1,14 @@ -version: "3" +version: '3' services: postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - PGDATA: "/pgtmpfs13" + POSTGRES_PASSWORD: 'prisma' + PGDATA: '/pgtmpfs13' ports: - - "5432:5432" + - '5432:5432' networks: - databases tmpfs: /pgtmpfs12 @@ -16,13 +16,13 @@ services: mysql57: image: mysql:5.7 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql5.7 @@ -30,48 +30,48 @@ services: mysql8: image: mysql:8.0.22 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb mssql: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases diff --git a/quaint/test-macros/Cargo.toml b/quaint/quaint-test-macros/Cargo.toml similarity index 73% rename from quaint/test-macros/Cargo.toml rename to quaint/quaint-test-macros/Cargo.toml index da106947a9d7..1d7ff6e33536 100644 --- a/quaint/test-macros/Cargo.toml +++ b/quaint/quaint-test-macros/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "test-macros" +name = "quaint-test-macros" version = "0.1.0" authors = ["Julius de Bruijn "] edition = "2018" @@ -13,4 +13,4 @@ syn = "1.0.5" quote = "1.0.2" proc-macro2 = "1.0.6" once_cell = "1.3.1" -test-setup = { path = "../test-setup" } +quaint-test-setup = { path = "../quaint-test-setup" } diff --git a/quaint/test-macros/src/lib.rs b/quaint/quaint-test-macros/src/lib.rs similarity index 100% rename from quaint/test-macros/src/lib.rs rename to quaint/quaint-test-macros/src/lib.rs diff --git a/quaint/test-macros/src/test_each_connector.rs b/quaint/quaint-test-macros/src/test_each_connector.rs similarity index 95% rename from quaint/test-macros/src/test_each_connector.rs rename to quaint/quaint-test-macros/src/test_each_connector.rs index 501c251413d4..87cf6da1a7e6 100644 --- a/quaint/test-macros/src/test_each_connector.rs +++ b/quaint/quaint-test-macros/src/test_each_connector.rs @@ -2,10 +2,11 @@ use darling::FromMeta; use once_cell::sync::Lazy; use proc_macro::TokenStream; use proc_macro2::Span; + +use quaint_test_setup::{ConnectorDefinition, Tags, CONNECTORS}; use quote::quote; use std::str::FromStr; use syn::{parse_macro_input, spanned::Spanned, AttributeArgs, Ident, ItemFn}; -use test_setup::{ConnectorDefinition, Tags, CONNECTORS}; static TAGS_FILTER: Lazy = Lazy::new(|| { let tags_str = std::env::var("TEST_EACH_CONNECTOR_TAGS").ok(); @@ -77,6 
+78,7 @@ impl darling::FromMeta for TagsWrapper { } } +#[allow(clippy::needless_borrow)] pub fn test_each_connector_impl(attr: TokenStream, input: TokenStream) -> TokenStream { let attributes_meta: syn::AttributeArgs = parse_macro_input!(attr as AttributeArgs); let args = TestEachConnectorArgs::from_list(&attributes_meta); @@ -98,6 +100,7 @@ pub fn test_each_connector_impl(attr: TokenStream, input: TokenStream) -> TokenS output.into() } +#[allow(clippy::needless_borrow)] fn test_each_connector_async_wrapper_functions( args: &TestEachConnectorArgs, test_function: &ItemFn, @@ -127,7 +130,7 @@ fn test_each_connector_async_wrapper_functions( #test_fn_name(&mut api).await#optional_unwrap }; - test_setup::run_with_tokio(fut) + quaint_test_setup::run_with_tokio(fut) } }; diff --git a/quaint/test-setup/Cargo.toml b/quaint/quaint-test-setup/Cargo.toml similarity index 72% rename from quaint/test-setup/Cargo.toml rename to quaint/quaint-test-setup/Cargo.toml index 959be70d4fd8..b7ad87fed8fc 100644 --- a/quaint/test-setup/Cargo.toml +++ b/quaint/quaint-test-setup/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "test-setup" +name = "quaint-test-setup" version = "0.1.0" authors = ["Julius de Bruijn "] edition = "2018" @@ -9,5 +9,5 @@ once_cell = "1.3.1" bitflags = "1.2.1" async-trait = "0.1" names = "0.11" -tokio = { version = "1.0", features = ["rt-multi-thread"]} +tokio = { version = "1.0", features = ["rt-multi-thread"] } quaint = { path = "..", features = ["all"] } diff --git a/quaint/test-setup/src/lib.rs b/quaint/quaint-test-setup/src/lib.rs similarity index 100% rename from quaint/test-setup/src/lib.rs rename to quaint/quaint-test-setup/src/lib.rs diff --git a/quaint/test-setup/src/tags.rs b/quaint/quaint-test-setup/src/tags.rs similarity index 100% rename from quaint/test-setup/src/tags.rs rename to quaint/quaint-test-setup/src/tags.rs diff --git a/quaint/rust-toolchain b/quaint/rust-toolchain deleted file mode 100644 index 870bbe4e50e6..000000000000 --- a/quaint/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -stable \ No newline at end of file diff --git a/quaint/src/ast.rs b/quaint/src/ast.rs index 03f9bc234cd2..dc634423014a 100644 --- a/quaint/src/ast.rs +++ b/quaint/src/ast.rs @@ -11,6 +11,7 @@ mod conditions; mod conjunctive; mod cte; mod delete; +mod enums; mod expression; mod function; mod grouping; @@ -35,6 +36,7 @@ pub use conditions::ConditionTree; pub use conjunctive::Conjunctive; pub use cte::{CommonTableExpression, IntoCommonTableExpression}; pub use delete::Delete; +pub use enums::{EnumName, EnumVariant}; pub use expression::*; pub use function::*; pub use grouping::*; @@ -52,4 +54,4 @@ pub use table::*; pub use union::Union; pub use update::*; pub(crate) use values::Params; -pub use values::{IntoRaw, Raw, Value, Values}; +pub use values::{IntoRaw, Raw, Value, ValueType, Values}; diff --git a/quaint/src/ast/column.rs b/quaint/src/ast/column.rs index 87342bd56bcb..836b4ce96527 100644 --- a/quaint/src/ast/column.rs +++ b/quaint/src/ast/column.rs @@ -32,6 +32,12 @@ pub struct Column<'a> { pub(crate) alias: Option>, pub(crate) default: Option>, pub(crate) type_family: Option, + /// Whether the column is an enum. + pub(crate) is_enum: bool, + /// Whether the column is a (scalar) list. + pub(crate) is_list: bool, + /// Whether the column is part of a SELECT or RETURNING clause. + pub(crate) is_selected: bool, } /// Defines a default value for a `Column`. @@ -89,6 +95,33 @@ impl<'a> Column<'a> { self } + /// Sets whether the column points to an enum type. 
+ pub fn set_is_enum(mut self, is_enum: bool) -> Self { + self.is_enum = is_enum; + self + } + + /// Sets whether the column points to a scalar list. + pub fn set_is_list(mut self, is_list: bool) -> Self { + self.is_list = is_list; + self + } + + /// Sets whether the column is selected. + /// + /// On Postgres, this defines whether an enum column should be cast to `TEXT` when rendered. + /// + /// Since enums are user-defined custom types, `tokio-postgres` fires an additional query + /// when selecting columns of type enum to know which custom type the column refers to. + /// Casting the enum column to `TEXT` avoids this roundtrip since `TEXT` is a builtin type. + /// + /// We don't want to cast every single enum column to text though, as this would prevent indexes from being used, + /// so we use this additional field to granularly pick which columns we cast. + pub fn set_is_selected(mut self, is_selected: bool) -> Self { + self.is_selected = is_selected; + self + } + /// True when the default value is set and automatically generated in the /// database. pub fn default_autogen(&self) -> bool { diff --git a/quaint/src/ast/compare.rs b/quaint/src/ast/compare.rs index d92843a23557..9c7548303466 100644 --- a/quaint/src/ast/compare.rs +++ b/quaint/src/ast/compare.rs @@ -37,7 +37,7 @@ pub enum Compare<'a> { /// without visitor transformation in between. Raw(Box>, Cow<'a, str>, Box>), /// All json related comparators - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonCompare(JsonCompare<'a>), /// `left` @@ to_tsquery(`value`) #[cfg(feature = "postgresql")] @@ -558,7 +558,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -578,7 +578,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -608,7 +608,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -638,7 +638,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -666,7 +666,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -694,7 +694,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -713,7 +713,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + 
#[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -732,7 +732,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -977,7 +977,7 @@ where left.compare_raw(raw_comparator.into(), right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -988,7 +988,7 @@ where val.json_array_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -999,7 +999,7 @@ where val.json_array_not_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1010,7 +1010,7 @@ where val.json_array_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1021,7 +1021,7 @@ where val.json_array_not_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1032,7 +1032,7 @@ where val.json_array_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1043,7 +1043,7 @@ where val.json_array_not_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -1054,7 +1054,7 @@ where val.json_type_equals(json_type) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git a/quaint/src/ast/enums.rs b/quaint/src/ast/enums.rs new file mode 100644 index 000000000000..a4e93836d24b --- /dev/null +++ b/quaint/src/ast/enums.rs @@ -0,0 +1,79 @@ +use crate::Value; +use std::{borrow::Cow, fmt}; + +#[derive(Debug, Clone, PartialEq)] +pub struct EnumVariant<'a>(Cow<'a, str>); + +impl<'a> EnumVariant<'a> { + pub fn new(variant: impl Into>) -> Self { + Self(variant.into()) + } + + pub fn into_owned(self) -> String { + self.0.into_owned() + } + + pub fn into_text(self) -> Value<'a> { + Value::text(self.0) + } + + pub fn into_enum(self, name: Option>) -> Value<'a> { + match name { + Some(name) => Value::enum_variant_with_name(self.0, name), + None => Value::enum_variant(self.0), + } + } +} + +impl<'a> AsRef for EnumVariant<'a> { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} + +impl<'a> std::ops::Deref for EnumVariant<'a> { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + 
+impl<'a> fmt::Display for EnumVariant<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_ref()) + } +} + +impl<'a> From> for EnumVariant<'a> { + fn from(value: Cow<'a, str>) -> Self { + Self(value) + } +} + +impl<'a> From for EnumVariant<'a> { + fn from(value: String) -> Self { + Self(value.into()) + } +} + +impl<'a> From<&'a str> for EnumVariant<'a> { + fn from(value: &'a str) -> Self { + Self(value.into()) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct EnumName<'a> { + pub name: Cow<'a, str>, + pub schema_name: Option>, +} + +impl<'a> EnumName<'a> { + pub fn new(name: impl Into>, schema_name: Option>>) -> Self { + Self { + name: name.into(), + schema_name: schema_name.map(|s| s.into()), + } + } +} diff --git a/quaint/src/ast/expression.rs b/quaint/src/ast/expression.rs index b3993abc523b..ea4c32a4fb61 100644 --- a/quaint/src/ast/expression.rs +++ b/quaint/src/ast/expression.rs @@ -1,4 +1,4 @@ -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::{JsonCompare, JsonType}; use crate::ast::*; use query::SelectQuery; @@ -43,38 +43,43 @@ impl<'a> Expression<'a> { } } - #[cfg(feature = "json")] pub(crate) fn is_json_expr(&self) -> bool { match &self.kind { - #[cfg(feature = "json")] - ExpressionKind::Parameterized(Value::Json(_)) => true, - #[cfg(feature = "json")] + ExpressionKind::Parameterized(Value { + typed: ValueType::Json(_), + .. + }) => true, + ExpressionKind::Value(expr) => expr.is_json_value(), - #[cfg(feature = "json")] + ExpressionKind::Function(fun) => fun.returns_json(), _ => false, } } #[allow(dead_code)] - #[cfg(feature = "json")] + pub(crate) fn is_json_value(&self) -> bool { match &self.kind { - #[cfg(feature = "json")] - ExpressionKind::Parameterized(Value::Json(_)) => true, - #[cfg(feature = "json")] + ExpressionKind::Parameterized(Value { + typed: ValueType::Json(_), + .. + }) => true, + ExpressionKind::Value(expr) => expr.is_json_value(), _ => false, } } #[allow(dead_code)] - #[cfg(feature = "json")] + pub(crate) fn into_json_value(self) -> Option { match self.kind { - #[cfg(feature = "json")] - ExpressionKind::Parameterized(Value::Json(json_val)) => json_val, - #[cfg(feature = "json")] + ExpressionKind::Parameterized(Value { + typed: ValueType::Json(json_val), + .. + }) => json_val, + ExpressionKind::Value(expr) => expr.into_json_value(), _ => None, } @@ -221,7 +226,10 @@ pub enum ExpressionKind<'a> { impl<'a> ExpressionKind<'a> { pub(crate) fn is_xml_value(&self) -> bool { match self { - Self::Parameterized(Value::Xml(_)) => true, + Self::Parameterized(Value { + typed: ValueType::Xml(_), + .. 
+ }) => true, Self::Value(expr) => expr.is_xml_value(), _ => false, } @@ -427,7 +435,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Raw(Box::new(self), raw_comparator.into(), Box::new(right.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -435,7 +443,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayContains(Box::new(self), Box::new(item.into()))) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -443,7 +451,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayNotContains(Box::new(self), Box::new(item.into()))) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -453,7 +461,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Equals(Box::new(array_starts_with), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -463,7 +471,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotEquals(Box::new(array_starts_with), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -473,7 +481,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Equals(Box::new(array_ends_into), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -483,7 +491,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotEquals(Box::new(array_ends_into), Box::new(item.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -491,7 +499,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::TypeEquals(Box::new(self), json_type.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git a/quaint/src/ast/function.rs b/quaint/src/ast/function.rs index 123b95566b30..5b6373795485 100644 --- a/quaint/src/ast/function.rs +++ b/quaint/src/ast/function.rs @@ -3,17 +3,17 @@ mod average; mod coalesce; mod concat; mod count; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_extract; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_extract_array; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = 
"postgresql", feature = "mysql"))] mod json_unquote; mod lower; mod maximum; mod minimum; mod row_number; -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] mod row_to_json; #[cfg(any(feature = "postgresql", feature = "mysql"))] mod search; @@ -28,19 +28,19 @@ pub use average::*; pub use coalesce::*; pub use concat::*; pub use count::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] pub use json_extract::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] pub(crate) use json_extract_array::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] pub use json_unquote::*; pub use lower::*; pub use maximum::*; pub use minimum::*; pub use row_number::*; -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] pub use row_to_json::*; -#[cfg(any(feature = "postgresql", feature = "mysql"))] +#[cfg(feature = "mysql")] pub use search::*; pub use sum::*; pub use upper::*; @@ -61,13 +61,13 @@ pub struct Function<'a> { impl<'a> Function<'a> { pub fn returns_json(&self) -> bool { match self.typ_ { - #[cfg(all(feature = "json", feature = "postgresql"))] + #[cfg(feature = "postgresql")] FunctionType::RowToJson(_) => true, - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(feature = "mysql")] FunctionType::JsonExtract(_) => true, - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractLastArrayElem(_) => true, - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractFirstArrayElem(_) => true, _ => false, } @@ -77,7 +77,7 @@ impl<'a> Function<'a> { /// A database function type #[derive(Debug, Clone, PartialEq)] pub(crate) enum FunctionType<'a> { - #[cfg(all(feature = "json", feature = "postgresql"))] + #[cfg(feature = "postgresql")] RowToJson(RowToJson<'a>), RowNumber(RowNumber<'a>), Count(Count<'a>), @@ -90,13 +90,13 @@ pub(crate) enum FunctionType<'a> { Maximum(Maximum<'a>), Coalesce(Coalesce<'a>), Concat(Concat<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtract(JsonExtract<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtractLastArrayElem(JsonExtractLastArrayElem<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtractFirstArrayElem(JsonExtractFirstArrayElem<'a>), - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonUnquote(JsonUnquote<'a>), #[cfg(any(feature = "postgresql", feature = "mysql"))] TextSearch(TextSearch<'a>), @@ -122,19 +122,19 @@ impl<'a> Aliasable<'a> for Function<'a> { } } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] function!(RowToJson); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtract); 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtractLastArrayElem); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtractFirstArrayElem); -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonUnquote); #[cfg(any(feature = "postgresql", feature = "mysql"))] diff --git a/quaint/src/ast/function/row_to_json.rs b/quaint/src/ast/function/row_to_json.rs index 7ce8e0c98cc6..9ffeb6653484 100644 --- a/quaint/src/ast/function/row_to_json.rs +++ b/quaint/src/ast/function/row_to_json.rs @@ -2,8 +2,7 @@ use super::Function; use crate::ast::Table; #[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] /// A representation of the `ROW_TO_JSON` function in the database. /// Only for `Postgresql` pub struct RowToJson<'a> { @@ -31,16 +30,15 @@ pub struct RowToJson<'a> { /// let result = conn.select(select).await?; /// /// assert_eq!( -/// Value::Json(Some(serde_json::json!({ +/// Value::json(serde_json::json!({ /// "toto": "hello_world" -/// }))), +/// })), /// result.into_single().unwrap()[0] /// ); /// # Ok(()) /// # } /// ``` -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] pub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a> where T: Into>, diff --git a/quaint/src/ast/insert.rs b/quaint/src/ast/insert.rs index e984ae0b273e..cd38fff87043 100644 --- a/quaint/src/ast/insert.rs +++ b/quaint/src/ast/insert.rs @@ -256,10 +256,6 @@ impl<'a> Insert<'a> { /// # } /// ``` #[cfg(any(feature = "postgresql", feature = "mssql", feature = "sqlite"))] - #[cfg_attr( - feature = "docs", - doc(cfg(any(feature = "postgresql", feature = "mssql", feature = "sqlite"))) - )] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/ast/row.rs b/quaint/src/ast/row.rs index 3022b9127758..e556cee966af 100644 --- a/quaint/src/ast/row.rs +++ b/quaint/src/ast/row.rs @@ -1,4 +1,4 @@ -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::JsonType; use crate::ast::{Comparable, Compare, Expression}; use std::borrow::Cow; @@ -283,7 +283,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.compare_raw(raw_comparator, right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -293,7 +293,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -303,7 +303,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -313,7 +313,7 @@ impl<'a> Comparable<'a> for 
Row<'a> { value.json_array_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -323,7 +323,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -333,7 +333,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -343,7 +343,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -353,7 +353,7 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_type_equals(json_type) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git a/quaint/src/ast/select.rs b/quaint/src/ast/select.rs index 6ab8df310fe4..96d50ba645c5 100644 --- a/quaint/src/ast/select.rs +++ b/quaint/src/ast/select.rs @@ -453,6 +453,14 @@ impl<'a> Select<'a> { self } + pub fn join(mut self, join: J) -> Self + where + J: Into>, + { + self.joins.push(join.into()); + self + } + /// Adds an ordering to the `ORDER BY` section. 
/// /// ```rust @@ -651,7 +659,7 @@ impl<'a> Select<'a> { ctes.into_iter().collect() }) - .unwrap_or_else(Vec::new); + .unwrap_or_default(); if top_level { let clashing_names = self diff --git a/quaint/src/ast/table.rs b/quaint/src/ast/table.rs index 4b5d50161af9..4eca73f27bc7 100644 --- a/quaint/src/ast/table.rs +++ b/quaint/src/ast/table.rs @@ -344,6 +344,20 @@ impl<'a> Table<'a> { self } + + pub fn join(self, join: J) -> Self + where + J: Into>, + { + let join: Join = join.into(); + + match join { + Join::Inner(x) => self.inner_join(x), + Join::Left(x) => self.left_join(x), + Join::Right(x) => self.right_join(x), + Join::Full(x) => self.full_join(x), + } + } } impl<'a> From<&'a str> for Table<'a> { diff --git a/quaint/src/ast/update.rs b/quaint/src/ast/update.rs index 5d35929eac60..751655bd82e1 100644 --- a/quaint/src/ast/update.rs +++ b/quaint/src/ast/update.rs @@ -150,7 +150,6 @@ impl<'a> Update<'a> { /// # } /// ``` #[cfg(any(feature = "postgresql", feature = "sqlite"))] - #[cfg_attr(feature = "docs", doc(cfg(any(feature = "postgresql", feature = "sqlite"))))] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 5296146646a7..a1bf4f41a26d 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -1,19 +1,16 @@ use crate::ast::*; use crate::error::{Error, ErrorKind}; -#[cfg(feature = "bigdecimal")] use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; -#[cfg(feature = "chrono")] use chrono::{DateTime, NaiveDate, NaiveTime, Utc}; -#[cfg(feature = "json")] use serde_json::{Number, Value as JsonValue}; +use std::fmt::Display; use std::{ borrow::{Borrow, Cow}, convert::TryFrom, fmt, str::FromStr, }; -#[cfg(feature = "uuid")] use uuid::Uuid; /// A value written to the query as-is without parameterization. @@ -36,11 +33,473 @@ where } } +/// A native-column type, i.e. the connector-specific type of the column. +#[derive(Debug, Clone, PartialEq)] +pub struct NativeColumnType<'a>(Cow<'a, str>); + +impl<'a> std::ops::Deref for NativeColumnType<'a> { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl<'a> From<&'a str> for NativeColumnType<'a> { + fn from(s: &'a str) -> Self { + Self(Cow::Owned(s.to_uppercase())) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct Value<'a> { + pub typed: ValueType<'a>, + pub native_column_type: Option>, +} + +impl<'a> Value<'a> { + /// Returns the native column type of the value, if any, in the form + /// of an UPCASE string. ex: "VARCHAR, BYTEA, DATE, TIMEZ" + pub fn native_column_type_name(&'a self) -> Option<&'a str> { + self.native_column_type.as_deref() + } + + /// Changes the value to include information about the native column type + pub fn with_native_column_type>>(mut self, column_type: Option) -> Self { + self.native_column_type = column_type.map(|ct| ct.into()); + self + } + + /// Creates a new 32-bit signed integer. + pub fn int32(value: I) -> Self + where + I: Into, + { + ValueType::int32(value).into_value() + } + + /// Creates a new 64-bit signed integer. + pub fn int64(value: I) -> Self + where + I: Into, + { + ValueType::int64(value).into_value() + } + + /// Creates a new decimal value. + pub fn numeric(value: BigDecimal) -> Self { + ValueType::numeric(value).into_value() + } + + /// Creates a new float value. + pub fn float(value: f32) -> Self { + ValueType::float(value).into_value() + } + + /// Creates a new double value. 
+ pub fn double(value: f64) -> Self { + ValueType::double(value).into_value() + } + + /// Creates a new string value. + pub fn text(value: T) -> Self + where + T: Into>, + { + ValueType::text(value).into_value() + } + + /// Creates a new enum value. + pub fn enum_variant(value: T) -> Self + where + T: Into>, + { + ValueType::enum_variant(value).into_value() + } + + /// Creates a new enum value with the name of the enum attached. + pub fn enum_variant_with_name(value: T, name: U) -> Self + where + T: Into>, + U: Into>, + { + ValueType::enum_variant_with_name(value, name).into_value() + } + + /// Creates a new enum array value + pub fn enum_array(value: T) -> Self + where + T: IntoIterator>, + { + ValueType::enum_array(value).into_value() + } + + /// Creates a new enum array value with the name of the enum attached. + pub fn enum_array_with_name(value: T, name: U) -> Self + where + T: IntoIterator>, + U: Into>, + { + ValueType::enum_array_with_name(value, name).into_value() + } + + /// Creates a new bytes value. + pub fn bytes(value: B) -> Self + where + B: Into>, + { + ValueType::bytes(value).into_value() + } + + /// Creates a new boolean value. + pub fn boolean(value: B) -> Self + where + B: Into, + { + ValueType::boolean(value).into_value() + } + + /// Creates a new character value. + pub fn character(value: C) -> Self + where + C: Into, + { + ValueType::character(value).into_value() + } + + /// Creates a new array value. + pub fn array(value: I) -> Self + where + I: IntoIterator, + V: Into>, + { + ValueType::array(value).into_value() + } + + /// Creates a new uuid value. + pub fn uuid(value: Uuid) -> Self { + ValueType::uuid(value).into_value() + } + + /// Creates a new datetime value. + pub fn datetime(value: DateTime) -> Self { + ValueType::datetime(value).into_value() + } + + /// Creates a new date value. + pub fn date(value: NaiveDate) -> Self { + ValueType::date(value).into_value() + } + + /// Creates a new time value. + pub fn time(value: NaiveTime) -> Self { + ValueType::time(value).into_value() + } + + /// Creates a new JSON value. + pub fn json(value: serde_json::Value) -> Self { + ValueType::json(value).into_value() + } + + /// Creates a new XML value. + pub fn xml(value: T) -> Self + where + T: Into>, + { + ValueType::xml(value).into_value() + } + + /// `true` if the `Value` is null. + pub fn is_null(&self) -> bool { + self.typed.is_null() + } + + /// Returns a &str if the value is text, otherwise `None`. + pub fn as_str(&self) -> Option<&str> { + self.typed.as_str() + } + + /// `true` if the `Value` is text. + pub fn is_text(&self) -> bool { + self.typed.is_text() + } + + /// Returns a char if the value is a char, otherwise `None`. + pub fn as_char(&self) -> Option { + self.typed.as_char() + } + + /// Returns a cloned String if the value is text, otherwise `None`. + pub fn to_string(&self) -> Option { + self.typed.to_string() + } + + /// Transforms the `Value` to a `String` if it's text, + /// otherwise `None`. + pub fn into_string(self) -> Option { + self.typed.into_string() + } + + /// Returns whether this value is the `Bytes` variant. + pub fn is_bytes(&self) -> bool { + self.typed.is_bytes() + } + + /// Returns a bytes slice if the value is text or a byte slice, otherwise `None`. + pub fn as_bytes(&self) -> Option<&[u8]> { + self.typed.as_bytes() + } + + /// Returns a cloned `Vec` if the value is text or a byte slice, otherwise `None`. + pub fn to_bytes(&self) -> Option> { + self.typed.to_bytes() + } + + /// `true` if the `Value` is a 32-bit signed integer. 
+ pub fn is_i32(&self) -> bool { + self.typed.is_i32() + } + + /// `true` if the `Value` is a 64-bit signed integer. + pub fn is_i64(&self) -> bool { + self.typed.is_i64() + } + + /// `true` if the `Value` is a signed integer. + pub fn is_integer(&self) -> bool { + self.typed.is_integer() + } + + /// Returns an `i64` if the value is a 64-bit signed integer, otherwise `None`. + pub fn as_i64(&self) -> Option { + self.typed.as_i64() + } + + /// Returns an `i32` if the value is a 32-bit signed integer, otherwise `None`. + pub fn as_i32(&self) -> Option { + self.typed.as_i32() + } + + /// Returns an `i64` if the value is a signed integer, otherwise `None`. + pub fn as_integer(&self) -> Option { + self.typed.as_integer() + } + + /// Returns a `f64` if the value is a double, otherwise `None`. + pub fn as_f64(&self) -> Option { + self.typed.as_f64() + } + + /// Returns a `f32` if the value is a double, otherwise `None`. + pub fn as_f32(&self) -> Option { + self.typed.as_f32() + } + + /// `true` if the `Value` is a numeric value or can be converted to one. + + pub fn is_numeric(&self) -> bool { + self.typed.is_numeric() + } + + /// Returns a bigdecimal, if the value is a numeric, float or double value, + /// otherwise `None`. + + pub fn into_numeric(self) -> Option { + self.typed.into_numeric() + } + + /// Returns a reference to a bigdecimal, if the value is a numeric. + /// Otherwise `None`. + + pub fn as_numeric(&self) -> Option<&BigDecimal> { + self.typed.as_numeric() + } + + /// `true` if the `Value` is a boolean value. + pub fn is_bool(&self) -> bool { + self.typed.is_bool() + } + + /// Returns a bool if the value is a boolean, otherwise `None`. + pub fn as_bool(&self) -> Option { + self.typed.as_bool() + } + + /// `true` if the `Value` is an Array. + pub fn is_array(&self) -> bool { + self.typed.is_array() + } + + /// `true` if the `Value` is of UUID type. + pub fn is_uuid(&self) -> bool { + self.typed.is_uuid() + } + + /// Returns an UUID if the value is of UUID type, otherwise `None`. + pub fn as_uuid(&self) -> Option { + self.typed.as_uuid() + } + + /// `true` if the `Value` is a DateTime. + pub fn is_datetime(&self) -> bool { + self.typed.is_datetime() + } + + /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`. + pub fn as_datetime(&self) -> Option> { + self.typed.as_datetime() + } + + /// `true` if the `Value` is a Date. + pub fn is_date(&self) -> bool { + self.typed.is_date() + } + + /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`. + pub fn as_date(&self) -> Option { + self.typed.as_date() + } + + /// `true` if the `Value` is a `Time`. + pub fn is_time(&self) -> bool { + self.typed.is_time() + } + + /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`. + pub fn as_time(&self) -> Option { + self.typed.as_time() + } + + /// `true` if the `Value` is a JSON value. + pub fn is_json(&self) -> bool { + self.typed.is_json() + } + + /// Returns a reference to a JSON Value if of Json type, otherwise `None`. + pub fn as_json(&self) -> Option<&serde_json::Value> { + self.typed.as_json() + } + + /// Transforms to a JSON Value if of Json type, otherwise `None`. + pub fn into_json(self) -> Option { + self.typed.into_json() + } + + /// Returns a `Vec` if the value is an array of `T`, otherwise `None`. + pub fn into_vec(self) -> Option> + where + T: TryFrom>, + { + self.typed.into_vec() + } + + /// Returns a cloned Vec if the value is an array of T, otherwise `None`. 
+ pub fn to_vec(&self) -> Option> + where + T: TryFrom>, + { + self.typed.to_vec() + } + + pub fn null_int32() -> Self { + ValueType::Int32(None).into() + } + + pub fn null_int64() -> Self { + ValueType::Int64(None).into() + } + + pub fn null_float() -> Self { + ValueType::Float(None).into() + } + + pub fn null_double() -> Self { + ValueType::Double(None).into() + } + + pub fn null_text() -> Self { + ValueType::Text(None).into() + } + + pub fn null_enum() -> Self { + ValueType::Enum(None, None).into() + } + + pub fn null_enum_array() -> Self { + ValueType::EnumArray(None, None).into() + } + + pub fn null_bytes() -> Self { + ValueType::Bytes(None).into() + } + + pub fn null_boolean() -> Self { + ValueType::Boolean(None).into() + } + + pub fn null_character() -> Self { + ValueType::Char(None).into() + } + + pub fn null_array() -> Self { + ValueType::Array(None).into() + } + + pub fn null_numeric() -> Self { + ValueType::Numeric(None).into() + } + + pub fn null_json() -> Self { + ValueType::Json(None).into() + } + + pub fn null_xml() -> Self { + ValueType::Xml(None).into() + } + + pub fn null_uuid() -> Self { + ValueType::Uuid(None).into() + } + + pub fn null_datetime() -> Self { + ValueType::DateTime(None).into() + } + + pub fn null_date() -> Self { + ValueType::Date(None).into() + } + + pub fn null_time() -> Self { + ValueType::Time(None).into() + } +} + +impl<'a> Display for Value<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.typed.fmt(f) + } +} + +impl<'a> From> for Value<'a> { + fn from(inner: ValueType<'a>) -> Self { + Self { + typed: inner, + native_column_type: Default::default(), + } + } +} + +impl<'a> From> for ValueType<'a> { + fn from(val: Value<'a>) -> Self { + val.typed + } +} + /// A value we must parameterize for the prepared statement. Null values should be /// defined by their corresponding type variants with a `None` value for best /// compatibility. #[derive(Debug, Clone, PartialEq)] -pub enum Value<'a> { +pub enum ValueType<'a> { /// 32-bit signed integer. Int32(Option), /// 64-bit signed integer. @@ -52,7 +511,13 @@ pub enum Value<'a> { /// String value. Text(Option>), /// Database enum value. - Enum(Option>), + /// The optional `EnumName` is only used on PostgreSQL. + /// Read more about it here: https://github.com/prisma/prisma-engines/pull/4280 + Enum(Option>, Option>), + /// Database enum array (PostgreSQL specific). + /// We use a different variant than `ValueType::Array` to uplift the `EnumName` + /// and have it available even for empty enum arrays. + EnumArray(Option>>, Option>), /// Bytes value. Bytes(Option>), /// Boolean value. @@ -62,36 +527,24 @@ pub enum Value<'a> { /// An array value (PostgreSQL). Array(Option>>), /// A numeric value. - #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] Numeric(Option), - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] /// A JSON value. Json(Option), /// A XML value. Xml(Option>), - #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] /// An UUID value. Uuid(Option), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A datetime value. DateTime(Option>), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A date value. Date(Option), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A time value. 
Time(Option), } pub(crate) struct Params<'a>(pub(crate) &'a [Value<'a>]); -impl<'a> fmt::Display for Params<'a> { +impl<'a> Display for Params<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let len = self.0.len(); @@ -107,19 +560,32 @@ impl<'a> fmt::Display for Params<'a> { } } -impl<'a> fmt::Display for Value<'a> { +impl<'a> fmt::Display for ValueType<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let res = match self { - Value::Int32(val) => val.map(|v| write!(f, "{v}")), - Value::Int64(val) => val.map(|v| write!(f, "{v}")), - Value::Float(val) => val.map(|v| write!(f, "{v}")), - Value::Double(val) => val.map(|v| write!(f, "{v}")), - Value::Text(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")), - Value::Bytes(val) => val.as_ref().map(|v| write!(f, "<{} bytes blob>", v.len())), - Value::Enum(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")), - Value::Boolean(val) => val.map(|v| write!(f, "{v}")), - Value::Char(val) => val.map(|v| write!(f, "'{v}'")), - Value::Array(vals) => vals.as_ref().map(|vals| { + ValueType::Int32(val) => val.map(|v| write!(f, "{v}")), + ValueType::Int64(val) => val.map(|v| write!(f, "{v}")), + ValueType::Float(val) => val.map(|v| write!(f, "{v}")), + ValueType::Double(val) => val.map(|v| write!(f, "{v}")), + ValueType::Text(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")), + ValueType::Bytes(val) => val.as_ref().map(|v| write!(f, "<{} bytes blob>", v.len())), + ValueType::Enum(val, _) => val.as_ref().map(|v| write!(f, "\"{v}\"")), + ValueType::EnumArray(vals, _) => vals.as_ref().map(|vals| { + let len = vals.len(); + + write!(f, "[")?; + for (i, val) in vals.iter().enumerate() { + write!(f, "{val}")?; + + if i < (len - 1) { + write!(f, ",")?; + } + } + write!(f, "]") + }), + ValueType::Boolean(val) => val.map(|v| write!(f, "{v}")), + ValueType::Char(val) => val.map(|v| write!(f, "'{v}'")), + ValueType::Array(vals) => vals.as_ref().map(|vals| { let len = vals.len(); write!(f, "[")?; @@ -132,19 +598,14 @@ impl<'a> fmt::Display for Value<'a> { } write!(f, "]") }), - Value::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")), - #[cfg(feature = "bigdecimal")] - Value::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")), - #[cfg(feature = "json")] - Value::Json(val) => val.as_ref().map(|v| write!(f, "{v}")), - #[cfg(feature = "uuid")] - Value::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] - Value::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] - Value::Date(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] - Value::Time(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")), + + ValueType::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")), + ValueType::Json(val) => val.as_ref().map(|v| write!(f, "{v}")), + ValueType::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::Date(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::Time(val) => val.map(|v| write!(f, "\"{v}\"")), }; match res { @@ -154,48 +615,55 @@ impl<'a> fmt::Display for Value<'a> { } } -#[cfg(feature = "json")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] impl<'a> From> for serde_json::Value { fn from(pv: Value<'a>) -> Self { + pv.typed.into() + } +} + +impl<'a> From> for serde_json::Value { + fn from(pv: ValueType<'a>) -> Self { let res = match pv { - Value::Int32(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), - 
Value::Int64(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), - Value::Float(f) => f.map(|f| match Number::from_f64(f as f64) { + ValueType::Int32(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), + ValueType::Int64(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), + ValueType::Float(f) => f.map(|f| match Number::from_f64(f as f64) { Some(number) => serde_json::Value::Number(number), None => serde_json::Value::Null, }), - Value::Double(f) => f.map(|f| match Number::from_f64(f) { + ValueType::Double(f) => f.map(|f| match Number::from_f64(f) { Some(number) => serde_json::Value::Number(number), None => serde_json::Value::Null, }), - Value::Text(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), - Value::Bytes(bytes) => bytes.map(|bytes| serde_json::Value::String(base64::encode(bytes))), - Value::Enum(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), - Value::Boolean(b) => b.map(serde_json::Value::Bool), - Value::Char(c) => c.map(|c| { + ValueType::Text(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), + ValueType::Bytes(bytes) => bytes.map(|bytes| serde_json::Value::String(base64::encode(bytes))), + ValueType::Enum(cow, _) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), + ValueType::EnumArray(values, _) => values.map(|values| { + serde_json::Value::Array( + values + .into_iter() + .map(|value| serde_json::Value::String(value.into_owned())) + .collect(), + ) + }), + ValueType::Boolean(b) => b.map(serde_json::Value::Bool), + ValueType::Char(c) => c.map(|c| { let bytes = [c as u8]; let s = std::str::from_utf8(&bytes) .expect("interpret byte as UTF-8") .to_string(); serde_json::Value::String(s) }), - Value::Xml(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), - Value::Array(v) => { + ValueType::Xml(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), + ValueType::Array(v) => { v.map(|v| serde_json::Value::Array(v.into_iter().map(serde_json::Value::from).collect())) } - #[cfg(feature = "bigdecimal")] - Value::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()), - #[cfg(feature = "json")] - Value::Json(v) => v, - #[cfg(feature = "uuid")] - Value::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())), - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), - #[cfg(feature = "chrono")] - Value::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), - #[cfg(feature = "chrono")] - Value::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), + + ValueType::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()), + ValueType::Json(v) => v, + ValueType::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())), + ValueType::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), + ValueType::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), + ValueType::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), }; match res { @@ -205,260 +673,266 @@ impl<'a> From> for serde_json::Value { } } -impl<'a> Value<'a> { +impl<'a> ValueType<'a> { + pub fn into_value(self) -> Value<'a> { + self.into() + } + /// Creates a new 32-bit signed integer. 
- pub fn int32(value: I) -> Self + pub(crate) fn int32(value: I) -> Self where I: Into, { - Value::Int32(Some(value.into())) + Self::Int32(Some(value.into())) } /// Creates a new 64-bit signed integer. - pub fn int64(value: I) -> Self + pub(crate) fn int64(value: I) -> Self where I: Into, { - Value::Int64(Some(value.into())) - } - - /// Creates a new 32-bit signed integer. - pub fn integer(value: I) -> Self - where - I: Into, - { - Value::Int32(Some(value.into())) + Self::Int64(Some(value.into())) } /// Creates a new decimal value. - #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub const fn numeric(value: BigDecimal) -> Self { - Value::Numeric(Some(value)) + + pub(crate) fn numeric(value: BigDecimal) -> Self { + Self::Numeric(Some(value)) } /// Creates a new float value. - pub const fn float(value: f32) -> Self { + pub(crate) fn float(value: f32) -> Self { Self::Float(Some(value)) } /// Creates a new double value. - pub const fn double(value: f64) -> Self { + pub(crate) fn double(value: f64) -> Self { Self::Double(Some(value)) } /// Creates a new string value. - pub fn text(value: T) -> Self + pub(crate) fn text(value: T) -> Self where T: Into>, { - Value::Text(Some(value.into())) + Self::Text(Some(value.into())) } /// Creates a new enum value. - pub fn enum_variant(value: T) -> Self + pub(crate) fn enum_variant(value: T) -> Self where - T: Into>, + T: Into>, + { + Self::Enum(Some(value.into()), None) + } + + /// Creates a new enum value with the name of the enum attached. + pub(crate) fn enum_variant_with_name(value: T, enum_name: U) -> Self + where + T: Into>, + U: Into>, + { + Self::Enum(Some(value.into()), Some(enum_name.into())) + } + + /// Creates a new enum array value + pub(crate) fn enum_array(value: T) -> Self + where + T: IntoIterator>, { - Value::Enum(Some(value.into())) + Self::EnumArray(Some(value.into_iter().collect()), None) + } + + /// Creates a new enum array value with the name of the enum attached. + pub(crate) fn enum_array_with_name(value: T, name: U) -> Self + where + T: IntoIterator>, + U: Into>, + { + Self::EnumArray(Some(value.into_iter().collect()), Some(name.into())) } /// Creates a new bytes value. - pub fn bytes(value: B) -> Self + pub(crate) fn bytes(value: B) -> Self where B: Into>, { - Value::Bytes(Some(value.into())) + Self::Bytes(Some(value.into())) } /// Creates a new boolean value. - pub fn boolean(value: B) -> Self + pub(crate) fn boolean(value: B) -> Self where B: Into, { - Value::Boolean(Some(value.into())) + Self::Boolean(Some(value.into())) } /// Creates a new character value. - pub fn character(value: C) -> Self + pub(crate) fn character(value: C) -> Self where C: Into, { - Value::Char(Some(value.into())) + Self::Char(Some(value.into())) } /// Creates a new array value. - pub fn array(value: I) -> Self + pub(crate) fn array(value: I) -> Self where I: IntoIterator, V: Into>, { - Value::Array(Some(value.into_iter().map(|v| v.into()).collect())) + Self::Array(Some(value.into_iter().map(|v| v.into()).collect())) } /// Creates a new uuid value. - #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] - pub const fn uuid(value: Uuid) -> Self { - Value::Uuid(Some(value)) + pub(crate) fn uuid(value: Uuid) -> Self { + Self::Uuid(Some(value)) } /// Creates a new datetime value. 
- #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn datetime(value: DateTime) -> Self { - Value::DateTime(Some(value)) + pub(crate) fn datetime(value: DateTime) -> Self { + Self::DateTime(Some(value)) } /// Creates a new date value. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn date(value: NaiveDate) -> Self { - Value::Date(Some(value)) + pub(crate) fn date(value: NaiveDate) -> Self { + Self::Date(Some(value)) } /// Creates a new time value. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn time(value: NaiveTime) -> Self { - Value::Time(Some(value)) + pub(crate) fn time(value: NaiveTime) -> Self { + Self::Time(Some(value)) } /// Creates a new JSON value. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] - pub const fn json(value: serde_json::Value) -> Self { - Value::Json(Some(value)) + pub(crate) fn json(value: serde_json::Value) -> Self { + Self::Json(Some(value)) } /// Creates a new XML value. - pub fn xml(value: T) -> Self + pub(crate) fn xml(value: T) -> Self where T: Into>, { - Value::Xml(Some(value.into())) + Self::Xml(Some(value.into())) } /// `true` if the `Value` is null. - pub const fn is_null(&self) -> bool { + pub fn is_null(&self) -> bool { match self { - Value::Int32(i) => i.is_none(), - Value::Int64(i) => i.is_none(), - Value::Float(i) => i.is_none(), - Value::Double(i) => i.is_none(), - Value::Text(t) => t.is_none(), - Value::Enum(e) => e.is_none(), - Value::Bytes(b) => b.is_none(), - Value::Boolean(b) => b.is_none(), - Value::Char(c) => c.is_none(), - Value::Array(v) => v.is_none(), - Value::Xml(s) => s.is_none(), - #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.is_none(), - #[cfg(feature = "uuid")] - Value::Uuid(u) => u.is_none(), - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.is_none(), - #[cfg(feature = "chrono")] - Value::Date(d) => d.is_none(), - #[cfg(feature = "chrono")] - Value::Time(t) => t.is_none(), - #[cfg(feature = "json")] - Value::Json(json) => json.is_none(), + Self::Int32(i) => i.is_none(), + Self::Int64(i) => i.is_none(), + Self::Float(i) => i.is_none(), + Self::Double(i) => i.is_none(), + Self::Text(t) => t.is_none(), + Self::Enum(e, _) => e.is_none(), + Self::EnumArray(e, _) => e.is_none(), + Self::Bytes(b) => b.is_none(), + Self::Boolean(b) => b.is_none(), + Self::Char(c) => c.is_none(), + Self::Array(v) => v.is_none(), + Self::Xml(s) => s.is_none(), + Self::Numeric(r) => r.is_none(), + Self::Uuid(u) => u.is_none(), + Self::DateTime(dt) => dt.is_none(), + Self::Date(d) => d.is_none(), + Self::Time(t) => t.is_none(), + Self::Json(json) => json.is_none(), } } /// `true` if the `Value` is text. - pub const fn is_text(&self) -> bool { - matches!(self, Value::Text(_)) + pub(crate) fn is_text(&self) -> bool { + matches!(self, Self::Text(_)) } /// Returns a &str if the value is text, otherwise `None`. - pub fn as_str(&self) -> Option<&str> { + pub(crate) fn as_str(&self) -> Option<&str> { match self { - Value::Text(Some(cow)) => Some(cow.borrow()), - Value::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).ok(), + Self::Text(Some(cow)) => Some(cow.borrow()), + Self::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).ok(), _ => None, } } /// Returns a char if the value is a char, otherwise `None`. 
- pub const fn as_char(&self) -> Option { + pub(crate) fn as_char(&self) -> Option { match self { - Value::Char(c) => *c, + Self::Char(c) => *c, _ => None, } } /// Returns a cloned String if the value is text, otherwise `None`. - pub fn to_string(&self) -> Option { + pub(crate) fn to_string(&self) -> Option { match self { - Value::Text(Some(cow)) => Some(cow.to_string()), - Value::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).map(|s| s.to_owned()).ok(), + Self::Text(Some(cow)) => Some(cow.to_string()), + Self::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).map(|s| s.to_owned()).ok(), _ => None, } } /// Transforms the `Value` to a `String` if it's text, /// otherwise `None`. - pub fn into_string(self) -> Option { + pub(crate) fn into_string(self) -> Option { match self { - Value::Text(Some(cow)) => Some(cow.into_owned()), - Value::Bytes(Some(cow)) => String::from_utf8(cow.into_owned()).ok(), + Self::Text(Some(cow)) => Some(cow.into_owned()), + Self::Bytes(Some(cow)) => String::from_utf8(cow.into_owned()).ok(), _ => None, } } /// Returns whether this value is the `Bytes` variant. - pub const fn is_bytes(&self) -> bool { - matches!(self, Value::Bytes(_)) + pub(crate) fn is_bytes(&self) -> bool { + matches!(self, Self::Bytes(_)) } /// Returns a bytes slice if the value is text or a byte slice, otherwise `None`. - pub fn as_bytes(&self) -> Option<&[u8]> { + pub(crate) fn as_bytes(&self) -> Option<&[u8]> { match self { - Value::Text(Some(cow)) => Some(cow.as_ref().as_bytes()), - Value::Bytes(Some(cow)) => Some(cow.as_ref()), + Self::Text(Some(cow)) => Some(cow.as_ref().as_bytes()), + Self::Bytes(Some(cow)) => Some(cow.as_ref()), _ => None, } } /// Returns a cloned `Vec` if the value is text or a byte slice, otherwise `None`. - pub fn to_bytes(&self) -> Option> { + pub(crate) fn to_bytes(&self) -> Option> { match self { - Value::Text(Some(cow)) => Some(cow.to_string().into_bytes()), - Value::Bytes(Some(cow)) => Some(cow.to_vec()), + Self::Text(Some(cow)) => Some(cow.to_string().into_bytes()), + Self::Bytes(Some(cow)) => Some(cow.to_vec()), _ => None, } } /// `true` if the `Value` is a 32-bit signed integer. - pub const fn is_i32(&self) -> bool { - matches!(self, Value::Int32(_)) + pub(crate) fn is_i32(&self) -> bool { + matches!(self, Self::Int32(_)) } /// `true` if the `Value` is a 64-bit signed integer. - pub const fn is_i64(&self) -> bool { - matches!(self, Value::Int64(_)) + pub(crate) fn is_i64(&self) -> bool { + matches!(self, Self::Int64(_)) } /// `true` if the `Value` is a signed integer. - pub const fn is_integer(&self) -> bool { - matches!(self, Value::Int32(_) | Value::Int64(_)) + pub fn is_integer(&self) -> bool { + matches!(self, Self::Int32(_) | Self::Int64(_)) } /// Returns an `i64` if the value is a 64-bit signed integer, otherwise `None`. - pub const fn as_i64(&self) -> Option { + pub(crate) fn as_i64(&self) -> Option { match self { - Value::Int64(i) => *i, + Self::Int64(i) => *i, _ => None, } } /// Returns an `i32` if the value is a 32-bit signed integer, otherwise `None`. - pub const fn as_i32(&self) -> Option { + pub(crate) fn as_i32(&self) -> Option { match self { - Value::Int32(i) => *i, + Self::Int32(i) => *i, _ => None, } } @@ -466,189 +940,164 @@ impl<'a> Value<'a> { /// Returns an `i64` if the value is a signed integer, otherwise `None`. 
pub fn as_integer(&self) -> Option { match self { - Value::Int32(i) => i.map(|i| i as i64), - Value::Int64(i) => *i, + Self::Int32(i) => i.map(|i| i as i64), + Self::Int64(i) => *i, _ => None, } } /// Returns a `f64` if the value is a double, otherwise `None`. - pub const fn as_f64(&self) -> Option { + pub(crate) fn as_f64(&self) -> Option { match self { - Value::Double(Some(f)) => Some(*f), + Self::Double(Some(f)) => Some(*f), _ => None, } } /// Returns a `f32` if the value is a double, otherwise `None`. - pub const fn as_f32(&self) -> Option { + pub(crate) fn as_f32(&self) -> Option { match self { - Value::Float(Some(f)) => Some(*f), + Self::Float(Some(f)) => Some(*f), _ => None, } } /// `true` if the `Value` is a numeric value or can be converted to one. - #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub const fn is_numeric(&self) -> bool { - matches!(self, Value::Numeric(_) | Value::Float(_) | Value::Double(_)) + + pub(crate) fn is_numeric(&self) -> bool { + matches!(self, Self::Numeric(_) | Self::Float(_) | Self::Double(_)) } /// Returns a bigdecimal, if the value is a numeric, float or double value, /// otherwise `None`. - #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub fn into_numeric(self) -> Option { + + pub(crate) fn into_numeric(self) -> Option { match self { - Value::Numeric(d) => d, - Value::Float(f) => f.and_then(BigDecimal::from_f32), - Value::Double(f) => f.and_then(BigDecimal::from_f64), + Self::Numeric(d) => d, + Self::Float(f) => f.and_then(BigDecimal::from_f32), + Self::Double(f) => f.and_then(BigDecimal::from_f64), _ => None, } } /// Returns a reference to a bigdecimal, if the value is a numeric. /// Otherwise `None`. - #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub const fn as_numeric(&self) -> Option<&BigDecimal> { + + pub(crate) fn as_numeric(&self) -> Option<&BigDecimal> { match self { - Value::Numeric(d) => d.as_ref(), + Self::Numeric(d) => d.as_ref(), _ => None, } } /// `true` if the `Value` is a boolean value. - pub const fn is_bool(&self) -> bool { + pub(crate) fn is_bool(&self) -> bool { match self { - Value::Boolean(_) => true, + Self::Boolean(_) => true, // For schemas which don't tag booleans - Value::Int32(Some(i)) if *i == 0 || *i == 1 => true, - Value::Int64(Some(i)) if *i == 0 || *i == 1 => true, + Self::Int32(Some(i)) if *i == 0 || *i == 1 => true, + Self::Int64(Some(i)) if *i == 0 || *i == 1 => true, _ => false, } } /// Returns a bool if the value is a boolean, otherwise `None`. - pub const fn as_bool(&self) -> Option { + pub(crate) fn as_bool(&self) -> Option { match self { - Value::Boolean(b) => *b, + Self::Boolean(b) => *b, // For schemas which don't tag booleans - Value::Int32(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), - Value::Int64(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), + Self::Int32(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), + Self::Int64(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), _ => None, } } /// `true` if the `Value` is an Array. - pub const fn is_array(&self) -> bool { - matches!(self, Value::Array(_)) + pub(crate) fn is_array(&self) -> bool { + matches!(self, Self::Array(_)) } /// `true` if the `Value` is of UUID type. 
- #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] - pub const fn is_uuid(&self) -> bool { - matches!(self, Value::Uuid(_)) + pub(crate) fn is_uuid(&self) -> bool { + matches!(self, Self::Uuid(_)) } /// Returns an UUID if the value is of UUID type, otherwise `None`. - #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] - pub const fn as_uuid(&self) -> Option { + pub(crate) fn as_uuid(&self) -> Option { match self { - Value::Uuid(u) => *u, + Self::Uuid(u) => *u, _ => None, } } /// `true` if the `Value` is a DateTime. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn is_datetime(&self) -> bool { - matches!(self, Value::DateTime(_)) + pub(crate) fn is_datetime(&self) -> bool { + matches!(self, Self::DateTime(_)) } /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn as_datetime(&self) -> Option> { + pub(crate) fn as_datetime(&self) -> Option> { match self { - Value::DateTime(dt) => *dt, + Self::DateTime(dt) => *dt, _ => None, } } /// `true` if the `Value` is a Date. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn is_date(&self) -> bool { - matches!(self, Value::Date(_)) + pub(crate) fn is_date(&self) -> bool { + matches!(self, Self::Date(_)) } /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn as_date(&self) -> Option { + pub(crate) fn as_date(&self) -> Option { match self { - Value::Date(dt) => *dt, + Self::Date(dt) => *dt, _ => None, } } /// `true` if the `Value` is a `Time`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn is_time(&self) -> bool { - matches!(self, Value::Time(_)) + pub(crate) fn is_time(&self) -> bool { + matches!(self, Self::Time(_)) } /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] - pub const fn as_time(&self) -> Option { + pub(crate) fn as_time(&self) -> Option { match self { - Value::Time(time) => *time, + Self::Time(time) => *time, _ => None, } } /// `true` if the `Value` is a JSON value. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] - pub const fn is_json(&self) -> bool { - matches!(self, Value::Json(_)) + pub(crate) fn is_json(&self) -> bool { + matches!(self, Self::Json(_)) } /// Returns a reference to a JSON Value if of Json type, otherwise `None`. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] - pub const fn as_json(&self) -> Option<&serde_json::Value> { + pub(crate) fn as_json(&self) -> Option<&serde_json::Value> { match self { - Value::Json(Some(j)) => Some(j), + Self::Json(Some(j)) => Some(j), _ => None, } } /// Transforms to a JSON Value if of Json type, otherwise `None`. - #[cfg(feature = "json")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] - pub fn into_json(self) -> Option { + pub(crate) fn into_json(self) -> Option { match self { - Value::Json(Some(j)) => Some(j), + Self::Json(Some(j)) => Some(j), _ => None, } } /// Returns a `Vec` if the value is an array of `T`, otherwise `None`. 
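// [Editor's sketch — not part of the diff] How the array conversion helper
// introduced below is used after the refactor; this mirrors the unit tests
// further down in the diff, where the helpers are reached through Value's
// `typed` field.
fn into_vec_sketch() {
    let pv = Value::array(vec![1_i64, 2, 3]);

    // Succeeds because every element converts via TryFrom<Value> for i64.
    let ints: Vec<i64> = pv.typed.into_vec().expect("array of i64");
    assert_eq!(ints, vec![1, 2, 3]);

    // A mismatched element type yields None instead of panicking.
    let pv = Value::array(vec![1_i64]);
    let strings: Option<Vec<String>> = pv.typed.into_vec();
    assert!(strings.is_none());
}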
- pub fn into_vec(self) -> Option> + pub(crate) fn into_vec(self) -> Option> where // Implement From T: TryFrom>, { match self { - Value::Array(Some(vec)) => { + Self::Array(Some(vec)) => { let rslt: Result, _> = vec.into_iter().map(T::try_from).collect(); match rslt { Err(_) => None, @@ -660,12 +1109,12 @@ impl<'a> Value<'a> { } /// Returns a cloned Vec if the value is an array of T, otherwise `None`. - pub fn to_vec(&self) -> Option> + pub(crate) fn to_vec(&self) -> Option> where T: TryFrom>, { match self { - Value::Array(Some(vec)) => { + Self::Array(Some(vec)) => { let rslt: Result, _> = vec.clone().into_iter().map(T::try_from).collect(); match rslt { Err(_) => None, @@ -681,23 +1130,18 @@ value!(val: i64, Int64, val); value!(val: i32, Int32, val); value!(val: bool, Boolean, val); value!(val: &'a str, Text, val.into()); +value!(val: &'a String, Text, val.into()); +value!(val: &'a &str, Text, (*val).into()); value!(val: String, Text, val.into()); value!(val: usize, Int64, i64::try_from(val).unwrap()); value!(val: &'a [u8], Bytes, val.into()); value!(val: f64, Double, val); value!(val: f32, Float, val); - -#[cfg(feature = "chrono")] value!(val: DateTime, DateTime, val); -#[cfg(feature = "chrono")] value!(val: chrono::NaiveTime, Time, val); -#[cfg(feature = "chrono")] value!(val: chrono::NaiveDate, Date, val); -#[cfg(feature = "bigdecimal")] value!(val: BigDecimal, Numeric, val); -#[cfg(feature = "json")] value!(val: JsonValue, Json, val); -#[cfg(feature = "uuid")] value!(val: Uuid, Uuid, val); impl<'a> TryFrom> for i64 { @@ -705,6 +1149,7 @@ impl<'a> TryFrom> for i64 { fn try_from(value: Value<'a>) -> Result { value + .typed .as_i64() .ok_or_else(|| Error::builder(ErrorKind::conversion("Not an i64")).build()) } @@ -720,7 +1165,6 @@ impl<'a> TryFrom> for i32 { } } -#[cfg(feature = "bigdecimal")] impl<'a> TryFrom> for BigDecimal { type Error = Error; @@ -761,8 +1205,6 @@ impl<'a> TryFrom> for bool { } } -#[cfg(feature = "chrono")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] impl<'a> TryFrom> for DateTime { type Error = Error; @@ -777,24 +1219,24 @@ impl<'a> TryFrom<&Value<'a>> for Option { type Error = Error; fn try_from(value: &Value<'a>) -> Result, Self::Error> { - match value { - val @ Value::Text(Some(_)) => { - let text = val.as_str().unwrap(); + match &value.typed { + ValueType::Text(Some(_)) => { + let text = value.typed.as_str().unwrap(); match std::net::IpAddr::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - val @ Value::Bytes(Some(_)) => { - let text = val.as_str().unwrap(); + ValueType::Bytes(Some(_)) => { + let text = value.typed.as_str().unwrap(); match std::net::IpAddr::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - v if v.is_null() => Ok(None), + _ if value.typed.is_null() => Ok(None), v => { let kind = ErrorKind::conversion(format!("Couldn't convert value of type `{v:?}` to std::net::IpAddr.")); @@ -805,30 +1247,29 @@ impl<'a> TryFrom<&Value<'a>> for Option { } } -#[cfg(feature = "uuid")] impl<'a> TryFrom<&Value<'a>> for Option { type Error = Error; fn try_from(value: &Value<'a>) -> Result, Self::Error> { - match value { - Value::Uuid(uuid) => Ok(*uuid), - val @ Value::Text(Some(_)) => { - let text = val.as_str().unwrap(); + match &value.typed { + ValueType::Uuid(uuid) => Ok(*uuid), + ValueType::Text(Some(_)) => { + let text = value.typed.as_str().unwrap(); match uuid::Uuid::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - val @ Value::Bytes(Some(_)) => { - let text = 
val.as_str().unwrap(); + ValueType::Bytes(Some(_)) => { + let text = value.typed.as_str().unwrap(); match uuid::Uuid::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - v if v.is_null() => Ok(None), + _ if value.typed.is_null() => Ok(None), v => { let kind = ErrorKind::conversion(format!("Couldn't convert value of type `{v:?}` to uuid::Uuid.")); @@ -926,62 +1367,59 @@ impl<'a> IntoIterator for Values<'a> { #[cfg(test)] mod tests { use super::*; - #[cfg(feature = "chrono")] use std::str::FromStr; #[test] fn a_parameterized_value_of_ints32_can_be_converted_into_a_vec() { let pv = Value::array(vec![1]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![1]); } #[test] fn a_parameterized_value_of_ints64_can_be_converted_into_a_vec() { let pv = Value::array(vec![1_i64]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![1]); } #[test] fn a_parameterized_value_of_reals_can_be_converted_into_a_vec() { let pv = Value::array(vec![1.0]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![1.0]); } #[test] fn a_parameterized_value_of_texts_can_be_converted_into_a_vec() { let pv = Value::array(vec!["test"]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec!["test"]); } #[test] fn a_parameterized_value_of_booleans_can_be_converted_into_a_vec() { let pv = Value::array(vec![true]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![true]); } #[test] - #[cfg(feature = "chrono")] fn a_parameterized_value_of_datetimes_can_be_converted_into_a_vec() { let datetime = DateTime::from_str("2019-07-27T05:30:30Z").expect("parsing date/time"); let pv = Value::array(vec![datetime]); - let values: Vec> = pv.into_vec().expect("convert into Vec"); + let values: Vec> = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![datetime]); } #[test] fn a_parameterized_value_of_an_array_cant_be_converted_into_a_vec_of_the_wrong_type() { let pv = Value::array(vec![1]); - let rslt: Option> = pv.into_vec(); + let rslt: Option> = pv.typed.into_vec(); assert!(rslt.is_none()); } #[test] - #[cfg(feature = "chrono")] fn display_format_for_datetime() { let dt: DateTime = DateTime::from_str("2019-07-27T05:30:30Z").expect("failed while parsing date"); let pv = Value::datetime(dt); @@ -990,7 +1428,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_date() { let date = NaiveDate::from_ymd_opt(2022, 8, 11).unwrap(); let pv = Value::date(date); @@ -999,7 +1436,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_time() { let time = NaiveTime::from_hms_opt(16, 17, 00).unwrap(); let pv = Value::time(time); @@ -1008,7 +1444,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn display_format_for_uuid() { let id = Uuid::from_str("67e5504410b1426f9247bb680e5fe0c8").unwrap(); let pv = Value::uuid(id); diff --git a/quaint/src/connector/connection_info.rs b/quaint/src/connector/connection_info.rs index 7b09472c3312..50029b16c15b 100644 --- a/quaint/src/connector/connection_info.rs +++ b/quaint/src/connector/connection_info.rs @@ -18,19 +18,15 @@ use 
std::convert::TryFrom; pub enum ConnectionInfo { /// A PostgreSQL connection URL. #[cfg(feature = "postgresql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] Postgres(PostgresUrl), /// A MySQL connection URL. #[cfg(feature = "mysql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] Mysql(MysqlUrl), /// A SQL Server connection URL. #[cfg(feature = "mssql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] Mssql(MssqlUrl), /// A SQLite connection URL. #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] Sqlite { /// The filesystem path of the SQLite database. file_path: String, @@ -38,7 +34,6 @@ pub enum ConnectionInfo { db_name: String, }, #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] InMemorySqlite { db_name: String }, } @@ -236,16 +231,12 @@ impl ConnectionInfo { #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum SqlFamily { #[cfg(feature = "postgresql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] Postgres, #[cfg(feature = "mysql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] Mysql, #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] Sqlite, #[cfg(feature = "mssql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] Mssql, } diff --git a/quaint/src/connector/metrics.rs b/quaint/src/connector/metrics.rs index e806f98c1d8f..2705a40b32b2 100644 --- a/quaint/src/connector/metrics.rs +++ b/quaint/src/connector/metrics.rs @@ -36,7 +36,7 @@ where trace_query(&query_fmt, params, result, start); } else { - trace_query(&query, params, result, start); + trace_query(query, params, result, start); }; } diff --git a/quaint/src/connector/mssql.rs b/quaint/src/connector/mssql.rs index 848e708c7dbb..cef092edb9d7 100644 --- a/quaint/src/connector/mssql.rs +++ b/quaint/src/connector/mssql.rs @@ -30,7 +30,6 @@ pub use tiberius; /// Wraps a connection url and exposes the parsing logic used by Quaint, /// including default values. #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub struct MssqlUrl { connection_string: String, query_params: MssqlQueryParams, @@ -38,7 +37,6 @@ pub struct MssqlUrl { /// TLS mode when connecting to SQL Server. #[derive(Debug, Clone, Copy)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub enum EncryptMode { /// All traffic is encrypted. On, @@ -270,7 +268,6 @@ impl MssqlQueryParams { /// A connector interface for the SQL Server database. 
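// [Editor's sketch — not part of the diff] Purely for orientation: the two
// enums above relate one-to-one by backend, and quaint itself already exposes
// this mapping on ConnectionInfo. The sketch restates the enum shapes with the
// per-backend cargo feature gates omitted for brevity.
fn family_of(info: &ConnectionInfo) -> SqlFamily {
    match info {
        ConnectionInfo::Postgres(_) => SqlFamily::Postgres,
        ConnectionInfo::Mysql(_) => SqlFamily::Mysql,
        ConnectionInfo::Mssql(_) => SqlFamily::Mssql,
        ConnectionInfo::Sqlite { .. } | ConnectionInfo::InMemorySqlite { .. } => SqlFamily::Sqlite,
    }
}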
#[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub struct Mssql { client: Mutex>>, url: MssqlUrl, diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs index 60e3f4ab6eb7..870654ad5de3 100644 --- a/quaint/src/connector/mssql/conversion.rs +++ b/quaint/src/connector/mssql/conversion.rs @@ -1,39 +1,31 @@ -use crate::ast::Value; -#[cfg(not(feature = "bigdecimal"))] -use crate::error::*; -#[cfg(feature = "bigdecimal")] +use crate::ast::{Value, ValueType}; + use bigdecimal::BigDecimal; use std::{borrow::Cow, convert::TryFrom}; -#[cfg(feature = "bigdecimal")] + use tiberius::ToSql; use tiberius::{ColumnData, FromSql, IntoSql}; impl<'a> IntoSql<'a> for &'a Value<'a> { fn into_sql(self) -> ColumnData<'a> { - match self { - Value::Int32(val) => val.into_sql(), - Value::Int64(val) => val.into_sql(), - Value::Float(val) => val.into_sql(), - Value::Double(val) => val.into_sql(), - Value::Text(val) => val.as_deref().into_sql(), - Value::Bytes(val) => val.as_deref().into_sql(), - Value::Enum(val) => val.as_deref().into_sql(), - Value::Boolean(val) => val.into_sql(), - Value::Char(val) => val.as_ref().map(|val| format!("{val}")).into_sql(), - Value::Xml(val) => val.as_deref().into_sql(), - Value::Array(_) => panic!("Arrays are not supported on SQL Server."), - #[cfg(feature = "bigdecimal")] - Value::Numeric(val) => (*val).to_sql(), - #[cfg(feature = "json")] - Value::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(), - #[cfg(feature = "uuid")] - Value::Uuid(val) => val.into_sql(), - #[cfg(feature = "chrono")] - Value::DateTime(val) => val.into_sql(), - #[cfg(feature = "chrono")] - Value::Date(val) => val.into_sql(), - #[cfg(feature = "chrono")] - Value::Time(val) => val.into_sql(), + match &self.typed { + ValueType::Int32(val) => val.into_sql(), + ValueType::Int64(val) => val.into_sql(), + ValueType::Float(val) => val.into_sql(), + ValueType::Double(val) => val.into_sql(), + ValueType::Text(val) => val.as_deref().into_sql(), + ValueType::Bytes(val) => val.as_deref().into_sql(), + ValueType::Enum(val, _) => val.as_deref().into_sql(), + ValueType::Boolean(val) => val.into_sql(), + ValueType::Char(val) => val.as_ref().map(|val| format!("{val}")).into_sql(), + ValueType::Xml(val) => val.as_deref().into_sql(), + ValueType::Array(_) | ValueType::EnumArray(_, _) => panic!("Arrays are not supported on SQL Server."), + ValueType::Numeric(val) => (*val).to_sql(), + ValueType::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(), + ValueType::Uuid(val) => val.into_sql(), + ValueType::DateTime(val) => val.into_sql(), + ValueType::Date(val) => val.into_sql(), + ValueType::Time(val) => val.into_sql(), } } } @@ -43,65 +35,53 @@ impl TryFrom> for Value<'static> { fn try_from(cd: ColumnData<'static>) -> crate::Result { let res = match cd { - ColumnData::U8(num) => Value::Int32(num.map(i32::from)), - ColumnData::I16(num) => Value::Int32(num.map(i32::from)), - ColumnData::I32(num) => Value::Int32(num.map(i32::from)), - ColumnData::I64(num) => Value::Int64(num.map(i64::from)), - ColumnData::F32(num) => Value::Float(num), - ColumnData::F64(num) => Value::Double(num), - ColumnData::Bit(b) => Value::Boolean(b), - ColumnData::String(s) => Value::Text(s), - ColumnData::Guid(uuid) => Value::Uuid(uuid), - ColumnData::Binary(bytes) => Value::Bytes(bytes), - #[cfg(feature = "bigdecimal")] - numeric @ ColumnData::Numeric(_) => Value::Numeric(BigDecimal::from_sql(&numeric)?), - #[cfg(not(feature = 
"bigdecimal"))] - _numeric @ ColumnData::Numeric(_) => { - let kind = ErrorKind::conversion("Please enable `bigdecimal` feature to read numeric values"); - return Err(Error::builder(kind).build()); - } - #[cfg(feature = "chrono")] + ColumnData::U8(num) => ValueType::Int32(num.map(i32::from)), + ColumnData::I16(num) => ValueType::Int32(num.map(i32::from)), + ColumnData::I32(num) => ValueType::Int32(num.map(i32::from)), + ColumnData::I64(num) => ValueType::Int64(num.map(i64::from)), + ColumnData::F32(num) => ValueType::Float(num), + ColumnData::F64(num) => ValueType::Double(num), + ColumnData::Bit(b) => ValueType::Boolean(b), + ColumnData::String(s) => ValueType::Text(s), + ColumnData::Guid(uuid) => ValueType::Uuid(uuid), + ColumnData::Binary(bytes) => ValueType::Bytes(bytes), + numeric @ ColumnData::Numeric(_) => ValueType::Numeric(BigDecimal::from_sql(&numeric)?), dt @ ColumnData::DateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); - Value::DateTime(dt) + ValueType::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::SmallDateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); - Value::DateTime(dt) + ValueType::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::Time(_) => { use tiberius::time::chrono::NaiveTime; - Value::Time(NaiveTime::from_sql(&dt)?) + ValueType::Time(NaiveTime::from_sql(&dt)?) } - #[cfg(feature = "chrono")] dt @ ColumnData::Date(_) => { use tiberius::time::chrono::NaiveDate; - Value::Date(NaiveDate::from_sql(&dt)?) + ValueType::Date(NaiveDate::from_sql(&dt)?) } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTime2(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); - Value::DateTime(dt) + ValueType::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTimeOffset(_) => { use tiberius::time::chrono::{DateTime, Utc}; - Value::DateTime(DateTime::::from_sql(&dt)?) + ValueType::DateTime(DateTime::::from_sql(&dt)?) } - ColumnData::Xml(cow) => Value::Xml(cow.map(|xml_data| Cow::Owned(xml_data.into_owned().into_string()))), + ColumnData::Xml(cow) => ValueType::Xml(cow.map(|xml_data| Cow::Owned(xml_data.into_owned().into_string()))), }; - Ok(res) + Ok(Value::from(res)) } } diff --git a/quaint/src/connector/mssql/error.rs b/quaint/src/connector/mssql/error.rs index cbee867e1f03..f9b6f5e95ab6 100644 --- a/quaint/src/connector/mssql/error.rs +++ b/quaint/src/connector/mssql/error.rs @@ -4,7 +4,10 @@ use tiberius::error::IoErrorKind; impl From for Error { fn from(e: tiberius::error::Error) -> Error { match e { - tiberius::error::Error::Io { kind, message } if kind == IoErrorKind::UnexpectedEof => { + tiberius::error::Error::Io { + kind: IoErrorKind::UnexpectedEof, + message, + } => { let mut builder = Error::builder(ErrorKind::ConnectionClosed); builder.set_original_message(message); builder.build() diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index e4be7b47c404..4b6f27a583da 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -24,6 +24,8 @@ use std::{ use tokio::sync::Mutex; use url::{Host, Url}; +pub use error::MysqlError; + /// The underlying MySQL driver. Only available with the `expose-drivers` /// Cargo feature. 
#[cfg(feature = "expose-drivers")] @@ -33,7 +35,6 @@ use super::IsolationLevel; /// A connector interface for the MySQL database. #[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct Mysql { pub(crate) conn: Mutex, pub(crate) url: MysqlUrl, @@ -44,7 +45,6 @@ pub struct Mysql { /// Wraps a connection url and exposes the parsing logic used by quaint, including default values. #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct MysqlUrl { url: Url, query_params: MysqlUrlQueryParams, @@ -560,7 +560,7 @@ impl Queryable for Mysql { let version_string = rows .get(0) - .and_then(|row| row.get("version").and_then(|version| version.to_string())); + .and_then(|row| row.get("version").and_then(|version| version.typed.to_string())); Ok(version_string) } diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs index ea634f8dc87f..659cc0790c07 100644 --- a/quaint/src/connector/mysql/conversion.rs +++ b/quaint/src/connector/mysql/conversion.rs @@ -1,9 +1,8 @@ use crate::{ - ast::Value, + ast::{Value, ValueType}, connector::{queryable::TakeRow, TypeIdentifier}, error::{Error, ErrorKind}, }; -#[cfg(feature = "chrono")] use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike, Utc}; use mysql_async::{ self as my, @@ -20,18 +19,18 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { let mut values = Vec::with_capacity(params.len()); for pv in params { - let res = match pv { - Value::Int32(i) => i.map(|i| my::Value::Int(i as i64)), - Value::Int64(i) => i.map(my::Value::Int), - Value::Float(f) => f.map(my::Value::Float), - Value::Double(f) => f.map(my::Value::Double), - Value::Text(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), - Value::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())), - Value::Enum(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), - Value::Boolean(b) => b.map(|b| my::Value::Int(b as i64)), - Value::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])), - Value::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())), - Value::Array(_) => { + let res = match &pv.typed { + ValueType::Int32(i) => i.map(|i| my::Value::Int(i as i64)), + ValueType::Int64(i) => i.map(my::Value::Int), + ValueType::Float(f) => f.map(my::Value::Float), + ValueType::Double(f) => f.map(my::Value::Double), + ValueType::Text(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), + ValueType::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())), + ValueType::Enum(s, _) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), + ValueType::Boolean(b) => b.map(|b| my::Value::Int(b as i64)), + ValueType::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])), + ValueType::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())), + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in MySQL."; let kind = ErrorKind::conversion(msg); @@ -40,10 +39,9 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { return Err(builder.build()); } - #[cfg(feature = "bigdecimal")] - Value::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), - #[cfg(feature = "json")] - Value::Json(s) => match s { + + ValueType::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), + ValueType::Json(s) => match s { Some(ref s) => { let json = 
serde_json::to_string(s)?; let bytes = json.into_bytes(); @@ -52,18 +50,14 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { } None => None, }, - #[cfg(feature = "uuid")] - Value::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())), - #[cfg(feature = "chrono")] - Value::Date(d) => { + ValueType::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())), + ValueType::Date(d) => { d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0)) } - #[cfg(feature = "chrono")] - Value::Time(t) => { + ValueType::Time(t) => { t.map(|t| my::Value::Time(false, 0, t.hour() as u8, t.minute() as u8, t.second() as u8, 0)) } - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.map(|dt| { + ValueType::DateTime(dt) => dt.map(|dt| { my::Value::Date( dt.year() as u16, dt.month() as u8, @@ -227,7 +221,6 @@ impl TakeRow for my::Row { let res = match value { // JSON is returned as bytes. - #[cfg(feature = "json")] my::Value::Bytes(b) if column.is_json() => { serde_json::from_slice(&b).map(Value::json).map_err(|_| { let msg = "Unable to convert bytes to JSON"; @@ -241,7 +234,6 @@ impl TakeRow for my::Row { Value::enum_variant(s) } // NEWDECIMAL returned as bytes. See https://mariadb.com/kb/en/resultset-row/#decimal-binary-encoding - #[cfg(feature = "bigdecimal")] my::Value::Bytes(b) if column.is_real() => { let s = String::from_utf8(b).map_err(|_| { let msg = "Could not convert NEWDECIMAL from bytes to String."; @@ -276,7 +268,6 @@ impl TakeRow for my::Row { })?), my::Value::Float(f) => Value::from(f), my::Value::Double(f) => Value::from(f), - #[cfg(feature = "chrono")] my::Value::Date(year, month, day, hour, min, sec, micro) => { if day == 0 || month == 0 { let msg = format!( @@ -294,7 +285,6 @@ impl TakeRow for my::Row { Value::datetime(DateTime::::from_utc(dt, Utc)) } - #[cfg(feature = "chrono")] my::Value::Time(is_neg, days, hours, minutes, seconds, micros) => { if is_neg { let kind = ErrorKind::conversion("Failed to convert a negative time"); @@ -311,25 +301,21 @@ impl TakeRow for my::Row { Value::time(time) } my::Value::NULL => match column { - t if t.is_bool() => Value::Boolean(None), - t if t.is_enum() => Value::Enum(None), - t if t.is_null() => Value::Int32(None), - t if t.is_int64() => Value::Int64(None), - t if t.is_int32() => Value::Int32(None), - t if t.is_float() => Value::Float(None), - t if t.is_double() => Value::Double(None), - t if t.is_text() => Value::Text(None), - t if t.is_bytes() => Value::Bytes(None), - #[cfg(feature = "bigdecimal")] - t if t.is_real() => Value::Numeric(None), - #[cfg(feature = "chrono")] - t if t.is_datetime() => Value::DateTime(None), - #[cfg(feature = "chrono")] - t if t.is_time() => Value::Time(None), - #[cfg(feature = "chrono")] - t if t.is_date() => Value::Date(None), - #[cfg(feature = "json")] - t if t.is_json() => Value::Json(None), + t if t.is_bool() => Value::null_boolean(), + t if t.is_enum() => Value::null_enum(), + t if t.is_null() => Value::null_int32(), + t if t.is_int64() => Value::null_int64(), + t if t.is_int32() => Value::null_int32(), + t if t.is_float() => Value::null_float(), + t if t.is_double() => Value::null_double(), + t if t.is_text() => Value::null_text(), + t if t.is_bytes() => Value::null_bytes(), + + t if t.is_real() => Value::null_numeric(), + t if t.is_datetime() => Value::null_datetime(), + t if t.is_time() => Value::null_time(), + t if t.is_date() => Value::null_date(), + t if t.is_json() => Value::null_json(), typ => { let msg = format!("Value 
of type {typ:?} is not supported with the current configuration"); @@ -337,16 +323,6 @@ impl TakeRow for my::Row { return Err(Error::builder(kind).build()); } }, - #[cfg(not(feature = "chrono"))] - typ => { - let msg = format!( - "Value of type {:?} is not supported with the current configuration", - typ - ); - - let kind = ErrorKind::conversion(msg); - Err(Error::builder(kind).build())? - } }; Ok(res) diff --git a/quaint/src/connector/mysql/error.rs b/quaint/src/connector/mysql/error.rs index 8b381e1581bb..dd7c3d3bfa66 100644 --- a/quaint/src/connector/mysql/error.rs +++ b/quaint/src/connector/mysql/error.rs @@ -1,22 +1,29 @@ use crate::error::{DatabaseConstraint, Error, ErrorKind}; use mysql_async as my; -impl From for Error { - fn from(e: my::Error) -> Error { - use my::ServerError; +pub struct MysqlError { + pub code: u16, + pub message: String, + pub state: String, +} - match e { - my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { - message: err.to_string(), - }) - .build(), - my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { - Error::builder(ErrorKind::ConnectionClosed).build() - } - my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), - my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), - my::Error::Server(ServerError { ref message, code, .. }) if code == 1062 => { - let constraint = message +impl From<&my::ServerError> for MysqlError { + fn from(value: &my::ServerError) -> Self { + MysqlError { + code: value.code, + message: value.message.to_owned(), + state: value.state.to_owned(), + } + } +} + +impl From for Error { + fn from(error: MysqlError) -> Self { + let code = error.code; + match code { + 1062 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -29,12 +36,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1451 || code == 1452 => { - let constraint = message + 1451 | 1452 => { + let constraint = error + .message .split_whitespace() .nth(17) .and_then(|s| s.split('`').nth(1)) @@ -45,12 +53,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1263 => { - let constraint = message + 1263 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -62,22 +71,23 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1264 => { + 1264 => { let mut builder = Error::builder(ErrorKind::ValueOutOfRange { - message: message.clone(), + message: error.message.clone(), }); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1364 || code == 1048 => { - let constraint = message + 1364 | 1048 => { + let constraint = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -88,12 +98,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1049 => { - let db_name = message + 1049 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -103,12 +114,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1007 => { - let db_name = message + 1007 => { + let db_name = error + .message .split_whitespace() .nth(3) .and_then(|s| s.split('\'').nth(1)) @@ -118,12 +130,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1044 => { - let db_name = message + 1044 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -133,12 +146,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1045 => { - let user = message + 1045 => { + let user = error + .message .split_whitespace() .nth(4) .and_then(|s| s.split('@').next()) @@ -149,12 +163,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1146 => { - let table = message + 1146 => { + let table = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -165,12 +180,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1054 => { - let column = message + 1054 => { + let column = error + .message .split_whitespace() .nth(2) .and_then(|s| s.split('\'').nth(1)) @@ -179,68 +195,77 @@ impl From for Error { let mut builder = Error::builder(ErrorKind::ColumnNotFound { column }); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1406 => { - let column = message.split_whitespace().flat_map(|s| s.split('\'')).nth(6).into(); + 1406 => { + let column = error + .message + .split_whitespace() + .flat_map(|s| s.split('\'')) + .nth(6) + .into(); let kind = ErrorKind::LengthMismatch { column }; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1191 => { + 1191 => { let kind = ErrorKind::MissingFullTextSearchIndex; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1213 => { + 1213 => { let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - ref state, - }) => { + _ => { let kind = ErrorKind::QueryError( - my::Error::Server(ServerError { - message: message.clone(), + my::Error::Server(my::ServerError { + message: error.message.clone(), code, - state: state.clone(), + state: error.state.clone(), }) .into(), ); let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } + } + } +} + +impl From for Error { + fn from(e: my::Error) -> Error { + match e { + my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { + message: err.to_string(), + }) + .build(), + my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { + Error::builder(ErrorKind::ConnectionClosed).build() + } + my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), + my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), + my::Error::Server(ref server_error) => { + let mysql_error: MysqlError = server_error.into(); + mysql_error.into() + } e => Error::builder(ErrorKind::QueryError(e.into())).build(), } } diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index d4dc008bd5f9..766be38b27e4 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -27,6 +27,8 @@ use tokio_postgres::{ }; use url::{Host, Url}; +pub use error::PostgresError; + pub(crate) const DEFAULT_SCHEMA: &str = "public"; /// The underlying postgres driver. Only available with the `expose-drivers` @@ -55,7 +57,6 @@ impl Debug for PostgresClient { /// A connector interface for the PostgreSQL database. 
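// [Editor's sketch — not part of the diff] What the new MysqlError type buys:
// error classification no longer needs a live mysql_async::ServerError, so the
// same code-based mapping can be driven from anything that yields a
// code/message/state triple. The literal values below are invented for the
// example.
fn classify_duplicate_key_sketch() -> Error {
    let raw = MysqlError {
        code: 1062,
        message: "Duplicate entry '1' for key 'users.PRIMARY'".to_string(),
        state: "23000".to_string(),
    };

    // From<MysqlError> for Error (above) maps code 1062 to a unique-constraint
    // violation and keeps the original code and message on the built error.
    Error::from(raw)
}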
#[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct PostgreSql { client: PostgresClient, pg_bouncer: bool, @@ -65,14 +66,12 @@ pub struct PostgreSql { } #[derive(Debug, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub enum SslAcceptMode { Strict, AcceptInvalidCerts, } #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct SslParams { certificate_file: Option, identity_file: Option, @@ -180,7 +179,6 @@ impl PostgresFlavour { /// Wraps a connection url and exposes the parsing logic used by Quaint, /// including default values. #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct PostgresUrl { url: Url, query_params: PostgresUrlQueryParams, @@ -1178,7 +1176,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1230,7 +1228,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1281,7 +1279,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1332,7 +1330,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1383,7 +1381,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1480,9 +1478,7 @@ mod tests { let url = Url::parse(&CONN_STR).unwrap(); let conn = Quaint::new(url.as_str()).await.unwrap(); - let res = conn - .query_raw("SELECT $1", &[Value::integer(1), Value::integer(2)]) - .await; + let res = conn.query_raw("SELECT $1", &[Value::int32(1), Value::int32(2)]).await; assert!(res.is_err()); diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs index b4b0a256ad11..efe4debd9b94 100644 --- a/quaint/src/connector/postgres/conversion.rs +++ b/quaint/src/connector/postgres/conversion.rs @@ -1,18 +1,16 @@ -#[cfg(feature = "bigdecimal")] mod decimal; use crate::{ - ast::Value, + ast::{Value, ValueType}, connector::queryable::{GetRow, ToColumnNames}, error::{Error, ErrorKind}, }; -#[cfg(feature = "bigdecimal")] + use bigdecimal::{num_bigint::BigInt, BigDecimal, FromPrimitive, ToPrimitive}; use bit_vec::BitVec; use bytes::BytesMut; -#[cfg(feature = "chrono")] use chrono::{DateTime, NaiveDateTime, Utc}; -#[cfg(feature = "bigdecimal")] + pub(crate) use decimal::DecimalWrapper; use postgres_types::{FromSql, ToSql, WrongType}; use std::{convert::TryFrom, error::Error as StdError}; @@ -21,7 +19,6 @@ use tokio_postgres::{ Row as PostgresRow, Statement as PostgresStatement, }; -#[cfg(feature = "uuid")] use uuid::Uuid; pub(crate) fn conv_params<'a>(params: &'a [Value<'a>]) -> Vec<&'a (dyn types::ToSql + Sync)> { @@ -39,31 +36,25 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { return PostgresType::UNKNOWN; } - match p { - Value::Int32(_) => PostgresType::INT4, - Value::Int64(_) => PostgresType::INT8, - Value::Float(_) => PostgresType::FLOAT4, - Value::Double(_) => PostgresType::FLOAT8, 
- Value::Text(_) => PostgresType::TEXT, - // Enums are special types, we can't statically infer them, so we let PG infer it - Value::Enum(_) => PostgresType::UNKNOWN, - Value::Bytes(_) => PostgresType::BYTEA, - Value::Boolean(_) => PostgresType::BOOL, - Value::Char(_) => PostgresType::CHAR, - #[cfg(feature = "bigdecimal")] - Value::Numeric(_) => PostgresType::NUMERIC, - #[cfg(feature = "json")] - Value::Json(_) => PostgresType::JSONB, - Value::Xml(_) => PostgresType::XML, - #[cfg(feature = "uuid")] - Value::Uuid(_) => PostgresType::UUID, - #[cfg(feature = "chrono")] - Value::DateTime(_) => PostgresType::TIMESTAMPTZ, - #[cfg(feature = "chrono")] - Value::Date(_) => PostgresType::TIMESTAMP, - #[cfg(feature = "chrono")] - Value::Time(_) => PostgresType::TIME, - Value::Array(ref arr) => { + match &p.typed { + ValueType::Int32(_) => PostgresType::INT4, + ValueType::Int64(_) => PostgresType::INT8, + ValueType::Float(_) => PostgresType::FLOAT4, + ValueType::Double(_) => PostgresType::FLOAT8, + ValueType::Text(_) => PostgresType::TEXT, + // Enums are user-defined types, we can't statically infer them, so we let PG infer it + ValueType::Enum(_, _) | ValueType::EnumArray(_, _) => PostgresType::UNKNOWN, + ValueType::Bytes(_) => PostgresType::BYTEA, + ValueType::Boolean(_) => PostgresType::BOOL, + ValueType::Char(_) => PostgresType::CHAR, + ValueType::Numeric(_) => PostgresType::NUMERIC, + ValueType::Json(_) => PostgresType::JSONB, + ValueType::Xml(_) => PostgresType::XML, + ValueType::Uuid(_) => PostgresType::UUID, + ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ, + ValueType::Date(_) => PostgresType::TIMESTAMP, + ValueType::Time(_) => PostgresType::TIME, + ValueType::Array(ref arr) => { let arr = arr.as_ref().unwrap(); // If the array is empty, we can't infer the type so we let PG infer it @@ -76,37 +67,31 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { // If the array does not contain the same types of values, we let PG infer the type if arr .iter() - .any(|val| std::mem::discriminant(first) != std::mem::discriminant(val)) + .any(|val| std::mem::discriminant(&first.typed) != std::mem::discriminant(&val.typed)) { return PostgresType::UNKNOWN; } - match first { - Value::Int32(_) => PostgresType::INT4_ARRAY, - Value::Int64(_) => PostgresType::INT8_ARRAY, - Value::Float(_) => PostgresType::FLOAT4_ARRAY, - Value::Double(_) => PostgresType::FLOAT8_ARRAY, - Value::Text(_) => PostgresType::TEXT_ARRAY, + match first.typed { + ValueType::Int32(_) => PostgresType::INT4_ARRAY, + ValueType::Int64(_) => PostgresType::INT8_ARRAY, + ValueType::Float(_) => PostgresType::FLOAT4_ARRAY, + ValueType::Double(_) => PostgresType::FLOAT8_ARRAY, + ValueType::Text(_) => PostgresType::TEXT_ARRAY, // Enums are special types, we can't statically infer them, so we let PG infer it - Value::Enum(_) => PostgresType::UNKNOWN, - Value::Bytes(_) => PostgresType::BYTEA_ARRAY, - Value::Boolean(_) => PostgresType::BOOL_ARRAY, - Value::Char(_) => PostgresType::CHAR_ARRAY, - #[cfg(feature = "bigdecimal")] - Value::Numeric(_) => PostgresType::NUMERIC_ARRAY, - #[cfg(feature = "json")] - Value::Json(_) => PostgresType::JSONB_ARRAY, - Value::Xml(_) => PostgresType::XML_ARRAY, - #[cfg(feature = "uuid")] - Value::Uuid(_) => PostgresType::UUID_ARRAY, - #[cfg(feature = "chrono")] - Value::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, - #[cfg(feature = "chrono")] - Value::Date(_) => PostgresType::TIMESTAMP_ARRAY, - #[cfg(feature = "chrono")] - Value::Time(_) => PostgresType::TIME_ARRAY, + ValueType::Enum(_, _) | 
ValueType::EnumArray(_, _) => PostgresType::UNKNOWN, + ValueType::Bytes(_) => PostgresType::BYTEA_ARRAY, + ValueType::Boolean(_) => PostgresType::BOOL_ARRAY, + ValueType::Char(_) => PostgresType::CHAR_ARRAY, + ValueType::Numeric(_) => PostgresType::NUMERIC_ARRAY, + ValueType::Json(_) => PostgresType::JSONB_ARRAY, + ValueType::Xml(_) => PostgresType::XML_ARRAY, + ValueType::Uuid(_) => PostgresType::UUID_ARRAY, + ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, + ValueType::Date(_) => PostgresType::TIMESTAMP_ARRAY, + ValueType::Time(_) => PostgresType::TIME_ARRAY, // In the case of nested arrays, we let PG infer the type - Value::Array(_) => PostgresType::UNKNOWN, + ValueType::Array(_) => PostgresType::UNKNOWN, } } } @@ -142,10 +127,8 @@ impl<'a> FromSql<'a> for EnumString { } } -#[cfg(feature = "chrono")] struct TimeTz(chrono::NaiveTime); -#[cfg(feature = "chrono")] impl<'a> FromSql<'a> for TimeTz { fn from_sql(_ty: &PostgresType, raw: &'a [u8]) -> Result> { // We assume UTC. @@ -162,10 +145,8 @@ impl<'a> FromSql<'a> for TimeTz { /// of 2 decimals. /// /// Postgres docs: https://www.postgresql.org/docs/current/datatype-money.html -#[cfg(feature = "bigdecimal")] struct NaiveMoney(BigDecimal); -#[cfg(feature = "bigdecimal")] impl<'a> FromSql<'a> for NaiveMoney { fn from_sql(_ty: &PostgresType, raw: &'a [u8]) -> Result> { let cents = i64::from_sql(&PostgresType::INT8, raw)?; @@ -182,206 +163,194 @@ impl GetRow for PostgresRow { fn get_result_row(&self) -> crate::Result>> { fn convert(row: &PostgresRow, i: usize) -> crate::Result> { let result = match *row.columns()[i].type_() { - PostgresType::BOOL => Value::Boolean(row.try_get(i)?), + PostgresType::BOOL => ValueType::Boolean(row.try_get(i)?).into_value(), PostgresType::INT2 => match row.try_get(i)? { Some(val) => { let val: i16 = val; Value::int32(val) } - None => Value::Int32(None), + None => Value::null_int32(), }, PostgresType::INT4 => match row.try_get(i)? { Some(val) => { let val: i32 = val; Value::int32(val) } - None => Value::Int32(None), + None => Value::null_int32(), }, PostgresType::INT8 => match row.try_get(i)? { Some(val) => { let val: i64 = val; Value::int64(val) } - None => Value::Int64(None), + None => Value::null_int64(), }, PostgresType::FLOAT4 => match row.try_get(i)? { Some(val) => { let val: f32 = val; Value::float(val) } - None => Value::Float(None), + None => Value::null_float(), }, PostgresType::FLOAT8 => match row.try_get(i)? { Some(val) => { let val: f64 = val; Value::double(val) } - None => Value::Double(None), + None => Value::null_double(), }, PostgresType::BYTEA => match row.try_get(i)? { Some(val) => { let val: &[u8] = val; Value::bytes(val.to_owned()) } - None => Value::Bytes(None), + None => Value::null_bytes(), }, PostgresType::BYTEA_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec>> = val; - let byteas = val.into_iter().map(|b| Value::Bytes(b.map(Into::into))); + let byteas = val.into_iter().map(|b| ValueType::Bytes(b.map(Into::into))); Value::array(byteas) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "bigdecimal")] PostgresType::NUMERIC => { let dw: Option = row.try_get(i)?; - Value::Numeric(dw.map(|dw| dw.0)) + ValueType::Numeric(dw.map(|dw| dw.0)).into_value() } - #[cfg(feature = "bigdecimal")] PostgresType::MONEY => match row.try_get(i)? 
{ Some(val) => { let val: NaiveMoney = val; Value::numeric(val.0) } - None => Value::Numeric(None), + None => Value::null_numeric(), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMP => match row.try_get(i)? { Some(val) => { let ts: NaiveDateTime = val; let dt = DateTime::::from_utc(ts, Utc); Value::datetime(dt) } - None => Value::DateTime(None), + None => Value::null_datetime(), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMPTZ => match row.try_get(i)? { Some(val) => { let ts: DateTime = val; Value::datetime(ts) } - None => Value::DateTime(None), + None => Value::null_datetime(), }, - #[cfg(feature = "chrono")] PostgresType::DATE => match row.try_get(i)? { Some(val) => Value::date(val), - None => Value::Date(None), + None => Value::null_date(), }, - #[cfg(feature = "chrono")] PostgresType::TIME => match row.try_get(i)? { Some(val) => Value::time(val), - None => Value::Time(None), + None => Value::null_time(), }, - #[cfg(feature = "chrono")] PostgresType::TIMETZ => match row.try_get(i)? { Some(val) => { let time: TimeTz = val; Value::time(time.0) } - None => Value::Time(None), + None => Value::null_time(), }, - #[cfg(feature = "uuid")] PostgresType::UUID => match row.try_get(i)? { Some(val) => { let val: Uuid = val; Value::uuid(val) } - None => Value::Uuid(None), + None => ValueType::Uuid(None).into_value(), }, - #[cfg(feature = "uuid")] PostgresType::UUID_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let val = val.into_iter().map(Value::Uuid); + let val = val.into_iter().map(ValueType::Uuid); Value::array(val) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "json")] - PostgresType::JSON | PostgresType::JSONB => Value::Json(row.try_get(i)?), + PostgresType::JSON | PostgresType::JSONB => ValueType::Json(row.try_get(i)?).into_value(), PostgresType::INT2_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let ints = val.into_iter().map(|i| Value::Int32(i.map(|i| i as i32))); + let ints = val.into_iter().map(|i| ValueType::Int32(i.map(|i| i as i32))); Value::array(ints) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::INT4_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let ints = val.into_iter().map(Value::Int32); + let ints = val.into_iter().map(ValueType::Int32); Value::array(ints) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::INT8_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let ints = val.into_iter().map(Value::Int64); + let ints = val.into_iter().map(ValueType::Int64); Value::array(ints) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::FLOAT4_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let floats = val.into_iter().map(Value::Float); + let floats = val.into_iter().map(ValueType::Float); Value::array(floats) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::FLOAT8_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let floats = val.into_iter().map(Value::Double); + let floats = val.into_iter().map(ValueType::Double); Value::array(floats) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::BOOL_ARRAY => match row.try_get(i)? 
{ Some(val) => { let val: Vec> = val; - let bools = val.into_iter().map(Value::Boolean); + let bools = val.into_iter().map(ValueType::Boolean); Value::array(bools) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMP_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; let dates = val .into_iter() - .map(|dt| Value::DateTime(dt.map(|dt| DateTime::::from_utc(dt, Utc)))); + .map(|dt| ValueType::DateTime(dt.map(|dt| DateTime::::from_utc(dt, Utc)))); Value::array(dates) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "bigdecimal")] PostgresType::NUMERIC_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; let decimals = val .into_iter() - .map(|dec| Value::Numeric(dec.map(|dec| dec.0.to_string().parse().unwrap()))); + .map(|dec| ValueType::Numeric(dec.map(|dec| dec.0.to_string().parse().unwrap()))); Value::array(decimals) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::TEXT_ARRAY | PostgresType::NAME_ARRAY | PostgresType::VARCHAR_ARRAY => { match row.try_get(i)? { @@ -390,148 +359,139 @@ impl GetRow for PostgresRow { Value::array(strings.into_iter().map(|s| s.map(|s| s.to_string()))) } - None => Value::Array(None), + None => Value::null_array(), } } - #[cfg(feature = "bigdecimal")] PostgresType::MONEY_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let nums = val.into_iter().map(|num| Value::Numeric(num.map(|num| num.0))); + let nums = val.into_iter().map(|num| ValueType::Numeric(num.map(|num| num.0))); Value::array(nums) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::OID_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let nums = val.into_iter().map(|oid| Value::Int64(oid.map(|oid| oid as i64))); + let nums = val.into_iter().map(|oid| ValueType::Int64(oid.map(|oid| oid as i64))); Value::array(nums) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMPTZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec>> = val; - let dates = val.into_iter().map(Value::DateTime); + let dates = val.into_iter().map(ValueType::DateTime); Value::array(dates) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "chrono")] PostgresType::DATE_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let dates = val.into_iter().map(Value::Date); + let dates = val.into_iter().map(ValueType::Date); Value::array(dates) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "chrono")] PostgresType::TIME_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let times = val.into_iter().map(Value::Time); + let times = val.into_iter().map(ValueType::Time); Value::array(times) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "chrono")] PostgresType::TIMETZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let timetzs = val.into_iter().map(|time| Value::Time(time.map(|time| time.0))); + let timetzs = val.into_iter().map(|time| ValueType::Time(time.map(|time| time.0))); Value::array(timetzs) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "json")] PostgresType::JSON_ARRAY => match row.try_get(i)? 
{ Some(val) => { let val: Vec> = val; - let jsons = val.into_iter().map(Value::Json); + let jsons = val.into_iter().map(ValueType::Json); Value::array(jsons) } - None => Value::Array(None), + None => Value::null_array(), }, - #[cfg(feature = "json")] PostgresType::JSONB_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let jsons = val.into_iter().map(Value::Json); + let jsons = val.into_iter().map(ValueType::Json); Value::array(jsons) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::OID => match row.try_get(i)? { Some(val) => { let val: u32 = val; Value::int64(val) } - None => Value::Int64(None), + None => Value::null_int64(), }, PostgresType::CHAR => match row.try_get(i)? { Some(val) => { let val: i8 = val; Value::character((val as u8) as char) } - None => Value::Char(None), + None => Value::null_character(), }, PostgresType::INET | PostgresType::CIDR => match row.try_get(i)? { Some(val) => { let val: std::net::IpAddr = val; Value::text(val.to_string()) } - None => Value::Text(None), + None => Value::null_text(), }, PostgresType::INET_ARRAY | PostgresType::CIDR_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; let addrs = val .into_iter() - .map(|ip| Value::Text(ip.map(|ip| ip.to_string().into()))); + .map(|ip| ValueType::Text(ip.map(|ip| ip.to_string().into()))); Value::array(addrs) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::BIT | PostgresType::VARBIT => match row.try_get(i)? { Some(val) => { let val: BitVec = val; Value::text(bits_to_string(&val)?) } - None => Value::Text(None), + None => Value::null_text(), }, PostgresType::BIT_ARRAY | PostgresType::VARBIT_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let stringified = val - .into_iter() + val.into_iter() .map(|bits| match bits { - Some(bits) => bits_to_string(&bits).map(Value::text), - None => Ok(Value::Text(None)), + Some(bits) => bits_to_string(&bits).map(|s| ValueType::Text(Some(s.into()))), + None => Ok(ValueType::Text(None)), }) - .collect::>>()?; - - Value::array(stringified) + .collect::>>() + .map(Value::array)? } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::XML => match row.try_get(i)? { Some(val) => { let val: XmlString = val; Value::xml(val.0) } - None => Value::Xml(None), + None => Value::null_xml(), }, PostgresType::XML_ARRAY => match row.try_get(i)? { Some(val) => { @@ -540,35 +500,37 @@ impl GetRow for PostgresRow { Value::array(xmls) } - None => Value::Array(None), + None => Value::null_array(), }, ref x => match x.kind() { - Kind::Enum(_) => match row.try_get(i)? { + Kind::Enum => match row.try_get(i)? { Some(val) => { let val: EnumString = val; Value::enum_variant(val.value) } - None => Value::Enum(None), + None => Value::null_enum(), }, Kind::Array(inner) => match inner.kind() { - Kind::Enum(_) => match row.try_get(i)? { + Kind::Enum => match row.try_get(i)? 
{ Some(val) => { let val: Vec> = val; - let variants = val.into_iter().map(|x| Value::Enum(x.map(|x| x.value.into()))); + let variants = val + .into_iter() + .map(|x| ValueType::Enum(x.map(|x| x.value.into()), None)); Ok(Value::array(variants)) } - None => Ok(Value::Array(None)), + None => Ok(Value::null_array()), }, _ => match row.try_get(i) { Ok(Some(val)) => { let val: Vec> = val; - let strings = val.into_iter().map(|str| Value::Text(str.map(Into::into))); + let strings = val.into_iter().map(|str| ValueType::Text(str.map(Into::into))); Ok(Value::array(strings)) } - Ok(None) => Ok(Value::Array(None)), + Ok(None) => Ok(Value::null_array()), Err(err) => { if err.source().map(|err| err.is::()).unwrap_or(false) { let kind = ErrorKind::UnsupportedColumnType { @@ -588,7 +550,7 @@ impl GetRow for PostgresRow { Ok(Value::text(val)) } - Ok(None) => Ok(Value::Text(None)), + Ok(None) => Ok(Value::from(ValueType::Text(None))), Err(err) => { if err.source().map(|err| err.is::()).unwrap_or(false) { let kind = ErrorKind::UnsupportedColumnType { @@ -630,8 +592,8 @@ impl<'a> ToSql for Value<'a> { ty: &PostgresType, out: &mut BytesMut, ) -> Result> { - let res = match (self, ty) { - (Value::Int32(integer), &PostgresType::INT2) => match integer { + let res = match (&self.typed, ty) { + (ValueType::Int32(integer), &PostgresType::INT2) => match integer { Some(i) => { let integer = i16::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -645,9 +607,9 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int32(integer), &PostgresType::INT4) => integer.map(|integer| integer.to_sql(ty, out)), - (Value::Int32(integer), &PostgresType::INT8) => integer.map(|integer| (integer as i64).to_sql(ty, out)), - (Value::Int64(integer), &PostgresType::INT2) => match integer { + (ValueType::Int32(integer), &PostgresType::INT4) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Int32(integer), &PostgresType::INT8) => integer.map(|integer| (integer as i64).to_sql(ty, out)), + (ValueType::Int64(integer), &PostgresType::INT2) => match integer { Some(i) => { let integer = i16::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -661,7 +623,7 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int64(integer), &PostgresType::INT4) => match integer { + (ValueType::Int64(integer), &PostgresType::INT4) => match integer { Some(i) => { let integer = i32::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -675,20 +637,22 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int64(integer), &PostgresType::INT8) => integer.map(|integer| integer.to_sql(ty, out)), - #[cfg(feature = "bigdecimal")] - (Value::Int32(integer), &PostgresType::NUMERIC) => integer + (ValueType::Int64(integer), &PostgresType::INT8) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Int32(integer), &PostgresType::NUMERIC) => integer .map(|integer| BigDecimal::from_i32(integer).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - #[cfg(feature = "bigdecimal")] - (Value::Int64(integer), &PostgresType::NUMERIC) => integer + (ValueType::Int64(integer), &PostgresType::NUMERIC) => integer .map(|integer| BigDecimal::from_i64(integer).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - (Value::Int32(integer), &PostgresType::TEXT) => integer.map(|integer| format!("{integer}").to_sql(ty, out)), - (Value::Int64(integer), &PostgresType::TEXT) => integer.map(|integer| format!("{integer}").to_sql(ty, out)), - (Value::Int32(integer), &PostgresType::OID) 
=> match integer { + (ValueType::Int32(integer), &PostgresType::TEXT) => { + integer.map(|integer| format!("{integer}").to_sql(ty, out)) + } + (ValueType::Int64(integer), &PostgresType::TEXT) => { + integer.map(|integer| format!("{integer}").to_sql(ty, out)) + } + (ValueType::Int32(integer), &PostgresType::OID) => match integer { Some(i) => { let integer = u32::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -702,7 +666,7 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int64(integer), &PostgresType::OID) => match integer { + (ValueType::Int64(integer), &PostgresType::OID) => match integer { Some(i) => { let integer = u32::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -716,43 +680,38 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int32(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), - (Value::Int64(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), - (Value::Float(float), &PostgresType::FLOAT8) => float.map(|float| (float as f64).to_sql(ty, out)), - #[cfg(feature = "bigdecimal")] - (Value::Float(float), &PostgresType::NUMERIC) => float + (ValueType::Int32(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Int64(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Float(float), &PostgresType::FLOAT8) => float.map(|float| (float as f64).to_sql(ty, out)), + (ValueType::Float(float), &PostgresType::NUMERIC) => float .map(|float| BigDecimal::from_f32(float).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - (Value::Float(float), _) => float.map(|float| float.to_sql(ty, out)), - (Value::Double(double), &PostgresType::FLOAT4) => double.map(|double| (double as f32).to_sql(ty, out)), - #[cfg(feature = "bigdecimal")] - (Value::Double(double), &PostgresType::NUMERIC) => double + (ValueType::Float(float), _) => float.map(|float| float.to_sql(ty, out)), + (ValueType::Double(double), &PostgresType::FLOAT4) => double.map(|double| (double as f32).to_sql(ty, out)), + (ValueType::Double(double), &PostgresType::NUMERIC) => double .map(|double| BigDecimal::from_f64(double).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - (Value::Double(double), _) => double.map(|double| double.to_sql(ty, out)), - #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::FLOAT4) => decimal.as_ref().map(|decimal| { + (ValueType::Double(double), _) => double.map(|double| double.to_sql(ty, out)), + (ValueType::Numeric(decimal), &PostgresType::FLOAT4) => decimal.as_ref().map(|decimal| { let f = decimal.to_string().parse::().expect("decimal to f32 conversion"); f.to_sql(ty, out) }), - #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::FLOAT8) => decimal.as_ref().map(|decimal| { + (ValueType::Numeric(decimal), &PostgresType::FLOAT8) => decimal.as_ref().map(|decimal| { let f = decimal.to_string().parse::().expect("decimal to f64 conversion"); f.to_sql(ty, out) }), - #[cfg(feature = "bigdecimal")] - (Value::Array(values), &PostgresType::FLOAT4_ARRAY) => values.as_ref().map(|values| { + (ValueType::Array(values), &PostgresType::FLOAT4_ARRAY) => values.as_ref().map(|values| { let mut floats = Vec::with_capacity(values.len()); for value in values.iter() { - let float = match value { - Value::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), - Value::Int64(n) => n.map(|n| n as f32), - Value::Float(f) => *f, - Value::Double(d) => d.map(|d| d as f32), - v if v.is_null() => None, + let float = match 
&value.typed { + ValueType::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), + ValueType::Int64(n) => n.map(|n| n as f32), + ValueType::Float(f) => *f, + ValueType::Double(d) => d.map(|d| d as f32), + _ if value.is_null() => None, v => { let kind = ErrorKind::conversion(format!( "Couldn't add value of type `{v:?}` into a float array." @@ -767,16 +726,15 @@ impl<'a> ToSql for Value<'a> { floats.to_sql(ty, out) }), - #[cfg(feature = "bigdecimal")] - (Value::Array(values), &PostgresType::FLOAT8_ARRAY) => values.as_ref().map(|values| { + (ValueType::Array(values), &PostgresType::FLOAT8_ARRAY) => values.as_ref().map(|values| { let mut floats = Vec::with_capacity(values.len()); for value in values.iter() { - let float = match value { - Value::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), - Value::Int64(n) => n.map(|n| n as f64), - Value::Float(f) => f.map(|f| f as f64), - Value::Double(d) => *d, + let float = match &value.typed { + ValueType::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), + ValueType::Int64(n) => n.map(|n| n as f64), + ValueType::Float(f) => f.map(|f| f as f64), + ValueType::Double(d) => *d, v if v.is_null() => None, v => { let kind = ErrorKind::conversion(format!( @@ -792,8 +750,7 @@ impl<'a> ToSql for Value<'a> { floats.to_sql(ty, out) }), - #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::MONEY) => decimal.as_ref().map(|decimal| { + (ValueType::Numeric(decimal), &PostgresType::MONEY) => decimal.as_ref().map(|decimal| { let decimal = (decimal * BigInt::from_i32(100).unwrap()).round(0); let i = decimal.to_i64().ok_or_else(|| { @@ -803,21 +760,17 @@ impl<'a> ToSql for Value<'a> { i.to_sql(ty, out) }), - #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::NUMERIC) => decimal + (ValueType::Numeric(decimal), &PostgresType::NUMERIC) => decimal .as_ref() .map(|decimal| DecimalWrapper(decimal.clone()).to_sql(ty, out)), - #[cfg(feature = "bigdecimal")] - (Value::Numeric(float), _) => float + (ValueType::Numeric(float), _) => float .as_ref() .map(|float| DecimalWrapper(float.clone()).to_sql(ty, out)), - #[cfg(feature = "uuid")] - (Value::Text(string), &PostgresType::UUID) => string.as_ref().map(|string| { + (ValueType::Text(string), &PostgresType::UUID) => string.as_ref().map(|string| { let parsed_uuid: Uuid = string.parse()?; parsed_uuid.to_sql(ty, out) }), - #[cfg(feature = "uuid")] - (Value::Array(values), &PostgresType::UUID_ARRAY) => values.as_ref().map(|values| { + (ValueType::Array(values), &PostgresType::UUID_ARRAY) => values.as_ref().map(|values| { let parsed_uuid: Vec> = values .iter() .map(>::try_from) @@ -825,83 +778,82 @@ impl<'a> ToSql for Value<'a> { parsed_uuid.to_sql(ty, out) }), - (Value::Text(string), &PostgresType::INET) | (Value::Text(string), &PostgresType::CIDR) => { + (ValueType::Text(string), &PostgresType::INET) | (ValueType::Text(string), &PostgresType::CIDR) => { string.as_ref().map(|string| { let parsed_ip_addr: std::net::IpAddr = string.parse()?; parsed_ip_addr.to_sql(ty, out) }) } - (Value::Array(values), &PostgresType::INET_ARRAY) | (Value::Array(values), &PostgresType::CIDR_ARRAY) => { - values.as_ref().map(|values| { - let parsed_ip_addr: Vec> = values - .iter() - .map(>::try_from) - .collect::>()?; + (ValueType::Array(values), &PostgresType::INET_ARRAY) + | (ValueType::Array(values), &PostgresType::CIDR_ARRAY) => values.as_ref().map(|values| { + let parsed_ip_addr: Vec> = values + .iter() + .map(>::try_from) + .collect::>()?; - 
parsed_ip_addr.to_sql(ty, out) - }) - } - #[cfg(feature = "json")] - (Value::Text(string), &PostgresType::JSON) | (Value::Text(string), &PostgresType::JSONB) => string + parsed_ip_addr.to_sql(ty, out) + }), + (ValueType::Text(string), &PostgresType::JSON) | (ValueType::Text(string), &PostgresType::JSONB) => string .as_ref() .map(|string| serde_json::from_str::(string)?.to_sql(ty, out)), - (Value::Text(string), &PostgresType::BIT) | (Value::Text(string), &PostgresType::VARBIT) => { + (ValueType::Text(string), &PostgresType::BIT) | (ValueType::Text(string), &PostgresType::VARBIT) => { string.as_ref().map(|string| { let bits: BitVec = string_to_bits(string)?; bits.to_sql(ty, out) }) } - (Value::Text(string), _) => string.as_ref().map(|ref string| string.to_sql(ty, out)), - (Value::Array(values), &PostgresType::BIT_ARRAY) | (Value::Array(values), &PostgresType::VARBIT_ARRAY) => { - values.as_ref().map(|values| { - let bitvecs: Vec> = values - .iter() - .map(>::try_from) - .collect::>>()?; + (ValueType::Text(string), _) => string.as_ref().map(|ref string| string.to_sql(ty, out)), + (ValueType::Array(values), &PostgresType::BIT_ARRAY) + | (ValueType::Array(values), &PostgresType::VARBIT_ARRAY) => values.as_ref().map(|values| { + let bitvecs: Vec> = values + .iter() + .map(|value| value.try_into()) + .collect::>>()?; - bitvecs.to_sql(ty, out) - }) - } - (Value::Bytes(bytes), _) => bytes.as_ref().map(|bytes| bytes.as_ref().to_sql(ty, out)), - (Value::Enum(string), _) => string.as_ref().map(|string| { + bitvecs.to_sql(ty, out) + }), + (ValueType::Bytes(bytes), _) => bytes.as_ref().map(|bytes| bytes.as_ref().to_sql(ty, out)), + (ValueType::Enum(string, _), _) => string.as_ref().map(|string| { out.extend_from_slice(string.as_bytes()); Ok(IsNull::No) }), - (Value::Boolean(boo), _) => boo.map(|boo| boo.to_sql(ty, out)), - (Value::Char(c), _) => c.map(|c| (c as i8).to_sql(ty, out)), - (Value::Array(vec), typ) if matches!(typ.kind(), Kind::Array(_)) => { + (ValueType::Boolean(boo), _) => boo.map(|boo| boo.to_sql(ty, out)), + (ValueType::Char(c), _) => c.map(|c| (c as i8).to_sql(ty, out)), + (ValueType::Array(vec), typ) if matches!(typ.kind(), Kind::Array(_)) => { vec.as_ref().map(|vec| vec.to_sql(ty, out)) } - (Value::Array(vec), typ) => { + (ValueType::EnumArray(variants, _), typ) if matches!(typ.kind(), Kind::Array(_)) => variants + .as_ref() + .map(|vec| vec.iter().map(|val| val.as_ref()).collect::>().to_sql(ty, out)), + (ValueType::EnumArray(variants, _), typ) => { + let kind = ErrorKind::conversion(format!( + "Couldn't serialize value `{variants:?}` into a `{typ}`. Value is a list but `{typ}` is not." + )); + + return Err(Error::builder(kind).build().into()); + } + (ValueType::Array(vec), typ) => { let kind = ErrorKind::conversion(format!( "Couldn't serialize value `{vec:?}` into a `{typ}`. Value is a list but `{typ}` is not." 
)); return Err(Error::builder(kind).build().into()); } - #[cfg(feature = "json")] - (Value::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), - (Value::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), - #[cfg(feature = "uuid")] - (Value::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] - (Value::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)), - #[cfg(feature = "chrono")] - (Value::Date(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] - (Value::Time(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] - (Value::DateTime(value), &PostgresType::TIME) => value.map(|value| value.time().to_sql(ty, out)), - #[cfg(feature = "chrono")] - (Value::DateTime(value), &PostgresType::TIMETZ) => value.map(|value| { + (ValueType::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), + (ValueType::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), + (ValueType::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)), + (ValueType::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)), + (ValueType::Date(value), _) => value.map(|value| value.to_sql(ty, out)), + (ValueType::Time(value), _) => value.map(|value| value.to_sql(ty, out)), + (ValueType::DateTime(value), &PostgresType::TIME) => value.map(|value| value.time().to_sql(ty, out)), + (ValueType::DateTime(value), &PostgresType::TIMETZ) => value.map(|value| { let result = value.time().to_sql(ty, out)?; // We assume UTC. see https://www.postgresql.org/docs/9.5/datatype-datetime.html out.extend_from_slice(&[0; 4]); Ok(result) }), - #[cfg(feature = "chrono")] - (Value::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), + (ValueType::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), }; match res { @@ -957,12 +909,18 @@ impl<'a> TryFrom<&Value<'a>> for Option { fn try_from(value: &Value<'a>) -> Result, Self::Error> { match value { - val @ Value::Text(Some(_)) => { + val @ Value { + typed: ValueType::Text(Some(_)), + .. + } => { let text = val.as_str().unwrap(); string_to_bits(text).map(Option::Some) } - val @ Value::Bytes(Some(_)) => { + val @ Value { + typed: ValueType::Bytes(Some(_)), + .. 
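Editor's aside on the pattern running through the conversion hunks above: `Value` now wraps a `ValueType` (matched via `value.typed`), and typed NULLs are built with the `Value::null_*()` constructors instead of `Value::Variant(None)` literals. The minimal sketch below only uses names visible in this diff (`ValueType`, `Value::null_text`, `Value::is_null`, `From<ValueType> for Value`); the `describe`/`main` functions and the assumption that `typed` is publicly reachable and that `ValueType` derives `Debug` are illustrative, not part of the patch.

use quaint::ast::{Value, ValueType};

fn describe(value: &Value<'_>) -> String {
    // Match on the inner ValueType rather than on Value directly.
    match &value.typed {
        ValueType::Int32(Some(i)) => format!("int32: {i}"),
        ValueType::Text(Some(s)) => format!("text: {s}"),
        _ if value.is_null() => "null".to_string(),
        other => format!("{other:?}"),
    }
}

fn main() {
    // Typed NULLs now come from dedicated constructors...
    let null_text = Value::null_text();
    // ...while non-null values can still be built from a ValueType.
    let number = Value::from(ValueType::Int32(Some(42)));

    assert_eq!(describe(&null_text), "null");
    assert_eq!(describe(&number), "int32: 42");
}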
+ } => { let text = val.as_str().unwrap(); string_to_bits(text).map(Option::Some) diff --git a/quaint/src/connector/postgres/error.rs b/quaint/src/connector/postgres/error.rs index 40634e2aa336..d4e5ec7837fe 100644 --- a/quaint/src/connector/postgres/error.rs +++ b/quaint/src/connector/postgres/error.rs @@ -1,37 +1,64 @@ +use std::fmt::{Display, Formatter}; + +use tokio_postgres::error::DbError; + use crate::error::{DatabaseConstraint, Error, ErrorKind, Name}; -impl From for Error { - fn from(e: tokio_postgres::error::Error) -> Error { - use tokio_postgres::error::DbError; +#[derive(Debug)] +pub struct PostgresError { + pub code: String, + pub message: String, + pub severity: String, + pub detail: Option, + pub column: Option, + pub hint: Option, +} - if e.is_closed() { - return Error::builder(ErrorKind::ConnectionClosed).build(); +impl std::error::Error for PostgresError {} + +impl Display for PostgresError { + // copy of DbError::fmt + fn fmt(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{}: {}", self.severity, self.message)?; + if let Some(detail) = &self.detail { + write!(fmt, "\nDETAIL: {}", detail)?; + } + if let Some(hint) = &self.hint { + write!(fmt, "\nHINT: {}", hint)?; } + Ok(()) + } +} - match e.code().map(|c| c.code()) { - Some(code) if code == "22001" => { - let code = code.to_string(); +impl From<&DbError> for PostgresError { + fn from(value: &DbError) -> Self { + PostgresError { + code: value.code().code().to_string(), + severity: value.severity().to_string(), + message: value.message().to_string(), + detail: value.detail().map(ToString::to_string), + column: value.column().map(ToString::to_string), + hint: value.hint().map(ToString::to_string), + } + } +} +impl From for Error { + fn from(value: PostgresError) -> Self { + match value.code.as_str() { + "22001" => { let mut builder = Error::builder(ErrorKind::LengthMismatch { column: Name::Unavailable, }); - builder.set_original_code(code); - - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - if let Some(db_error) = db_error { - builder.set_original_message(db_error.to_string()); - } + builder.set_original_code(&value.code); + builder.set_original_message(value.to_string()); builder.build() } - Some(code) if code == "23505" => { - let code = code.to_string(); - - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let detail = db_error.as_ref().and_then(|e| e.detail()).map(ToString::to_string); - - let constraint = detail + "23505" => { + let constraint = value + .detail .as_ref() .and_then(|d| d.split(")=(").next()) .and_then(|d| d.split(" (").nth(1).map(|s| s.replace('\"', ""))) @@ -41,189 +68,138 @@ impl From for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); - if let Some(detail) = detail { + if let Some(detail) = value.detail { builder.set_original_message(detail); } builder.build() } - // Even lipstick will not save this... - Some(code) if code == "23502" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let detail = db_error.as_ref().and_then(|e| e.detail()).map(ToString::to_string); - - let constraint = db_error - .as_ref() - .map(|e| e.column()) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); + // Even lipstick will not save this... 
+ "23502" => { + let constraint = DatabaseConstraint::fields(value.column); let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); - if let Some(detail) = detail { + if let Some(detail) = value.detail { builder.set_original_message(detail); } builder.build() } - Some(code) if code == "23503" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - - match db_error.as_ref().and_then(|e| e.column()) { - Some(column) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::fields(Some(column)), - }); - - builder.set_original_code(code); - - if let Some(message) = db_error.as_ref().map(|e| e.message()) { - builder.set_original_message(message); - } - - builder.build() - } - None => { - let constraint = db_error - .as_ref() - .map(|e| e.message()) - .and_then(|e| e.split_whitespace().nth(10)) - .and_then(|s| s.split('"').nth(1)) - .map(ToString::to_string) - .map(DatabaseConstraint::Index) - .unwrap_or(DatabaseConstraint::CannotParse); - - let kind = ErrorKind::ForeignKeyConstraintViolation { constraint }; - let mut builder = Error::builder(kind); - - builder.set_original_code(code); - - if let Some(message) = db_error.as_ref().map(|e| e.message()) { - builder.set_original_message(message); - } - - builder.build() - } - } - } - Some(code) if code == "3D000" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); + "23503" => match value.column { + Some(column) => { + let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { + constraint: DatabaseConstraint::fields(Some(column)), + }); - let db_name = message - .as_ref() - .and_then(|s| s.split_whitespace().nth(1)) + builder.set_original_code(value.code); + builder.set_original_message(value.message); + + builder.build() + } + None => { + let constraint = value + .message + .split_whitespace() + .nth(10) + .and_then(|s| s.split('"').nth(1)) + .map(ToString::to_string) + .map(DatabaseConstraint::Index) + .unwrap_or(DatabaseConstraint::CannotParse); + + let kind = ErrorKind::ForeignKeyConstraintViolation { constraint }; + let mut builder = Error::builder(kind); + + builder.set_original_code(value.code); + builder.set_original_message(value.message); + + builder.build() + } + }, + "3D000" => { + let db_name = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseDoesNotExist { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "28000" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let db_name = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(5)) + "28000" => { + let db_name = value + .message + .split_whitespace() + .nth(5) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseAccessDenied { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - 
builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "28P01" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); + "28P01" => { + let message = value.message; let user = message - .as_ref() - .and_then(|m| m.split_whitespace().last()) + .split_whitespace() + .last() .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::AuthenticationFailed { user }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(message); builder.build() } - Some(code) if code == "40001" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); - - builder.set_original_code(code); + "40001" => { + let mut builder: crate::error::ErrorBuilder = Error::builder(ErrorKind::TransactionWriteConflict); - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42P01" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let table = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42P01" => { + let table = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::TableDoesNotExist { table }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42703" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let column = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42703" => { + let column = value + .message + .split_whitespace() + .nth(1) .map(|s| s.split('\"')) .and_then(|mut s| match (s.next(), s.next()) { (Some(column), _) if !column.is_empty() => Some(column), @@ -235,98 +211,106 @@ impl From for Error { let kind = ErrorKind::ColumnNotFound { column }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } - + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42P04" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let db_name = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42P04" => { + let db_name = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseAlreadyExists { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + 
builder.set_original_code(value.code); + builder.set_original_message(value.message); - if let Some(message) = message { - builder.set_original_message(message); - } + builder.build() + } + + _ => { + let code = value.code.to_owned(); + let message = value.to_string(); + let mut builder = Error::builder(ErrorKind::QueryError(value.into())); + builder.set_original_code(code); + builder.set_original_message(message); builder.build() } - code => { - // This is necessary, on top of the other conversions, for the cases where a - // native_tls error comes wrapped in a tokio_postgres error. - if let Some(tls_error) = try_extracting_tls_error(&e) { - return tls_error; - } + } + } +} - // Same for IO errors. - if let Some(io_error) = try_extracting_io_error(&e) { - return io_error; - } +impl From for Error { + fn from(e: tokio_postgres::error::Error) -> Error { + if e.is_closed() { + return Error::builder(ErrorKind::ConnectionClosed).build(); + } - #[cfg(feature = "uuid")] - if let Some(uuid_error) = try_extracting_uuid_error(&e) { - return uuid_error; - } + if let Some(db_error) = e.as_db_error() { + return PostgresError::from(db_error).into(); + } - let reason = format!("{e}"); - - match reason.as_str() { - "error connecting to server: timed out" => { - let mut builder = Error::builder(ErrorKind::ConnectTimeout); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } // sigh... - // https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 - "error performing TLS handshake: server does not support TLS" => { - let mut builder = Error::builder(ErrorKind::TlsError { - message: reason.clone(), - }); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } // double sigh - _ => { - let code = code.map(|c| c.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } - } + if let Some(tls_error) = try_extracting_tls_error(&e) { + return tls_error; + } + + // Same for IO errors. + if let Some(io_error) = try_extracting_io_error(&e) { + return io_error; + } + + if let Some(uuid_error) = try_extracting_uuid_error(&e) { + return uuid_error; + } + + let reason = format!("{e}"); + let code = e.code().map(|c| c.code()); + + match reason.as_str() { + "error connecting to server: timed out" => { + let mut builder = Error::builder(ErrorKind::ConnectTimeout); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + builder.build() + } // sigh... 
+ // https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 + "error performing TLS handshake: server does not support TLS" => { + let mut builder = Error::builder(ErrorKind::TlsError { + message: reason.clone(), + }); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + builder.build() + } // double sigh + _ => { + let code = code.map(|c| c.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + builder.build() } } } } -#[cfg(feature = "uuid")] fn try_extracting_uuid_error(err: &tokio_postgres::error::Error) -> Option { use std::error::Error as _; diff --git a/quaint/src/connector/result_set.rs b/quaint/src/connector/result_set.rs index dedc49d23ff9..b98d252a0579 100644 --- a/quaint/src/connector/result_set.rs +++ b/quaint/src/connector/result_set.rs @@ -5,10 +5,8 @@ pub use index::*; pub use result_row::*; use crate::{ast::Value, error::*}; -use std::sync::Arc; - -#[cfg(feature = "json")] use serde_json::Map; +use std::sync::Arc; /// Encapsulates a set of results and their respective column names. #[derive(Debug, Default)] @@ -108,8 +106,6 @@ impl Iterator for ResultSetIterator { } } -#[cfg(feature = "json")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] impl From for serde_json::Value { fn from(result_set: ResultSet) -> Self { let columns: Vec = result_set.columns().iter().map(ToString::to_string).collect(); diff --git a/quaint/src/connector/sqlite.rs b/quaint/src/connector/sqlite.rs index da85697a5936..3a1ef72b4883 100644 --- a/quaint/src/connector/sqlite.rs +++ b/quaint/src/connector/sqlite.rs @@ -1,6 +1,8 @@ mod conversion; mod error; +pub use error::SqliteError; + pub use rusqlite::{params_from_iter, version as sqlite_version}; use super::IsolationLevel; @@ -21,7 +23,6 @@ pub(crate) const DEFAULT_SQLITE_SCHEMA_NAME: &str = "main"; pub use rusqlite; /// A connector interface for the SQLite database -#[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] pub struct Sqlite { pub(crate) client: Mutex, } @@ -29,7 +30,6 @@ pub struct Sqlite { /// Wraps a connection url and exposes the parsing logic used by Quaint, /// including default values. 
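A hedged note on the new `PostgresError` introduced above: because the struct and its `From<PostgresError> for Error` impl are public (and re-exported from `quaint::error` later in this patch), the error mapping no longer needs a live `tokio_postgres` error. The sketch below assumes the `postgresql` feature is enabled and that `Error::kind()` is available; the helper name and the example message/detail strings are illustrative only, while the field names and the `"23505"` handling come from this diff.

use quaint::error::{Error, ErrorKind, PostgresError};

// Illustrative helper: build a PostgresError by hand (as a driver adapter
// might) and let quaint's From impl translate it into a structured error.
fn unique_violation_example() -> Error {
    let pg_err = PostgresError {
        code: "23505".to_string(),
        severity: "ERROR".to_string(),
        message: "duplicate key value violates unique constraint \"User_email_key\"".to_string(),
        detail: Some("Key (email)=(user@example.com) already exists.".to_string()),
        column: None,
        hint: None,
    };

    // The "23505" branch parses `detail` to recover the violated fields.
    let err: Error = pg_err.into();
    assert!(matches!(err.kind(), ErrorKind::UniqueConstraintViolation { .. }));
    err
}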
#[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] pub struct SqliteParams { pub connection_limit: Option, /// This is not a `PathBuf` because we need to `ATTACH` the database to the path, and this can diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs index 68442d2a7202..fced37abca4c 100644 --- a/quaint/src/connector/sqlite/conversion.rs +++ b/quaint/src/connector/sqlite/conversion.rs @@ -1,7 +1,7 @@ use std::convert::TryFrom; use crate::{ - ast::Value, + ast::{Value, ValueType}, connector::{ queryable::{GetRow, ToColumnNames}, TypeIdentifier, @@ -14,7 +14,6 @@ use rusqlite::{ Column, Error as RusqlError, Row as SqliteRow, Rows as SqliteRows, }; -#[cfg(feature = "chrono")] use chrono::TimeZone; impl TypeIdentifier for Column<'_> { @@ -139,19 +138,16 @@ impl<'a> GetRow for SqliteRow<'a> { let pv = match self.get_ref_unwrap(i) { ValueRef::Null => match column { // NOTE: A value without decl_type would be Int32(None) - c if c.is_int32() | c.is_null() => Value::Int32(None), - c if c.is_int64() => Value::Int64(None), - c if c.is_text() => Value::Text(None), - c if c.is_bytes() => Value::Bytes(None), - c if c.is_float() => Value::Float(None), - c if c.is_double() => Value::Double(None), - #[cfg(feature = "bigdecimal")] - c if c.is_real() => Value::Numeric(None), - #[cfg(feature = "chrono")] - c if c.is_datetime() => Value::DateTime(None), - #[cfg(feature = "chrono")] - c if c.is_date() => Value::Date(None), - c if c.is_bool() => Value::Boolean(None), + c if c.is_int32() | c.is_null() => Value::null_int32(), + c if c.is_int64() => Value::null_int64(), + c if c.is_text() => Value::null_text(), + c if c.is_bytes() => Value::null_bytes(), + c if c.is_float() => Value::null_float(), + c if c.is_double() => Value::null_double(), + c if c.is_real() => Value::null_numeric(), + c if c.is_datetime() => Value::null_datetime(), + c if c.is_date() => Value::null_date(), + c if c.is_bool() => Value::null_boolean(), c => match c.decl_type() { Some(n) => { let msg = format!("Value {n} not supported"); @@ -160,7 +156,7 @@ impl<'a> GetRow for SqliteRow<'a> { return Err(Error::builder(kind).build()); } // When we don't know what to do, the default value would be Int32(None) - None => Value::Int32(None), + None => Value::null_int32(), }, }, ValueRef::Integer(i) => { @@ -172,12 +168,10 @@ impl<'a> GetRow for SqliteRow<'a> { Value::boolean(true) } } - #[cfg(feature = "chrono")] c if c.is_date() => { let dt = chrono::NaiveDateTime::from_timestamp_opt(i / 1000, 0).unwrap(); Value::date(dt.date()) } - #[cfg(feature = "chrono")] c if c.is_datetime() => { let dt = chrono::Utc.timestamp_millis_opt(i).unwrap(); Value::datetime(dt) @@ -196,14 +190,12 @@ impl<'a> GetRow for SqliteRow<'a> { _ => Value::int64(i), } } - #[cfg(feature = "bigdecimal")] ValueRef::Real(f) if column.is_real() => { use bigdecimal::{BigDecimal, FromPrimitive}; Value::numeric(BigDecimal::from_f64(f).unwrap()) } ValueRef::Real(f) => Value::double(f), - #[cfg(feature = "chrono")] ValueRef::Text(bytes) if column.is_datetime() => { let parse_res = std::str::from_utf8(bytes).map_err(|_| { let builder = Error::builder(ErrorKind::ConversionError( @@ -251,17 +243,17 @@ impl<'a> ToColumnNames for SqliteRows<'a> { impl<'a> ToSql for Value<'a> { fn to_sql(&self) -> Result { - let value = match self { - Value::Int32(integer) => integer.map(ToSqlOutput::from), - Value::Int64(integer) => integer.map(ToSqlOutput::from), - Value::Float(float) => float.map(|f| f as f64).map(ToSqlOutput::from), - 
Value::Double(double) => double.map(ToSqlOutput::from), - Value::Text(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), - Value::Enum(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), - Value::Boolean(boo) => boo.map(ToSqlOutput::from), - Value::Char(c) => c.map(|c| ToSqlOutput::from(c as u8)), - Value::Bytes(bytes) => bytes.as_ref().map(|bytes| ToSqlOutput::from(bytes.as_ref())), - Value::Array(_) => { + let value = match &self.typed { + ValueType::Int32(integer) => integer.map(ToSqlOutput::from), + ValueType::Int64(integer) => integer.map(ToSqlOutput::from), + ValueType::Float(float) => float.map(|f| f as f64).map(ToSqlOutput::from), + ValueType::Double(double) => double.map(ToSqlOutput::from), + ValueType::Text(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), + ValueType::Enum(cow, _) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), + ValueType::Boolean(boo) => boo.map(ToSqlOutput::from), + ValueType::Char(c) => c.map(|c| ToSqlOutput::from(c as u8)), + ValueType::Bytes(bytes) => bytes.as_ref().map(|bytes| ToSqlOutput::from(bytes.as_ref())), + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in SQLite."; let kind = ErrorKind::conversion(msg); @@ -270,29 +262,23 @@ impl<'a> ToSql for Value<'a> { return Err(RusqlError::ToSqlConversionFailure(Box::new(builder.build()))); } - #[cfg(feature = "bigdecimal")] - Value::Numeric(d) => d + ValueType::Numeric(d) => d .as_ref() .map(|d| ToSqlOutput::from(d.to_string().parse::().expect("BigDecimal is not a f64."))), - #[cfg(feature = "json")] - Value::Json(value) => value.as_ref().map(|value| { + ValueType::Json(value) => value.as_ref().map(|value| { let stringified = serde_json::to_string(value) .map_err(|err| RusqlError::ToSqlConversionFailure(Box::new(err))) .unwrap(); ToSqlOutput::from(stringified) }), - Value::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), - #[cfg(feature = "uuid")] - Value::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())), - #[cfg(feature = "chrono")] - Value::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())), - #[cfg(feature = "chrono")] - Value::Date(date) => date + ValueType::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), + ValueType::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())), + ValueType::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())), + ValueType::Date(date) => date .and_then(|date| date.and_hms_opt(0, 0, 0)) .map(|dt| ToSqlOutput::from(dt.timestamp_millis())), - #[cfg(feature = "chrono")] - Value::Time(time) => time + ValueType::Time(time) => time .and_then(|time| chrono::NaiveDate::from_ymd_opt(1970, 1, 1).map(|d| (d, time))) .and_then(|(date, time)| { use chrono::Timelike; diff --git a/quaint/src/connector/sqlite/error.rs b/quaint/src/connector/sqlite/error.rs index fa8b83f3f28a..c10b335cb3c0 100644 --- a/quaint/src/connector/sqlite/error.rs +++ b/quaint/src/connector/sqlite/error.rs @@ -1,69 +1,45 @@ +use std::fmt; + use crate::error::*; use rusqlite::ffi; use rusqlite::types::FromSqlError; -impl From for Error { - fn from(e: rusqlite::Error) -> Error { - match e { - rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { - Ok(error) => *error, - Err(error) => { - let mut builder = Error::builder(ErrorKind::QueryError(error)); - - builder.set_original_message("Could not interpret parameters in an SQLite query."); - - 
builder.build() - } - }, - rusqlite::Error::InvalidQuery => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - builder.set_original_message( - "Could not interpret the query or its parameters. Check the syntax and parameter types.", - ); - - builder.build() - } - rusqlite::Error::ExecuteReturnedResults => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_message("Execute returned results, which is not allowed in SQLite."); - - builder.build() - } - - rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), +#[derive(Debug)] +pub struct SqliteError { + pub extended_code: i32, + pub message: Option, +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 2067, - }, - Some(description), - ) => { - let constraint = description - .split(": ") - .nth(1) - .map(|s| s.split(", ")) - .map(|i| i.flat_map(|s| s.split('.').last())) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); +impl fmt::Display for SqliteError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error code {}: {}", + self.extended_code, + ffi::code_to_str(self.extended_code) + ) + } +} - let kind = ErrorKind::UniqueConstraintViolation { constraint }; - let mut builder = Error::builder(kind); +impl std::error::Error for SqliteError {} - builder.set_original_code("2067"); - builder.set_original_message(description); +impl SqliteError { + pub fn new(extended_code: i32, message: Option) -> Self { + Self { extended_code, message } + } - builder.build() - } + pub fn primary_code(&self) -> i32 { + self.extended_code & 0xFF + } +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(description), - ) => { +impl From for Error { + fn from(error: SqliteError) -> Self { + match error { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_UNIQUE | ffi::SQLITE_CONSTRAINT_PRIMARYKEY, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -75,19 +51,16 @@ impl From for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1555"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1299, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_NOTNULL, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -99,64 +72,41 @@ impl From for Error { let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1299"); - builder.set_original_message(description); - - builder.build() - } - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 787, - }, - Some(description), - ) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::ForeignKey, - }); - - builder.set_original_code("787"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - 
extended_code: 1811, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_FOREIGNKEY | ffi::SQLITE_CONSTRAINT_TRIGGER, + message: Some(description), + } => { let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { constraint: DatabaseConstraint::ForeignKey, }); - builder.set_original_code("1811"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::DatabaseBusy, - extended_code, - }, - description, - ) => { + SqliteError { extended_code, message } if error.primary_code() == ffi::SQLITE_BUSY => { let mut builder = Error::builder(ErrorKind::SocketTimeout); builder.set_original_code(format!("{extended_code}")); - if let Some(description) = description { + if let Some(description) = message { builder.set_original_message(description); } builder.build() } - rusqlite::Error::SqliteFailure(ffi::Error { extended_code, .. }, ref description) => match description { + SqliteError { + extended_code, + ref message, + } => match message { Some(d) if d.starts_with("no such table") => { let table = d.split(": ").last().into(); let kind = ErrorKind::TableDoesNotExist { table }; @@ -188,8 +138,8 @@ impl From for Error { builder.build() } _ => { - let description = description.as_ref().map(|d| d.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + let description = message.as_ref().map(|d| d.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(error.into())); builder.set_original_code(format!("{extended_code}")); if let Some(description) = description { @@ -199,31 +149,50 @@ impl From for Error { builder.build() } }, + } + } +} - rusqlite::Error::SqlInputError { - error: ffi::Error { extended_code, .. }, - ref msg, - .. - } => match msg { - d if d.starts_with("no such column: ") => { - let column = d.split("no such column: ").last().into(); - let kind = ErrorKind::ColumnNotFound { column }; - - let mut builder = Error::builder(kind); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(d); +impl From for Error { + fn from(e: rusqlite::Error) -> Error { + match e { + rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { + Ok(error) => *error, + Err(error) => { + let mut builder = Error::builder(ErrorKind::QueryError(error)); - builder.build() - } - _ => { - let description = msg.clone(); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(description); + builder.set_original_message("Could not interpret parameters in an SQLite query."); builder.build() } }, + rusqlite::Error::InvalidQuery => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + builder.set_original_message( + "Could not interpret the query or its parameters. 
Check the syntax and parameter types.", + ); + + builder.build() + } + rusqlite::Error::ExecuteReturnedResults => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + builder.set_original_message("Execute returned results, which is not allowed in SQLite."); + + builder.build() + } + + rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), + + rusqlite::Error::SqliteFailure(ffi::Error { code: _, extended_code }, message) => { + SqliteError::new(extended_code, message).into() + } + + rusqlite::Error::SqlInputError { + error: ffi::Error { extended_code, .. }, + msg, + .. + } => SqliteError::new(extended_code, Some(msg)).into(), e => Error::builder(ErrorKind::QueryError(e.into())).build(), } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 73f88dc90b30..705bb6b37ee0 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,6 +6,10 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; +pub use crate::connector::mysql::MysqlError; +pub use crate::connector::postgres::PostgresError; +pub use crate::connector::sqlite::SqliteError; + #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { Fields(Vec), @@ -245,11 +249,6 @@ pub enum ErrorKind { #[error("Value out of range error. {}", message)] ValueOutOfRange { message: String }, - #[cfg(feature = "serde-support")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "serde-support")))] - #[error("Deserializing a ResultRow {:?}", _0)] - FromRowError(serde::de::value::Error), - #[error( "Incorrect number of parameters given to a statement. Expected {}: got: {}.", expected, @@ -317,8 +316,6 @@ impl From for ErrorKind { } } -#[cfg(feature = "bigdecimal")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] impl From for Error { fn from(e: bigdecimal::ParseBigDecimalError) -> Self { let kind = ErrorKind::conversion(format!("{e}")); @@ -326,8 +323,6 @@ impl From for Error { } } -#[cfg(feature = "json")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))] impl From for Error { fn from(_: serde_json::Error) -> Self { Self::builder(ErrorKind::conversion("Malformed JSON data.")).build() @@ -395,7 +390,6 @@ impl From for Error { } } -#[cfg(feature = "uuid")] impl From for Error { fn from(e: uuid::Error) -> Self { Error::builder(ErrorKind::UUIDError(format!("{e}"))).build() diff --git a/quaint/src/lib.rs b/quaint/src/lib.rs index 1fa817fddf55..1458a6ae1615 100644 --- a/quaint/src/lib.rs +++ b/quaint/src/lib.rs @@ -104,8 +104,6 @@ //! # } //! ``` -#![cfg_attr(feature = "docs", feature(doc_cfg))] - #[cfg(not(any(feature = "sqlite", feature = "postgresql", feature = "mysql", feature = "mssql")))] compile_error!("one of 'sqlite', 'postgresql', 'mysql' or 'mssql' features must be enabled"); @@ -115,24 +113,19 @@ mod macros; #[macro_use] extern crate metrics; -#[cfg(feature = "bigdecimal")] -extern crate bigdecimal_ as bigdecimal; +extern crate bigdecimal; pub mod ast; pub mod connector; pub mod error; #[cfg(feature = "pooled")] -#[cfg_attr(feature = "docs", doc(cfg(pooled)))] pub mod pooled; pub mod prelude; -#[cfg(feature = "serde-support")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "serde-support")))] -pub mod serde; pub mod single; #[cfg(test)] mod tests; pub mod visitor; -pub use ast::Value; +pub use ast::{Value, ValueType}; pub type Result = std::result::Result; diff --git a/quaint/src/macros.rs b/quaint/src/macros.rs index 6289fe0bac23..cfb52bc0c6e1 100644 --- a/quaint/src/macros.rs +++ b/quaint/src/macros.rs @@ -88,21 +88,33 @@ macro_rules! 
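A similarly hedged sketch for the new `SqliteError` above: errors are keyed on SQLite's extended result codes, with `primary_code()` masking down to the primary code (`extended_code & 0xFF`), so the same mapping applies whether the error comes out of rusqlite or is constructed directly. `SqliteError::new`, `primary_code`, and the `SQLITE_CONSTRAINT_*` constants appear in this diff; the helper name, the example message, the `rusqlite::ffi` import path, and the use of `Error::kind()` are assumptions for illustration.

use quaint::error::{Error, ErrorKind, SqliteError};
use rusqlite::ffi;

// Illustrative: a NOT NULL violation reported via SQLite's extended code.
fn not_null_violation_example() -> Error {
    let sqlite_err = SqliteError::new(
        ffi::SQLITE_CONSTRAINT_NOTNULL,
        Some("NOT NULL constraint failed: User.email".to_string()),
    );

    // Primary code is the extended code with the extension bits masked off.
    assert_eq!(sqlite_err.primary_code(), ffi::SQLITE_CONSTRAINT);

    let err: Error = sqlite_err.into();
    assert!(matches!(err.kind(), ErrorKind::NullConstraintViolation { .. }));
    err
}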
val { macro_rules! value { ($target:ident: $kind:ty,$paramkind:ident,$that:expr) => { - impl<'a> From<$kind> for crate::ast::Value<'a> { + impl<'a> From<$kind> for crate::ast::ValueType<'a> { fn from(that: $kind) -> Self { let $target = that; - crate::ast::Value::$paramkind(Some($that)) + crate::ast::ValueType::$paramkind(Some($that)) } } - impl<'a> From> for crate::ast::Value<'a> { + impl<'a> From> for crate::ast::ValueType<'a> { fn from(that: Option<$kind>) -> Self { match that { - Some(val) => crate::ast::Value::from(val), - None => crate::ast::Value::$paramkind(None), + Some(val) => crate::ast::ValueType::from(val), + None => crate::ast::ValueType::$paramkind(None), } } } + + impl<'a> From<$kind> for crate::ast::Value<'a> { + fn from(that: $kind) -> Self { + crate::ast::Value::from(crate::ast::ValueType::from(that)) + } + } + + impl<'a> From> for crate::ast::Value<'a> { + fn from(that: Option<$kind>) -> Self { + crate::ast::Value::from(crate::ast::ValueType::from(that)) + } + } }; } diff --git a/quaint/src/serde.rs b/quaint/src/serde.rs deleted file mode 100644 index aa26bb98c2b0..000000000000 --- a/quaint/src/serde.rs +++ /dev/null @@ -1,330 +0,0 @@ -//! Convert results from the database into any type implementing `serde::Deserialize`. - -use std::borrow::Cow; - -use crate::{ - ast::Value, - connector::{ResultRow, ResultSet}, - error::{Error, ErrorKind}, -}; -use serde::{de::Error as SerdeError, de::*}; - -impl ResultSet { - /// Takes the first row and deserializes it. - #[allow(clippy::wrong_self_convention)] - pub fn from_first(self) -> crate::Result { - from_row(self.into_single()?) - } -} - -/// Deserialize each row of a [`ResultSet`](../connector/struct.ResultSet.html). -/// -/// For an example, see the docs for [`from_row`](fn.from_row.html). -pub fn from_rows(result_set: ResultSet) -> crate::Result> { - let mut deserialized_rows = Vec::with_capacity(result_set.len()); - - for row in result_set { - deserialized_rows.push(from_row(row)?) - } - - Ok(deserialized_rows) -} - -/// Deserialize a row into any type implementing `Deserialize`. -/// -/// ``` -/// # use serde::Deserialize; -/// # use quaint::ast::Value; -/// # -/// # #[derive(Deserialize, Debug, PartialEq)] -/// # struct User { -/// # id: u64, -/// # name: String, -/// # } -/// # -/// # fn main() -> Result<(), Box> { -/// # -/// # let row = quaint::serde::make_row(vec![ -/// # ("id", Value::from(12)), -/// # ("name", "Georgina".into()), -/// # ]); -/// # -/// # -/// let user: User = quaint::serde::from_row(row)?; -/// -/// assert_eq!(user, User { name: "Georgina".to_string(), id: 12 }); -/// # Ok(()) -/// # } -/// ``` -pub fn from_row(row: ResultRow) -> crate::Result { - let deserializer = RowDeserializer(row); - - T::deserialize(deserializer).map_err(|e| Error::builder(ErrorKind::FromRowError(e)).build()) -} - -type DeserializeError = serde::de::value::Error; - -#[derive(Debug)] -struct RowDeserializer(ResultRow); - -impl<'de> Deserializer<'de> for RowDeserializer { - type Error = DeserializeError; - - fn deserialize_any>(self, visitor: V) -> Result { - let ResultRow { columns, mut values } = self.0; - - let kvs = columns.iter().enumerate().map(move |(v, k)| { - // The unwrap is safe if `columns` is correct. - let value = values.get_mut(v).unwrap(); - let taken_value = std::mem::replace(value, Value::Int64(None)); - (k.as_str(), taken_value) - }); - - let deserializer = serde::de::value::MapDeserializer::new(kvs); - - visitor.visit_map(deserializer) - } - - serde::forward_to_deserialize_any! 
{ - bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes byte_buf - option unit unit_struct newtype_struct seq tuple tuple_struct map - struct enum identifier ignored_any - } -} - -impl<'de> IntoDeserializer<'de, DeserializeError> for Value<'de> { - type Deserializer = ValueDeserializer<'de>; - - fn into_deserializer(self) -> Self::Deserializer { - ValueDeserializer(self) - } -} - -#[derive(Debug)] -pub struct ValueDeserializer<'a>(Value<'a>); - -impl<'de> Deserializer<'de> for ValueDeserializer<'de> { - type Error = DeserializeError; - - fn deserialize_any>(self, visitor: V) -> Result { - match self.0 { - Value::Text(Some(s)) => visitor.visit_string(s.into_owned()), - Value::Text(None) => visitor.visit_none(), - Value::Bytes(Some(bytes)) => visitor.visit_bytes(bytes.as_ref()), - Value::Bytes(None) => visitor.visit_none(), - Value::Enum(Some(s)) => visitor.visit_string(s.into_owned()), - Value::Enum(None) => visitor.visit_none(), - Value::Int32(Some(i)) => visitor.visit_i32(i), - Value::Int32(None) => visitor.visit_none(), - Value::Int64(Some(i)) => visitor.visit_i64(i), - Value::Int64(None) => visitor.visit_none(), - Value::Boolean(Some(b)) => visitor.visit_bool(b), - Value::Boolean(None) => visitor.visit_none(), - Value::Char(Some(c)) => visitor.visit_char(c), - Value::Char(None) => visitor.visit_none(), - Value::Float(Some(num)) => visitor.visit_f64(num as f64), - Value::Float(None) => visitor.visit_none(), - Value::Double(Some(num)) => visitor.visit_f64(num), - Value::Double(None) => visitor.visit_none(), - - #[cfg(feature = "bigdecimal")] - Value::Numeric(Some(num)) => { - use crate::bigdecimal::ToPrimitive; - visitor.visit_f64(num.to_f64().unwrap()) - } - #[cfg(feature = "bigdecimal")] - Value::Numeric(None) => visitor.visit_none(), - - #[cfg(feature = "uuid")] - Value::Uuid(Some(uuid)) => visitor.visit_string(uuid.to_string()), - #[cfg(feature = "uuid")] - Value::Uuid(None) => visitor.visit_none(), - - #[cfg(feature = "json")] - Value::Json(Some(value)) => { - let de = value.into_deserializer(); - - de.deserialize_any(visitor) - .map_err(|err| serde::de::value::Error::custom(format!("Error deserializing JSON value: {err}"))) - } - #[cfg(feature = "json")] - Value::Json(None) => visitor.visit_none(), - - Value::Xml(Some(s)) => visitor.visit_string(s.into_owned()), - Value::Xml(None) => visitor.visit_none(), - - #[cfg(feature = "chrono")] - Value::DateTime(Some(dt)) => visitor.visit_string(dt.to_rfc3339()), - #[cfg(feature = "chrono")] - Value::DateTime(None) => visitor.visit_none(), - - #[cfg(feature = "chrono")] - Value::Date(Some(d)) => visitor.visit_string(format!("{d}")), - #[cfg(feature = "chrono")] - Value::Date(None) => visitor.visit_none(), - - #[cfg(feature = "chrono")] - Value::Time(Some(t)) => visitor.visit_string(format!("{t}")), - #[cfg(feature = "chrono")] - Value::Time(None) => visitor.visit_none(), - - Value::Array(Some(values)) => { - let deserializer = serde::de::value::SeqDeserializer::new(values.into_iter()); - visitor.visit_seq(deserializer) - } - Value::Array(None) => visitor.visit_none(), - } - } - - fn deserialize_option>(self, visitor: V) -> Result { - if self.0.is_null() { - visitor.visit_none() - } else { - visitor.visit_some(self) - } - } - - fn deserialize_bytes(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - if let Value::Bytes(Some(bytes)) = self.0 { - match bytes { - Cow::Borrowed(bytes) => visitor.visit_borrowed_bytes(bytes), - Cow::Owned(bytes) => visitor.visit_byte_buf(bytes), - } - } else { - 
Err(DeserializeError::invalid_type( - Unexpected::Other(&format!("{:?}", self.0)), - &visitor, - )) - } - } - - serde::forward_to_deserialize_any! { - bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str byte_buf - string unit unit_struct newtype_struct seq tuple tuple_struct map - struct enum identifier ignored_any - } -} - -#[doc(hidden)] -pub fn make_row(cols: Vec<(&'static str, Value<'static>)>) -> ResultRow { - let mut columns = Vec::with_capacity(cols.len()); - let mut values = Vec::with_capacity(cols.len()); - - for (name, value) in cols.into_iter() { - columns.push(name.to_owned()); - values.push(value); - } - - ResultRow { - values, - columns: std::sync::Arc::new(columns), - } -} - -#[cfg(test)] -mod tests { - use super::*; - use chrono::{DateTime, Utc}; - use serde::Deserialize; - - #[derive(Deserialize, Debug, PartialEq)] - struct User { - id: u64, - name: String, - bio: Option, - } - - #[derive(Deserialize, PartialEq, Debug)] - struct Cat { - age: f32, - birthday: DateTime, - human: User, - } - - #[test] - fn deserialize_user() { - let row = make_row(vec![("id", Value::integer(12)), ("name", "Georgina".into())]); - let user: User = from_row(row).unwrap(); - - assert_eq!( - user, - User { - id: 12, - name: "Georgina".to_owned(), - bio: None, - } - ) - } - - #[test] - fn from_rows_works() { - let first_row = make_row(vec![ - ("id", Value::integer(12)), - ("name", "Georgina".into()), - ("bio", Value::Text(None)), - ]); - let second_row = make_row(vec![ - ("id", 33.into()), - ("name", "Philbert".into()), - ( - "bio", - "Invented sliced bread on a meditation retreat in the Himalayas.".into(), - ), - ]); - - let result_set = ResultSet { - columns: std::sync::Arc::clone(&first_row.columns), - rows: vec![first_row.values, second_row.values], - last_insert_id: None, - }; - - let users: Vec = from_rows(result_set).unwrap(); - - assert_eq!( - users, - &[ - User { - id: 12, - name: "Georgina".to_owned(), - bio: None, - }, - User { - id: 33, - name: "Philbert".to_owned(), - bio: Some("Invented sliced bread on a meditation retreat in the Himalayas.".into()), - } - ] - ); - } - - #[test] - fn deserialize_cat() { - let row = make_row(vec![ - ("age", Value::numeric("18.800001".parse().unwrap())), - ("birthday", Value::datetime("2019-08-01T20:00:00Z".parse().unwrap())), - ( - "human", - Value::json(serde_json::json!({ - "id": 19, - "name": "Georgina" - })), - ), - ]); - let cat: Cat = from_row(row).unwrap(); - - let expected_cat = Cat { - age: 18.800001, - birthday: "2019-08-01T20:00:00Z".parse().unwrap(), - human: User { - name: "Georgina".into(), - id: 19, - bio: None, - }, - }; - - assert_eq!(cat, expected_cat); - } -} diff --git a/quaint/src/single.rs b/quaint/src/single.rs index 3dcb6eb86a33..82042f58010b 100644 --- a/quaint/src/single.rs +++ b/quaint/src/single.rs @@ -167,7 +167,6 @@ impl Quaint { } #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(sqlite)))] /// Open a new SQLite database in memory. 
pub fn new_in_memory() -> crate::Result { Ok(Quaint { diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index 9fc67e9d662f..06bebe1a9601 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -1,15 +1,15 @@ mod error; use super::test_api::*; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use crate::ast::JsonPath; use crate::{ connector::{IsolationLevel, Queryable, TransactionCapable}, error::ErrorKind, prelude::*, }; -use test_macros::test_each_connector; -use test_setup::Tags; +use quaint_test_macros::test_each_connector; +use quaint_test_setup::Tags; #[test_each_connector] async fn single_value(api: &mut dyn TestApi) -> crate::Result<()> { @@ -33,7 +33,7 @@ async fn aliased_value(api: &mut dyn TestApi) -> crate::Result<()> { #[test_each_connector] async fn aliased_null(api: &mut dyn TestApi) -> crate::Result<()> { - let query = Select::default().value(val!(Value::Int64(None)).alias("test")); + let query = Select::default().value(val!(Value::null_int64()).alias("test")); let res = api.conn().select(query).await?; let row = res.get(0).unwrap(); @@ -307,8 +307,8 @@ async fn where_equals(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -328,8 +328,8 @@ async fn where_like(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -349,8 +349,8 @@ async fn where_not_like(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -371,14 +371,14 @@ async fn inner_join(api: &mut dyn TestApi) -> crate::Result<()> { let table2 = api.create_temp_table("t1_id int, is_cat int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]) - .values(vec![Value::integer(1), Value::integer(1)]) - .values(vec![Value::integer(2), Value::integer(0)]); + .values(vec![Value::int32(1), Value::int32(1)]) + .values(vec![Value::int32(2), Value::int32(0)]); api.conn().insert(insert.into()).await?; @@ -414,18 +414,18 @@ async fn table_inner_join(api: &mut dyn TestApi) -> crate::Result<()> { let table3 
= api.create_temp_table("id int, foo int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]) - .values(vec![Value::integer(1), Value::integer(1)]) - .values(vec![Value::integer(2), Value::integer(0)]); + .values(vec![Value::int32(1), Value::int32(1)]) + .values(vec![Value::int32(2), Value::int32(0)]); api.conn().insert(insert.into()).await?; - let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; @@ -466,13 +466,12 @@ async fn left_join(api: &mut dyn TestApi) -> crate::Result<()> { let table2 = api.create_temp_table("t1_id int, is_cat int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; - let insert = - Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; @@ -508,17 +507,16 @@ async fn table_left_join(api: &mut dyn TestApi) -> crate::Result<()> { let table3 = api.create_temp_table("id int, foo int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; - let insert = - Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; - let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; @@ -558,8 +556,8 @@ async fn limit_no_offset(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -580,8 +578,8 @@ async fn offset_no_limit(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), 
Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -602,9 +600,9 @@ async fn limit_with_offset(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]) - .values(vec![Value::integer(3), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]) + .values(vec![Value::int32(3), Value::text("Belka")]); api.conn().insert(insert.into()).await?; @@ -625,9 +623,9 @@ async fn limit_with_offset_no_given_order(api: &mut dyn TestApi) -> crate::Resul let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]) - .values(vec![Value::integer(3), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]) + .values(vec![Value::int32(3), Value::text("Belka")]); api.conn().insert(insert.into()).await?; @@ -1374,15 +1372,12 @@ async fn float_columns_cast_to_f32(api: &mut dyn TestApi) -> crate::Result<()> { // left: `Numeric(Some(BigDecimal("1.0")))`, // right: `Double(Some(1.0))`' #[test_each_connector(tags("mysql"), ignore("mysql8"))] -#[cfg(feature = "bigdecimal")] + async fn newdecimal_conversion_is_handled_correctly(api: &mut dyn TestApi) -> crate::Result<()> { - let select = Select::default().value(sum(Value::integer(1)).alias("theone")); + let select = Select::default().value(sum(Value::int32(1)).alias("theone")); let result = api.conn().select(select).await?; - assert_eq!( - Value::Numeric(Some("1.0".parse().unwrap())), - result.into_single().unwrap()[0] - ); + assert_eq!(Value::numeric("1.0".parse().unwrap()), result.into_single().unwrap()[0]); Ok(()) } @@ -1412,7 +1407,6 @@ async fn unsigned_integers_are_handled(api: &mut dyn TestApi) -> crate::Result<( Ok(()) } -#[cfg(feature = "json")] #[test_each_connector(tags("mysql", "postgresql"))] async fn json_filtering_works(api: &mut dyn TestApi) -> crate::Result<()> { let json_type = match api.system() { @@ -1664,15 +1658,29 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { .await?; api.conn() - .insert(Insert::single_into(&table).value("value", "A").into()) + .insert( + Insert::single_into(&table) + .value( + "value", + Value::enum_variant_with_name("A", EnumName::new(&type_name, Option::::None)), + ) + .into(), + ) .await?; api.conn() - .insert(Insert::single_into(&table).value("value", "B").into()) + .insert( + Insert::single_into(&table) + .value( + "value", + Value::enum_variant_with_name("B", EnumName::new(&type_name, Option::::None)), + ) + .into(), + ) .await?; api.conn() - .insert(Insert::single_into(&table).value("value", Value::Enum(None)).into()) + .insert(Insert::single_into(&table).value("value", Value::null_enum()).into()) .await?; let select = Select::from_table(&table).column("value").order_by("id".ascend()); @@ -1685,13 +1693,13 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(Some(&Value::enum_variant("B")), row.at(0)); let row = res.get(2).unwrap(); - assert_eq!(Some(&Value::Enum(None)), 
row.at(0)); + assert_eq!(Some(&Value::null_enum()), row.at(0)); Ok(()) } #[test_each_connector(tags("postgresql"))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> { let cte = Select::default() .value(val!("hello_world").alias("toto")) @@ -1700,9 +1708,9 @@ async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> { let result = api.conn().select(select).await?; assert_eq!( - Value::Json(Some(serde_json::json!({ + Value::json(serde_json::json!({ "toto": "hello_world" - }))), + })), result.into_single().unwrap()[0] ); @@ -1710,7 +1718,7 @@ async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> { } #[test_each_connector(tags("postgresql"))] -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] async fn row_to_json_pretty(api: &mut dyn TestApi) -> crate::Result<()> { let cte = Select::default() .value(val!("hello_world").alias("toto")) @@ -1719,9 +1727,9 @@ async fn row_to_json_pretty(api: &mut dyn TestApi) -> crate::Result<()> { let result = api.conn().select(select).await?; assert_eq!( - Value::Json(Some(serde_json::json!({ + Value::json(serde_json::json!({ "toto": "hello_world" - }))), + })), result.into_single().unwrap()[0] ); @@ -2001,7 +2009,6 @@ async fn insert_default_keyword(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "bigdecimal")] #[test_each_connector(tags("postgresql"))] async fn ints_read_write_to_numeric(api: &mut dyn TestApi) -> crate::Result<()> { use bigdecimal::BigDecimal; @@ -2010,9 +2017,9 @@ async fn ints_read_write_to_numeric(api: &mut dyn TestApi) -> crate::Result<()> let table = api.create_temp_table("id int, value numeric(12,2)").await?; let insert = Insert::multi_into(&table, ["id", "value"]) - .values(vec![Value::integer(1), Value::double(1234.5)]) - .values(vec![Value::integer(2), Value::integer(1234)]) - .values(vec![Value::integer(3), Value::integer(12345)]); + .values(vec![Value::int32(1), Value::double(1234.5)]) + .values(vec![Value::int32(2), Value::int32(1234)]) + .values(vec![Value::int32(3), Value::int32(12345)]); api.conn().execute(insert.into()).await?; @@ -2030,7 +2037,6 @@ async fn ints_read_write_to_numeric(api: &mut dyn TestApi) -> crate::Result<()> Ok(()) } -#[cfg(feature = "bigdecimal")] #[test_each_connector(tags("postgresql"))] async fn bigdecimal_read_write_to_floating(api: &mut dyn TestApi) -> crate::Result<()> { use bigdecimal::BigDecimal; @@ -2040,7 +2046,7 @@ async fn bigdecimal_read_write_to_floating(api: &mut dyn TestApi) -> crate::Resu let val = BigDecimal::from_str("0.1").unwrap(); let insert = Insert::multi_into(&table, ["id", "a", "b"]).values(vec![ - Value::integer(1), + Value::int32(1), Value::numeric(val.clone()), Value::numeric(val.clone()), ]); @@ -2058,7 +2064,7 @@ async fn bigdecimal_read_write_to_floating(api: &mut dyn TestApi) -> crate::Resu #[test_each_connector] async fn coalesce_fun(api: &mut dyn TestApi) -> crate::Result<()> { - let exprs: Vec = vec![Value::Text(None).into(), Value::text("Individual").into()]; + let exprs: Vec = vec![Value::null_text().into(), Value::text("Individual").into()]; let select = Select::default().value(coalesce(exprs).alias("val")); let row = api.conn().select(select).await?.into_single()?; @@ -2067,22 +2073,21 @@ async fn coalesce_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "json")] fn value_into_json(value: &Value) -> Option { - match value.clone() { + match 
value.typed.clone() { // MariaDB returns JSON as text - Value::Text(Some(text)) => { + ValueType::Text(Some(text)) => { let json: serde_json::Value = serde_json::from_str(&text) .unwrap_or_else(|_| panic!("expected parsable text to json, found {}", text)); Some(json) } - Value::Json(Some(json)) => Some(json), + ValueType::Json(Some(json)) => Some(json), _ => None, } } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> { let table = api @@ -2133,7 +2138,7 @@ async fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] async fn json_extract_array_path_postgres(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2192,7 +2197,7 @@ async fn json_extract_array_path_postgres(api: &mut dyn TestApi, json_type: &str Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_extract_array_path_fun_on_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_extract_array_path_postgres(api, "jsonb").await?; @@ -2200,7 +2205,7 @@ async fn json_extract_array_path_fun_on_jsonb(api: &mut dyn TestApi) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_extract_array_path_fun_on_json(api: &mut dyn TestApi) -> crate::Result<()> { json_extract_array_path_postgres(api, "json").await?; @@ -2208,7 +2213,7 @@ async fn json_extract_array_path_fun_on_json(api: &mut dyn TestApi) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_contains(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2287,7 +2292,7 @@ async fn json_array_contains(api: &mut dyn TestApi, json_type: &str) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_contains(api, "jsonb").await?; @@ -2295,7 +2300,7 @@ async fn json_array_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Resul Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_contains(api, "json").await?; @@ -2303,7 +2308,7 @@ async fn json_array_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_contains(api, "json").await?; @@ -2311,7 +2316,7 @@ async fn json_array_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_not_contains(api: &mut 
dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2345,7 +2350,7 @@ async fn json_array_not_contains(api: &mut dyn TestApi, json_type: &str) -> crat Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_contains(api, "jsonb").await?; @@ -2353,7 +2358,7 @@ async fn json_array_not_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_contains(api, "json").await?; @@ -2361,7 +2366,7 @@ async fn json_array_not_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_not_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_contains(api, "json").await?; @@ -2369,7 +2374,7 @@ async fn json_array_not_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2437,7 +2442,7 @@ async fn json_array_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_begins_with(api, "jsonb").await?; @@ -2445,7 +2450,7 @@ async fn json_array_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_begins_with(api, "json").await?; @@ -2453,7 +2458,7 @@ async fn json_array_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Res Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_begins_with(api, "json").await?; @@ -2461,7 +2466,7 @@ async fn json_array_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_not_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2496,7 +2501,7 @@ async fn json_array_not_begins_with(api: &mut dyn TestApi, json_type: &str) -> c Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> 
crate::Result<()> { json_array_not_begins_with(api, "jsonb").await?; @@ -2504,7 +2509,7 @@ async fn json_array_not_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_begins_with(api, "json").await?; @@ -2512,7 +2517,7 @@ async fn json_array_not_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate: Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_not_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_begins_with(api, "json").await?; @@ -2520,7 +2525,7 @@ async fn json_array_not_begins_with_fun(api: &mut dyn TestApi) -> crate::Result< Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2589,7 +2594,7 @@ async fn json_array_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate:: Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_ends_into(api, "jsonb").await?; @@ -2597,7 +2602,7 @@ async fn json_array_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Resu Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_ends_into(api, "json").await?; @@ -2605,7 +2610,7 @@ async fn json_array_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Resul Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_ends_into(api, "json").await?; @@ -2613,7 +2618,7 @@ async fn json_array_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_array_not_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type)) @@ -2649,7 +2654,7 @@ async fn json_array_not_ends_into(api: &mut dyn TestApi, json_type: &str) -> cra Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_ends_into(api, "jsonb").await?; @@ -2657,7 +2662,7 @@ async fn json_array_not_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate:: Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_array_not_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_ends_into(api, "json").await?; @@ -2665,7 
+2670,7 @@ async fn json_array_not_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::R Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_array_not_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_array_not_ends_into(api, "json").await?; @@ -2673,7 +2678,7 @@ async fn json_array_not_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<() Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] async fn json_gt_gte_lt_lte(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> { let table = api .create_temp_table(&format!("{}, json {}", api.autogen_id("id"), json_type)) @@ -2817,7 +2822,7 @@ async fn json_gt_gte_lt_lte(api: &mut dyn TestApi, json_type: &str) -> crate::Re Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_gt_gte_lt_lte_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> { json_gt_gte_lt_lte(api, "jsonb").await?; @@ -2825,7 +2830,7 @@ async fn json_gt_gte_lt_lte_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(all(feature = "json", feature = "postgresql"))] +#[cfg(feature = "postgresql")] #[test_each_connector(tags("postgresql"))] async fn json_gt_gte_lt_lte_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> { json_gt_gte_lt_lte(api, "json").await?; @@ -2833,7 +2838,7 @@ async fn json_gt_gte_lt_lte_fun_pg_json(api: &mut dyn TestApi) -> crate::Result< Ok(()) } -#[cfg(all(feature = "json", feature = "mysql"))] +#[cfg(feature = "mysql")] #[test_each_connector(tags("mysql"))] async fn json_gt_gte_lt_lte_fun(api: &mut dyn TestApi) -> crate::Result<()> { json_gt_gte_lt_lte(api, "json").await?; @@ -2991,7 +2996,7 @@ async fn generate_binary_uuid(api: &mut dyn TestApi) -> crate::Result<()> { let val = res.into_single()?; // If it is a byte type and has a value, it's a generated UUID. - assert!(matches!(val, Value::Bytes(x) if x.is_some())); + assert!(matches!(val.typed, ValueType::Bytes(x) if x.is_some())); Ok(()) } @@ -3004,7 +3009,7 @@ async fn generate_swapped_binary_uuid(api: &mut dyn TestApi) -> crate::Result<() let val = res.into_single()?; // If it is a byte type and has a value, it's a generated UUID. - assert!(matches!(val, Value::Bytes(x) if x.is_some())); + assert!(matches!(val.typed, ValueType::Bytes(x) if x.is_some())); Ok(()) } @@ -3017,7 +3022,7 @@ async fn generate_native_uuid(api: &mut dyn TestApi) -> crate::Result<()> { let val = res.into_single()?; // If it is a text type and has a value, it's a generated string UUID. 
- assert!(matches!(val, Value::Text(x) if x.is_some())); + assert!(matches!(val.typed, ValueType::Text(x) if x.is_some())); Ok(()) } @@ -3085,7 +3090,6 @@ async fn query_raw_typed_numeric(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "chrono")] #[test_each_connector(tags("postgresql"))] async fn query_raw_typed_date(api: &mut dyn TestApi) -> crate::Result<()> { use chrono::DateTime; @@ -3117,7 +3121,6 @@ async fn query_raw_typed_date(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "json")] #[test_each_connector(tags("postgresql"))] async fn query_raw_typed_json(api: &mut dyn TestApi) -> crate::Result<()> { use serde_json::json; @@ -3162,25 +3165,25 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { let insert = Insert::single_into(&table) .value("name", "b") - .value("age", Value::Int32(None)); + .value("age", Value::null_int32()); api.conn().insert(insert.into()).await?; let insert = Insert::single_into(&table) - .value("name", Value::Text(None)) + .value("name", Value::null_text()) .value("age", 2); api.conn().insert(insert.into()).await?; let insert = Insert::single_into(&table) - .value("name", Value::Text(None)) - .value("age", Value::Text(None)); + .value("name", Value::null_text()) + .value("age", Value::null_text()); api.conn().insert(insert.into()).await?; // name ASC NULLS FIRST let select = Select::from_table(table.clone()).order_by("name".ascend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["name"], Value::text("a")); assert_eq!(res.get(3).unwrap()["name"], Value::text("b")); @@ -3190,15 +3193,15 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["name"], Value::text("a")); assert_eq!(res.get(1).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); // name DESC NULLS FIRST let select = Select::from_table(table.clone()).order_by("name".descend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["name"], Value::text("b")); assert_eq!(res.get(3).unwrap()["name"], Value::text("a")); @@ -3208,8 +3211,8 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["name"], Value::text("b")); assert_eq!(res.get(1).unwrap()["name"], Value::text("a")); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); // name ASC NULLS FIRST, age ASC NULLS FIRST let select = Select::from_table(table.clone()) @@ -3217,17 +3220,17 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> 
{ .order_by("age".ascend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(0).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(0).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(1).unwrap()["age"], Value::int32(2)); assert_eq!(res.get(2).unwrap()["name"], Value::text("a")); assert_eq!(res.get(2).unwrap()["age"], Value::int32(1)); assert_eq!(res.get(3).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(3).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(3).unwrap()["age"], Value::null_int32()); // name ASC NULLS LAST, age ASC NULLS LAST let select = Select::from_table(table.clone()) @@ -3239,13 +3242,13 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["age"], Value::int32(1)); assert_eq!(res.get(1).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(1).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(1).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["age"], Value::int32(2)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["age"], Value::null_int32()); // name DESC NULLS FIRST, age DESC NULLS FIRST let select = Select::from_table(table.clone()) @@ -3253,14 +3256,14 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { .order_by("age".descend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(0).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(0).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(1).unwrap()["age"], Value::int32(2)); assert_eq!(res.get(2).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(2).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(2).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(3).unwrap()["name"], Value::text("a")); assert_eq!(res.get(3).unwrap()["age"], Value::int32(1)); @@ -3272,16 +3275,16 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { let res = api.conn().select(select).await?; assert_eq!(res.get(0).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(0).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(0).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(1).unwrap()["name"], Value::text("a")); assert_eq!(res.get(1).unwrap()["age"], Value::int32(1)); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["age"], Value::int32(2)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); + 
assert_eq!(res.get(3).unwrap()["age"], Value::null_int32()); // name ASC NULLS LAST, age DESC NULLS FIRST let select = Select::from_table(table.clone()) @@ -3293,12 +3296,12 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["age"], Value::int32(1)); assert_eq!(res.get(1).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(1).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(1).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(2).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(2).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); assert_eq!(res.get(3).unwrap()["age"], Value::int32(2)); // name DESC NULLS FIRST, age ASC NULLS LAST @@ -3307,14 +3310,14 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { .order_by("age".ascend_nulls_last()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); assert_eq!(res.get(0).unwrap()["age"], Value::int32(2)); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(1).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(1).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(2).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(2).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(2).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(3).unwrap()["name"], Value::text("a")); assert_eq!(res.get(3).unwrap()["age"], Value::int32(1)); @@ -3396,7 +3399,7 @@ async fn any_in_expression(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] #[test_each_connector(tags("postgresql", "mysql"))] async fn json_unquote_fun(api: &mut dyn TestApi) -> crate::Result<()> { let json_type = match api.system() { @@ -3434,7 +3437,7 @@ async fn json_unquote_fun(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] #[test_each_connector(tags("postgresql", "mysql"))] async fn json_col_equal_json_col(api: &mut dyn TestApi) -> crate::Result<()> { let json_type = match api.system() { diff --git a/quaint/src/tests/query/error.rs b/quaint/src/tests/query/error.rs index 63bfd3ef0357..69c57332b6d3 100644 --- a/quaint/src/tests/query/error.rs +++ b/quaint/src/tests/query/error.rs @@ -3,7 +3,7 @@ use crate::{ connector::Queryable, error::{DatabaseConstraint, ErrorKind, Name}, }; -use test_macros::test_each_connector; +use quaint_test_macros::test_each_connector; #[test_each_connector] async fn table_does_not_exist(api: &mut dyn TestApi) -> crate::Result<()> { @@ -129,7 +129,7 @@ async fn null_constraint_violation(api: &mut dyn TestApi) -> crate::Result<()> { let insert = Insert::single_into(&table).value("id1", 50).value("id2", 55); api.conn().insert(insert.into()).await?; - let update = Update::table(&table).set("id2", Value::Int64(None)); + let update = Update::table(&table).set("id2", ValueType::Int64(None)); let res = 
api.conn().update(update).await; assert!(res.is_err()); @@ -257,7 +257,6 @@ async fn ms_my_foreign_key_constraint_violation(api: &mut dyn TestApi) -> crate: Ok(()) } -#[cfg(feature = "chrono")] #[test_each_connector(tags("mysql"))] async fn garbage_datetime_values(api: &mut dyn TestApi) -> crate::Result<()> { api.conn() @@ -354,7 +353,6 @@ async fn should_execute_multi_statement_queries_with_raw_cmd(api: &mut dyn TestA Ok(()) } -#[cfg(feature = "uuid")] #[test_each_connector(tags("postgresql"))] async fn uuid_length_error(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("value uuid").await?; @@ -415,7 +413,8 @@ async fn array_into_scalar_should_fail(api: &mut dyn TestApi) -> crate::Result<( let err = result.unwrap_err(); - assert!(err.to_string().contains("Couldn't serialize value `Some([Text(Some(\"abc\")), Text(Some(\"def\"))])` into a `text`. Value is a list but `text` is not.")); + assert!(err.to_string().contains("Couldn't serialize value")); + assert!(err.to_string().contains("Value is a list but `text` is not.")); Ok(()) } diff --git a/quaint/src/tests/test_api.rs b/quaint/src/tests/test_api.rs index 301813a70ff9..cd612628d95c 100644 --- a/quaint/src/tests/test_api.rs +++ b/quaint/src/tests/test_api.rs @@ -1,4 +1,4 @@ -use test_setup::Tags; +use quaint_test_setup::Tags; #[cfg(feature = "mssql")] pub mod mssql; diff --git a/quaint/src/tests/test_api/mssql.rs b/quaint/src/tests/test_api/mssql.rs index 2df550709947..164b3fb7ddeb 100644 --- a/quaint/src/tests/test_api/mssql.rs +++ b/quaint/src/tests/test_api/mssql.rs @@ -2,8 +2,8 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; use once_cell::sync::Lazy; +use quaint_test_setup::Tags; use std::env; -use test_setup::Tags; pub static CONN_STR: Lazy = Lazy::new(|| env::var("TEST_MSSQL").expect("TEST_MSSQL env var")); @@ -109,7 +109,7 @@ impl<'a> TestApi for MsSql<'a> { self.names.next().unwrap().replace('-', "") } - fn connector_tag(&self) -> test_setup::Tags { + fn connector_tag(&self) -> quaint_test_setup::Tags { Tags::MSSQL } } diff --git a/quaint/src/tests/test_api/mysql.rs b/quaint/src/tests/test_api/mysql.rs index 376e15692713..764100564fdc 100644 --- a/quaint/src/tests/test_api/mysql.rs +++ b/quaint/src/tests/test_api/mysql.rs @@ -2,8 +2,8 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; use once_cell::sync::Lazy; +use quaint_test_setup::Tags; use std::env; -use test_setup::Tags; pub static CONN_STR: Lazy = Lazy::new(|| env::var("TEST_MYSQL").expect("TEST_MYSQL env var")); pub static CONN_STR8: Lazy = Lazy::new(|| env::var("TEST_MYSQL8").expect("TEST_MYSQL8 env var")); diff --git a/quaint/src/tests/test_api/postgres.rs b/quaint/src/tests/test_api/postgres.rs index 8ba29eaeaf64..791d8b07b041 100644 --- a/quaint/src/tests/test_api/postgres.rs +++ b/quaint/src/tests/test_api/postgres.rs @@ -2,8 +2,8 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; use once_cell::sync::Lazy; +use quaint_test_setup::Tags; use std::env; -use test_setup::Tags; pub static CONN_STR: Lazy = Lazy::new(|| env::var("TEST_PSQL").expect("TEST_PSQL env var")); pub static CRDB_CONN_STR: Lazy = Lazy::new(|| env::var("TEST_CRDB").expect("TEST_CRDB env var")); @@ -108,7 +108,7 @@ impl<'a> TestApi for PostgreSql<'a> { self.names.next().unwrap().replace('-', "") } - fn connector_tag(&self) -> test_setup::Tags { + fn connector_tag(&self) -> quaint_test_setup::Tags { Tags::POSTGRES } } diff --git 
a/quaint/src/tests/test_api/sqlite.rs b/quaint/src/tests/test_api/sqlite.rs index 2dd7732f68f9..bde13715d587 100644 --- a/quaint/src/tests/test_api/sqlite.rs +++ b/quaint/src/tests/test_api/sqlite.rs @@ -1,7 +1,7 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; -use test_setup::Tags; +use quaint_test_setup::Tags; pub(crate) async fn sqlite_test_api<'a>() -> crate::Result> { Sqlite::new().await @@ -99,7 +99,7 @@ impl<'a> TestApi for Sqlite<'a> { self.names.next().unwrap().replace('-', "") } - fn connector_tag(&self) -> test_setup::Tags { + fn connector_tag(&self) -> quaint_test_setup::Tags { Tags::SQLITE } } diff --git a/quaint/src/tests/types/mssql.rs b/quaint/src/tests/types/mssql.rs index 2f9a125022cb..ac404dd8af38 100644 --- a/quaint/src/tests/types/mssql.rs +++ b/quaint/src/tests/types/mssql.rs @@ -1,6 +1,5 @@ #![allow(clippy::approx_constant)] -#[cfg(feature = "bigdecimal")] mod bigdecimal; use crate::tests::test_api::*; @@ -8,7 +7,7 @@ use crate::tests::test_api::*; test_type!(nvarchar_limited( mssql, "NVARCHAR(10)", - Value::Text(None), + Value::null_text(), Value::text("foobar"), Value::text("余"), )); @@ -16,7 +15,7 @@ test_type!(nvarchar_limited( test_type!(nvarchar_max( mssql, "NVARCHAR(max)", - Value::Text(None), + Value::null_text(), Value::text("foobar"), Value::text("余"), Value::text("test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥�😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁�🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀"), @@ -25,7 +24,7 @@ test_type!(nvarchar_max( test_type!(ntext( mssql, "NTEXT", - Value::Text(None), + Value::null_text(), Value::text("foobar"), Value::text("余"), )); @@ -33,23 +32,23 @@ test_type!(ntext( test_type!(varchar_limited( mssql, "VARCHAR(10)", - Value::Text(None), + Value::null_text(), Value::text("foobar"), )); test_type!(varchar_max( mssql, "VARCHAR(max)", - Value::Text(None), + Value::null_text(), Value::text("foobar"), )); -test_type!(text(mssql, "TEXT", Value::Text(None), Value::text("foobar"))); +test_type!(text(mssql, "TEXT", Value::null_text(), Value::text("foobar"))); test_type!(tinyint( mssql, "tinyint", - Value::Int32(None), + Value::null_int32(), Value::int32(u8::MIN), Value::int32(u8::MAX), )); @@ -57,7 +56,7 @@ test_type!(tinyint( test_type!(smallint( mssql, "smallint", - Value::Int32(None), + Value::null_int32(), Value::int32(i16::MIN), Value::int32(i16::MAX), )); @@ -65,7 +64,7 @@ test_type!(smallint( test_type!(int( mssql, "int", - Value::Int32(None), + Value::null_int32(), Value::int32(i32::MIN), Value::int32(i32::MAX), )); @@ -73,35 +72,35 @@ test_type!(int( test_type!(bigint( mssql, "bigint", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX), )); -test_type!(float_24(mssql, "float(24)", Value::Float(None), Value::float(1.23456),)); +test_type!(float_24(mssql, "float(24)", Value::null_float(), Value::float(1.23456),)); -test_type!(real(mssql, "real", Value::Float(None), Value::float(1.123456))); +test_type!(real(mssql, "real", Value::null_float(), Value::float(1.123456))); test_type!(float_53( mssql, "float(53)", - Value::Double(None), + Value::null_double(), Value::double(1.1234567891) )); -test_type!(money(mssql, "money", Value::Double(None), Value::double(3.14))); +test_type!(money(mssql, "money", Value::null_double(), Value::double(3.14))); test_type!(smallmoney( mssql, "smallmoney", - Value::Double(None), + Value::null_double(), Value::double(3.14) )); test_type!(boolean( mssql, "bit", - Value::Boolean(None), + 
Value::null_boolean(), Value::boolean(true), Value::boolean(false), )); @@ -109,60 +108,54 @@ test_type!(boolean( test_type!(binary( mssql, "binary(8)", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(b"DEADBEEF".to_vec()), )); test_type!(varbinary( mssql, "varbinary(8)", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(b"DEADBEEF".to_vec()), )); test_type!(image( mssql, "image", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(b"DEADBEEF".to_vec()), )); -#[cfg(feature = "chrono")] test_type!(date( mssql, "date", - Value::Date(None), + Value::null_date(), Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(time( mssql, "time", - Value::Time(None), + Value::null_time(), Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] -test_type!(datetime2(mssql, "datetime2", Value::DateTime(None), { +test_type!(datetime2(mssql, "datetime2", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] -test_type!(datetime(mssql, "datetime", Value::DateTime(None), { +test_type!(datetime(mssql, "datetime", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] -test_type!(datetimeoffset(mssql, "datetimeoffset", Value::DateTime(None), { +test_type!(datetimeoffset(mssql, "datetimeoffset", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] -test_type!(smalldatetime(mssql, "smalldatetime", Value::DateTime(None), { +test_type!(smalldatetime(mssql, "smalldatetime", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); diff --git a/quaint/src/tests/types/mssql/bigdecimal.rs b/quaint/src/tests/types/mssql/bigdecimal.rs index 821d419ad4a5..8fe3761624d2 100644 --- a/quaint/src/tests/types/mssql/bigdecimal.rs +++ b/quaint/src/tests/types/mssql/bigdecimal.rs @@ -2,11 +2,10 @@ use super::*; use crate::bigdecimal::BigDecimal; use std::str::FromStr; -#[cfg(feature = "bigdecimal")] test_type!(numeric( mssql, "numeric(10,2)", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(BigDecimal::from_str("3.14")?) 
)); @@ -148,21 +147,21 @@ test_type!(numeric_38_6( test_type!(money( mssql, "money", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), (Value::numeric(BigDecimal::from_str("3.14")?), Value::double(3.14)) )); test_type!(smallmoney( mssql, "smallmoney", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), (Value::numeric(BigDecimal::from_str("3.14")?), Value::double(3.14)) )); test_type!(float_24( mssql, "float(24)", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::float(1.123456) @@ -172,7 +171,7 @@ test_type!(float_24( test_type!(real( mssql, "real", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::float(1.123456) @@ -182,7 +181,7 @@ test_type!(real( test_type!(float_53( mssql, "float(53)", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(BigDecimal::from_str("1.123456789012345")?), Value::double(1.123456789012345) diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs index 15e2c4f6478b..ade4e5d2a1f2 100644 --- a/quaint/src/tests/types/mysql.rs +++ b/quaint/src/tests/types/mysql.rs @@ -2,16 +2,14 @@ use crate::tests::test_api::*; -#[cfg(feature = "bigdecimal")] use std::str::FromStr; -#[cfg(feature = "bigdecimal")] use crate::bigdecimal::BigDecimal; test_type!(tinyint( mysql, "tinyint(4)", - Value::Int32(None), + Value::null_int32(), Value::int32(i8::MIN), Value::int32(i8::MAX) )); @@ -27,7 +25,7 @@ test_type!(tinyint1( test_type!(tinyint_unsigned( mysql, "tinyint(4) unsigned", - Value::Int32(None), + Value::null_int32(), Value::int32(0), Value::int32(255) )); @@ -35,7 +33,7 @@ test_type!(tinyint_unsigned( test_type!(year( mysql, "year", - Value::Int32(None), + Value::null_int32(), Value::int32(1984), Value::int32(2049) )); @@ -43,7 +41,7 @@ test_type!(year( test_type!(smallint( mysql, "smallint", - Value::Int32(None), + Value::null_int32(), Value::int32(i16::MIN), Value::int32(i16::MAX) )); @@ -51,7 +49,7 @@ test_type!(smallint( test_type!(smallint_unsigned( mysql, "smallint unsigned", - Value::Int32(None), + Value::null_int32(), Value::int32(0), Value::int32(65535) )); @@ -59,7 +57,7 @@ test_type!(smallint_unsigned( test_type!(mediumint( mysql, "mediumint", - Value::Int32(None), + Value::null_int32(), Value::int32(-8388608), Value::int32(8388607) )); @@ -67,7 +65,7 @@ test_type!(mediumint( test_type!(mediumint_unsigned( mysql, "mediumint unsigned", - Value::Int64(None), + Value::null_int64(), Value::int64(0), Value::int64(16777215) )); @@ -75,7 +73,7 @@ test_type!(mediumint_unsigned( test_type!(int( mysql, "int", - Value::Int32(None), + Value::null_int32(), Value::int32(i32::MIN), Value::int32(i32::MAX) )); @@ -83,7 +81,7 @@ test_type!(int( test_type!(int_unsigned( mysql, "int unsigned", - Value::Int64(None), + Value::null_int64(), Value::int64(0), Value::int64(2173158296i64), Value::int64(4294967295i64) @@ -100,21 +98,19 @@ test_type!(int_unsigned_not_null( test_type!(bigint( mysql, "bigint", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX) )); -#[cfg(feature = "bigdecimal")] test_type!(decimal( mysql, "decimal(10,2)", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()) )); // Highest mantissa on MySQL 
-#[cfg(feature = "bigdecimal")] test_type!(decimal_65_6( mysql, "decimal(65, 6)", @@ -123,22 +119,20 @@ test_type!(decimal_65_6( )?), )); -#[cfg(feature = "bigdecimal")] test_type!(float_decimal( mysql, "float", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::float(3.14) ) )); -#[cfg(feature = "bigdecimal")] test_type!(double_decimal( mysql, "double", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::double(3.14) @@ -148,95 +142,95 @@ test_type!(double_decimal( test_type!(bit1( mysql, "bit(1)", - (Value::Bytes(None), Value::Boolean(None)), - (Value::integer(0), Value::boolean(false)), - (Value::integer(1), Value::boolean(true)), + (Value::null_bytes(), Value::null_boolean()), + (Value::int32(0), Value::boolean(false)), + (Value::int32(1), Value::boolean(true)), )); test_type!(bit64( mysql, "bit(64)", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![0, 0, 0, 0, 0, 6, 107, 58]) )); -test_type!(char(mysql, "char(255)", Value::Text(None), Value::text("foobar"))); -test_type!(float(mysql, "float", Value::Float(None), Value::float(1.12345),)); -test_type!(double(mysql, "double", Value::Double(None), Value::double(1.12314124))); -test_type!(varchar(mysql, "varchar(255)", Value::Text(None), Value::text("foobar"))); -test_type!(tinytext(mysql, "tinytext", Value::Text(None), Value::text("foobar"))); -test_type!(text(mysql, "text", Value::Text(None), Value::text("foobar"))); -test_type!(longtext(mysql, "longtext", Value::Text(None), Value::text("foobar"))); +test_type!(char(mysql, "char(255)", Value::null_text(), Value::text("foobar"))); +test_type!(float(mysql, "float", Value::null_float(), Value::float(1.12345),)); +test_type!(double(mysql, "double", Value::null_double(), Value::double(1.12314124))); +test_type!(varchar( + mysql, + "varchar(255)", + Value::null_text(), + Value::text("foobar") +)); +test_type!(tinytext(mysql, "tinytext", Value::null_text(), Value::text("foobar"))); +test_type!(text(mysql, "text", Value::null_text(), Value::text("foobar"))); +test_type!(longtext(mysql, "longtext", Value::null_text(), Value::text("foobar"))); test_type!(binary(mysql, "binary(5)", Value::bytes(vec![1, 2, 3, 0, 0]))); test_type!(varbinary(mysql, "varbinary(255)", Value::bytes(vec![1, 2, 3]))); test_type!(mediumtext( mysql, "mediumtext", - Value::Text(None), + Value::null_text(), Value::text("foobar") )); test_type!(tinyblob( mysql, "tinyblob", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![1, 2, 3]) )); test_type!(mediumblob( mysql, "mediumblob", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![1, 2, 3]) )); test_type!(longblob( mysql, "longblob", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![1, 2, 3]) )); -test_type!(blob(mysql, "blob", Value::Bytes(None), Value::bytes(vec![1, 2, 3]))); +test_type!(blob(mysql, "blob", Value::null_bytes(), Value::bytes(vec![1, 2, 3]))); test_type!(enum( mysql, "enum('pollicle_dogs','jellicle_cats')", - Value::Enum(None), + Value::null_enum(), Value::enum_variant("jellicle_cats"), Value::enum_variant("pollicle_dogs") )); -#[cfg(feature = "json")] test_type!(json( mysql, "json", - Value::Json(None), + Value::null_json(), Value::json(serde_json::json!({"this": "is", "a": "json", "number": 2})) )); -#[cfg(feature = "chrono")] -test_type!(date(mysql, "date", 
Value::Date(None), { +test_type!(date(mysql, "date", Value::null_date(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-04-20T00:00:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(time( mysql, "time", - Value::Time(None), + Value::null_time(), Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] -test_type!(datetime(mysql, "datetime", Value::DateTime(None), { +test_type!(datetime(mysql, "datetime", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamp(mysql, "timestamp", { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs index a098fa7a5eb4..d69a8dbb3424 100644 --- a/quaint/src/tests/types/postgres.rs +++ b/quaint/src/tests/types/postgres.rs @@ -1,14 +1,12 @@ -#[cfg(feature = "bigdecimal")] mod bigdecimal; use crate::tests::test_api::*; -#[cfg(any(feature = "bigdecimal", feature = "uuid"))] use std::str::FromStr; test_type!(boolean( postgresql, "boolean", - Value::Boolean(None), + Value::null_boolean(), Value::boolean(true), Value::boolean(false), )); @@ -16,19 +14,19 @@ test_type!(boolean( test_type!(boolean_array( postgresql, "boolean[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::boolean(true), Value::boolean(false), Value::boolean(true), - Value::Boolean(None) + Value::null_boolean() ]), )); test_type!(int2( postgresql, "int2", - Value::Int32(None), + Value::null_int32(), Value::int32(i16::MIN), Value::int32(i16::MAX), )); @@ -36,7 +34,7 @@ test_type!(int2( test_type!(int2_with_int64( postgresql, "int2", - (Value::Int64(None), Value::Int32(None)), + (Value::null_int64(), Value::null_int32()), (Value::int64(i16::MIN), Value::int32(i16::MIN)), (Value::int64(i16::MAX), Value::int32(i16::MAX)) )); @@ -44,12 +42,12 @@ test_type!(int2_with_int64( test_type!(int2_array( postgresql, "int2[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::int32(1), Value::int32(2), Value::int32(3), - Value::Int32(None) + Value::null_int32() ]), )); @@ -57,15 +55,23 @@ test_type!(int2_array_with_i64( postgresql, "int2[]", ( - Value::array(vec![Value::int64(i16::MIN), Value::int64(i16::MAX), Value::Int64(None)]), - Value::array(vec![Value::int32(i16::MIN), Value::int32(i16::MAX), Value::Int32(None)]) + Value::array(vec![ + Value::int64(i16::MIN), + Value::int64(i16::MAX), + Value::null_int64() + ]), + Value::array(vec![ + Value::int32(i16::MIN), + Value::int32(i16::MAX), + Value::null_int32() + ]) ) )); test_type!(int4( postgresql, "int4", - Value::Int32(None), + Value::null_int32(), Value::int32(i32::MIN), Value::int32(i32::MAX), )); @@ -73,7 +79,7 @@ test_type!(int4( test_type!(int4_with_i64( postgresql, "int4", - (Value::Int64(None), Value::Int32(None)), + (Value::null_int64(), Value::null_int32()), (Value::int64(i32::MIN), Value::int32(i32::MIN)), (Value::int64(i32::MAX), Value::int32(i32::MAX)) )); @@ -81,23 +87,35 @@ test_type!(int4_with_i64( test_type!(int4_array( postgresql, "int4[]", - Value::Array(None), - Value::array(vec![Value::int32(i32::MIN), Value::int32(i32::MAX), Value::Int32(None)]), + Value::null_array(), + Value::array(vec![ + Value::int32(i32::MIN), + Value::int32(i32::MAX), + Value::null_int32() + ]), )); 
test_type!(int4_array_with_i64( postgresql, "int4[]", ( - Value::array(vec![Value::int64(i32::MIN), Value::int64(i32::MAX), Value::Int64(None)]), - Value::array(vec![Value::int32(i32::MIN), Value::int32(i32::MAX), Value::Int32(None)]) + Value::array(vec![ + Value::int64(i32::MIN), + Value::int64(i32::MAX), + Value::null_int64() + ]), + Value::array(vec![ + Value::int32(i32::MIN), + Value::int32(i32::MAX), + Value::null_int32() + ]) ) )); test_type!(int8( postgresql, "int8", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX), )); @@ -105,36 +123,36 @@ test_type!(int8( test_type!(int8_array( postgresql, "int8[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::int64(1), Value::int64(2), Value::int64(3), - Value::Int64(None) + Value::null_int64() ]), )); -test_type!(float4(postgresql, "float4", Value::Float(None), Value::float(1.234))); +test_type!(float4(postgresql, "float4", Value::null_float(), Value::float(1.234))); test_type!(float4_array( postgresql, "float4[]", - Value::Array(None), - Value::array(vec![Value::float(1.1234), Value::float(4.321), Value::Float(None)]) + Value::null_array(), + Value::array(vec![Value::float(1.1234), Value::float(4.321), Value::null_float()]) )); test_type!(float8( postgresql, "float8", - Value::Double(None), + Value::null_double(), Value::double(1.12345764), )); test_type!(float8_array( postgresql, "float8[]", - Value::Array(None), - Value::array(vec![Value::double(1.1234), Value::double(4.321), Value::Double(None)]) + Value::null_array(), + Value::array(vec![Value::double(1.1234), Value::double(4.321), Value::null_double()]) )); // NOTE: OIDs are unsigned 32-bit integers (see https://www.postgresql.org/docs/9.4/datatype-oid.html) @@ -142,7 +160,7 @@ test_type!(float8_array( test_type!(oid_with_i32( postgresql, "oid", - (Value::Int32(None), Value::Int64(None)), + (Value::null_int32(), Value::null_int64()), (Value::int32(i32::MAX), Value::int64(i32::MAX)), (Value::int32(u32::MIN as i32), Value::int64(u32::MIN)), )); @@ -150,7 +168,7 @@ test_type!(oid_with_i32( test_type!(oid_with_i64( postgresql, "oid", - Value::Int64(None), + Value::null_int64(), Value::int64(u32::MAX), Value::int64(u32::MIN), )); @@ -158,12 +176,12 @@ test_type!(oid_with_i64( test_type!(oid_array( postgresql, "oid[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::int64(1), Value::int64(2), Value::int64(3), - Value::Int64(None) + Value::null_int64() ]), )); @@ -188,234 +206,220 @@ test_type!(serial8( Value::int64(i64::MAX), )); -test_type!(char(postgresql, "char(6)", Value::Text(None), Value::text("foobar"))); +test_type!(char(postgresql, "char(6)", Value::null_text(), Value::text("foobar"))); test_type!(char_array( postgresql, "char(6)[]", - Value::Array(None), - Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::Text(None)]) + Value::null_array(), + Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::null_text()]) )); test_type!(varchar( postgresql, "varchar(255)", - Value::Text(None), + Value::null_text(), Value::text("foobar") )); test_type!(varchar_array( postgresql, "varchar(255)[]", - Value::Array(None), - Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::Text(None)]) + Value::null_array(), + Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::null_text()]) )); -test_type!(text(postgresql, "text", Value::Text(None), Value::text("foobar"))); +test_type!(text(postgresql, "text", Value::null_text(), 
Value::text("foobar"))); test_type!(text_array( postgresql, "text[]", - Value::Array(None), - Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::Text(None)]) + Value::null_array(), + Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::null_text()]) )); -test_type!(bit(postgresql, "bit(4)", Value::Text(None), Value::text("1001"))); +test_type!(bit(postgresql, "bit(4)", Value::null_text(), Value::text("1001"))); test_type!(bit_array( postgresql, "bit(4)[]", - Value::Array(None), - Value::array(vec![Value::text("1001"), Value::text("0110"), Value::Text(None)]) + Value::null_array(), + Value::array(vec![Value::text("1001"), Value::text("0110"), Value::null_text()]) )); test_type!(varbit( postgresql, "varbit(20)", - Value::Text(None), + Value::null_text(), Value::text("001010101") )); test_type!(varbit_array( postgresql, "varbit(20)[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::text("001010101"), Value::text("01101111"), - Value::Text(None) + Value::null_text() ]) )); -test_type!(inet(postgresql, "inet", Value::Text(None), Value::text("127.0.0.1"))); +test_type!(inet(postgresql, "inet", Value::null_text(), Value::text("127.0.0.1"))); test_type!(inet_array( postgresql, "inet[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::text("127.0.0.1"), Value::text("192.168.1.1"), - Value::Text(None) + Value::null_text() ]) )); -#[cfg(feature = "json")] test_type!(json( postgresql, "json", - Value::Json(None), + Value::null_json(), Value::json(serde_json::json!({"foo": "bar"})) )); -#[cfg(feature = "json")] test_type!(json_array( postgresql, "json[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::json(serde_json::json!({"foo": "bar"})), Value::json(serde_json::json!({"omg": false})), - Value::Json(None) + Value::null_json() ]) )); -#[cfg(feature = "json")] test_type!(jsonb( postgresql, "jsonb", - Value::Json(None), + Value::null_json(), Value::json(serde_json::json!({"foo": "bar"})) )); -#[cfg(feature = "json")] test_type!(jsonb_array( postgresql, "jsonb[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::json(serde_json::json!({"foo": "bar"})), Value::json(serde_json::json!({"omg": false})), - Value::Json(None) + Value::null_json() ]) )); -test_type!(xml(postgresql, "xml", Value::Xml(None), Value::xml("1",))); +test_type!(xml(postgresql, "xml", Value::null_xml(), Value::xml("1",))); test_type!(xml_array( postgresql, "xml[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::text("1"), Value::text("2"), - Value::Text(None) + Value::null_text() ]) )); -#[cfg(feature = "uuid")] test_type!(uuid( postgresql, "uuid", - Value::Uuid(None), + Value::null_uuid(), Value::uuid(uuid::Uuid::from_str("936DA01F-9ABD-4D9D-80C7-02AF85C822A8").unwrap()) )); -#[cfg(feature = "uuid")] test_type!(uuid_array( postgresql, "uuid[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::uuid(uuid::Uuid::from_str("936DA01F-9ABD-4D9D-80C7-02AF85C822A8").unwrap()), - Value::Uuid(None) + Value::null_uuid(), ]) )); -#[cfg(feature = "chrono")] test_type!(date( postgresql, "date", - Value::Date(None), + Value::null_date(), Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(date_array( postgresql, "date[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()), - Value::Date(None) + Value::null_date() ]) )); -#[cfg(feature = 
"chrono")] test_type!(time( postgresql, "time", - Value::Time(None), + Value::null_time(), Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(time_array( postgresql, "time[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()), - Value::Time(None) + Value::null_time() ]) )); -#[cfg(feature = "chrono")] -test_type!(timestamp(postgresql, "timestamp", Value::DateTime(None), { +test_type!(timestamp(postgresql, "timestamp", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] -test_type!(timestamp_array(postgresql, "timestamp[]", Value::Array(None), { +test_type!(timestamp_array(postgresql, "timestamp[]", Value::null_array(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::array(vec![ Value::datetime(dt.with_timezone(&chrono::Utc)), - Value::DateTime(None), + Value::null_datetime(), ]) })); -#[cfg(feature = "chrono")] -test_type!(timestamptz(postgresql, "timestamptz", Value::DateTime(None), { +test_type!(timestamptz(postgresql, "timestamptz", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] -test_type!(timestamptz_array(postgresql, "timestamptz[]", Value::Array(None), { +test_type!(timestamptz_array(postgresql, "timestamptz[]", Value::null_array(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::array(vec![ Value::datetime(dt.with_timezone(&chrono::Utc)), - Value::DateTime(None), + Value::null_datetime(), ]) })); test_type!(bytea( postgresql, "bytea", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(b"DEADBEEF".to_vec()) )); test_type!(bytea_array( postgresql, "bytea[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::bytes(b"DEADBEEF".to_vec()), Value::bytes(b"BEEFBEEF".to_vec()), - Value::Bytes(None) + Value::null_bytes() ]) )); diff --git a/quaint/src/tests/types/postgres/bigdecimal.rs b/quaint/src/tests/types/postgres/bigdecimal.rs index f79c23a8ad78..894b2c967629 100644 --- a/quaint/src/tests/types/postgres/bigdecimal.rs +++ b/quaint/src/tests/types/postgres/bigdecimal.rs @@ -4,7 +4,7 @@ use crate::bigdecimal::BigDecimal; test_type!(decimal( postgresql, "decimal(10,2)", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(BigDecimal::from_str("3.14")?) )); @@ -177,28 +177,28 @@ test_type!(decimal_128_6( test_type!(decimal_array( postgresql, "decimal(10,2)[]", - Value::Array(None), + Value::null_array(), Value::array(vec![BigDecimal::from_str("3.14")?, BigDecimal::from_str("5.12")?]) )); test_type!(money( postgresql, "money", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(BigDecimal::from_str("1.12")?) 
)); test_type!(money_array( postgresql, "money[]", - Value::Array(None), + Value::null_array(), Value::array(vec![BigDecimal::from_str("1.12")?, BigDecimal::from_str("1.12")?]) )); test_type!(float4( postgresql, "float4", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::float(1.123456) @@ -208,7 +208,7 @@ test_type!(float4( test_type!(float8( postgresql, "float8", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::double(1.123456) diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs index 39aca6de2d52..e16e77c55526 100644 --- a/quaint/src/tests/types/sqlite.rs +++ b/quaint/src/tests/types/sqlite.rs @@ -1,17 +1,15 @@ #![allow(clippy::approx_constant)] use crate::tests::test_api::sqlite_test_api; -#[cfg(feature = "chrono")] use crate::tests::test_api::TestApi; -#[cfg(feature = "chrono")] use crate::{ast::*, connector::Queryable}; -#[cfg(feature = "bigdecimal")] + use std::str::FromStr; test_type!(integer( sqlite, "INTEGER", - Value::Int32(None), + Value::null_int32(), Value::int32(i8::MIN), Value::int32(i8::MAX), Value::int32(i16::MIN), @@ -23,79 +21,74 @@ test_type!(integer( test_type!(big_int( sqlite, "BIGINT", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX), )); -test_type!(real(sqlite, "REAL", Value::Double(None), Value::double(1.12345))); +test_type!(real(sqlite, "REAL", Value::null_double(), Value::double(1.12345))); -#[cfg(feature = "bigdecimal")] test_type!(float_decimal( sqlite, "FLOAT", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::double(3.14) ) )); -#[cfg(feature = "bigdecimal")] test_type!(double_decimal( sqlite, "DOUBLE", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::double(3.14) ) )); -test_type!(text(sqlite, "TEXT", Value::Text(None), Value::text("foobar huhuu"))); +test_type!(text(sqlite, "TEXT", Value::null_text(), Value::text("foobar huhuu"))); test_type!(blob( sqlite, "BLOB", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(b"DEADBEEF".to_vec()) )); -test_type!(float(sqlite, "FLOAT", Value::Float(None), Value::double(1.23))); +test_type!(float(sqlite, "FLOAT", Value::null_float(), Value::double(1.23))); test_type!(double( sqlite, "DOUBLE", - Value::Double(None), + Value::null_double(), Value::double(1.2312313213) )); test_type!(boolean( sqlite, "BOOLEAN", - Value::Boolean(None), + Value::null_boolean(), Value::boolean(true), Value::boolean(false) )); -#[cfg(feature = "chrono")] test_type!(date( sqlite, "DATE", - Value::Date(None), + Value::null_date(), Value::date(chrono::NaiveDate::from_ymd_opt(1984, 1, 1).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime( sqlite, "DATETIME", - Value::DateTime(None), + Value::null_datetime(), Value::datetime(chrono::DateTime::from_str("2020-07-29T09:23:44.458Z").unwrap()) )); -#[cfg(feature = "chrono")] -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; let dt = chrono::Utc::now(); @@ -115,8 
+108,7 @@ async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(feature = "chrono")] -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; let dt = chrono::DateTime::parse_from_rfc2822("Tue, 1 Jul 2003 10:52:37 +0200") @@ -138,8 +130,7 @@ async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(feature = "chrono")] -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_custom(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; @@ -161,7 +152,7 @@ async fn test_type_text_datetime_custom(api: &mut dyn TestApi) -> crate::Result< Ok(()) } -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_get_int64_from_int32_field_fails(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("INT").await?; diff --git a/quaint/src/tests/upsert.rs b/quaint/src/tests/upsert.rs index c2808aca997b..7428963141f7 100644 --- a/quaint/src/tests/upsert.rs +++ b/quaint/src/tests/upsert.rs @@ -1,6 +1,6 @@ use super::test_api::*; use crate::{connector::Queryable, prelude::*}; -use test_macros::test_each_connector; +use quaint_test_macros::test_each_connector; #[test_each_connector(tags("postgresql", "sqlite"))] async fn upsert_on_primary_key(api: &mut dyn TestApi) -> crate::Result<()> { diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs index 9f4d9bcb5bcd..8424bc7fbb2b 100644 --- a/quaint/src/visitor.rs +++ b/quaint/src/visitor.rs @@ -121,22 +121,22 @@ pub trait Visitor<'a> { /// Visit a non-parameterized value. 
fn visit_raw_value(&mut self, value: Value<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, right: JsonType<'a>, not: bool) -> Result; - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> Result; #[cfg(any(feature = "postgresql", feature = "mysql"))] @@ -148,10 +148,38 @@ pub trait Visitor<'a> { #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_text_search_relevance(&mut self, text_search_relevance: TextSearchRelevance<'a>) -> Result; + fn visit_parameterized_enum(&mut self, variant: EnumVariant<'a>, name: Option<EnumName<'a>>) -> Result { + match name { + Some(name) => self.add_parameter(Value::enum_variant_with_name(variant, name)), + None => self.add_parameter(Value::enum_variant(variant)), + } + self.parameter_substitution()?; + + Ok(()) + } + + fn visit_parameterized_enum_array(&mut self, variants: Vec<EnumVariant<'a>>, name: Option<EnumName<'a>>) -> Result { + let enum_variants: Vec<_> = variants + .into_iter() + .map(|variant| variant.into_enum(name.clone())) + .collect(); + + self.add_parameter(Value::array(enum_variants)); + self.parameter_substitution()?; + + Ok(()) + } + /// A visit to a value we parameterize fn visit_parameterized(&mut self, value: Value<'a>) -> Result { - self.add_parameter(value); - self.parameter_substitution() + match value.typed { + ValueType::Enum(Some(variant), name) => self.visit_parameterized_enum(variant, name), + ValueType::EnumArray(Some(variants), name) => self.visit_parameterized_enum_array(variants, name), + _ => { + self.add_parameter(value); + self.parameter_substitution() + } + } } /// The join statements in the query @@ -915,7 +943,7 @@ pub trait Visitor<'a> { self.write(" ")?; self.visit_expression(*right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] Compare::JsonCompare(json_compare) => match json_compare { JsonCompare::ArrayContains(left, right) => self.visit_json_array_contains(*left, *right, false), JsonCompare::ArrayNotContains(left, right) => self.visit_json_array_contains(*left, *right, true), @@ -997,7 +1025,7 @@ pub trait Visitor<'a> { FunctionType::AggregateToString(agg) => { self.visit_aggregate_to_string(agg.value.as_ref().clone())?; } - #[cfg(all(feature = "json", feature = "postgresql"))] + #[cfg(feature = "postgresql")]
FunctionType::RowToJson(row_to_json) => { self.write("ROW_TO_JSON")?; self.surround_with("(", ")", |ref mut s| s.visit_table(row_to_json.expr, false))? @@ -1029,19 +1057,19 @@ pub trait Visitor<'a> { self.write("COALESCE")?; self.surround_with("(", ")", |s| s.visit_columns(coalesce.exprs))?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtract(json_extract) => { self.visit_json_extract(json_extract)?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractFirstArrayElem(extract) => { self.visit_json_extract_first_array_item(extract)?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractLastArrayElem(extract) => { self.visit_json_extract_last_array_item(extract)?; } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonUnquote(unquote) => { self.visit_json_unquote(unquote)?; } diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index 111b43ed8ebe..bf1550b96c31 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -1,5 +1,5 @@ use super::Visitor; -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use crate::prelude::{JsonExtract, JsonType, JsonUnquote}; use crate::{ ast::{ @@ -8,7 +8,7 @@ use crate::{ }, error::{Error, ErrorKind}, prelude::{Aliasable, Average, Query}, - visitor, Value, + visitor, Value, ValueType, }; use std::{convert::TryFrom, fmt::Write, iter}; @@ -17,7 +17,6 @@ static GENERATED_KEYS: &str = "@generated_keys"; /// A visitor to generate queries for the SQL Server database. /// /// The returned parameter values can be used directly with the tiberius crate. 
-#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub struct Mssql<'a> { query: String, parameters: Vec>, @@ -310,27 +309,27 @@ impl<'a> Visitor<'a> for Mssql<'a> { } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Float(d) => d.map(|f| match f { + let res = match value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("0x{}", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(if b { 1 } else { 0 })), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Array(_) => { + ValueType::Text(t) => t.map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.map(|e| self.write(e)), + ValueType::Bytes(b) => b.map(|b| self.write(format!("0x{}", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(if b { 1 } else { 0 })), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in T-SQL."; let kind = ErrorKind::conversion(msg); @@ -339,33 +338,29 @@ impl<'a> Visitor<'a> for Mssql<'a> { return Err(builder.build()); } - #[cfg(feature = "json")] - Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), - #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), - #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| { + + ValueType::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), + + ValueType::Numeric(r) => r.map(|r| self.write(r)), + ValueType::Uuid(uuid) => uuid.map(|uuid| { let s = format!("CONVERT(uniqueidentifier, N'{}')", uuid.hyphenated()); self.write(s) }), - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.map(|dt| { + ValueType::DateTime(dt) => dt.map(|dt| { let s = format!("CONVERT(datetimeoffset, N'{}')", dt.to_rfc3339()); self.write(s) }), - #[cfg(feature = "chrono")] - Value::Date(date) => date.map(|date| { + ValueType::Date(date) => date.map(|date| { let s = format!("CONVERT(date, N'{date}')"); self.write(s) }), - #[cfg(feature = "chrono")] - Value::Time(time) => time.map(|time| { + ValueType::Time(time) => time.map(|time| { let s = format!("CONVERT(time, N'{time}')"); self.write(s) }), // Style 3 is keep all whitespace + internal DTD processing: // https://docs.microsoft.com/en-us/sql/t-sql/functions/cast-and-convert-transact-sql?redirectedfrom=MSDN&view=sql-server-ver15#xml-styles - Value::Xml(cow) => cow.map(|cow| self.write(format!("CONVERT(XML, N'{cow}', 3)"))), + ValueType::Xml(cow) => cow.map(|cow| self.write(format!("CONVERT(XML, N'{cow}', 3)"))), }; match res { @@ -394,7 +389,7 @@ impl<'a> Visitor<'a> for Mssql<'a> { 
self.visit_parameterized(limit)?; self.write(" ROWS ONLY") } - (None, Some(offset)) if self.order_by_set || offset.as_i64().map(|i| i > 0).unwrap_or(false) => { + (None, Some(offset)) if self.order_by_set || offset.typed.as_i64().map(|i| i > 0).unwrap_or(false) => { add_ordering(self)?; self.write(" OFFSET ")?; @@ -636,12 +631,12 @@ impl<'a> Visitor<'a> for Mssql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains( &mut self, _left: Expression<'a>, @@ -651,12 +646,12 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("JSON filtering is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result { unimplemented!("JSON_TYPE is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on MSSQL") } @@ -684,7 +679,7 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("Full-text search is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item( &mut self, _extract: crate::prelude::JsonExtractLastArrayElem<'a>, @@ -692,7 +687,7 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("JSON filtering is not yet supported on MSSQL") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item( &mut self, _extract: crate::prelude::JsonExtractFirstArrayElem<'a>, @@ -752,11 +747,11 @@ mod tests { #[test] fn test_aliased_null() { let expected_sql = "SELECT @P1 AS [test]"; - let query = Select::default().value(val!(Value::Int32(None)).alias("test")); + let query = Select::default().value(val!(ValueType::Int32(None)).alias("test")); let (sql, params) = Mssql::build(query).unwrap(); assert_eq!(expected_sql, sql); - assert_eq!(vec![Value::Int32(None)], params); + assert_eq!(vec![Value::null_int32()], params); } #[test] @@ -1195,7 +1190,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Mssql::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Mssql::build(Select::default().value(ValueType::Text(None).raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } @@ -1248,7 +1243,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Mssql::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); assert_eq!("SELECT '{\"foo\":\"bar\"}'", sql); @@ -1256,7 +1251,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = 
Mssql::build(Select::default().value(uuid.raw())).unwrap(); @@ -1270,7 +1264,6 @@ } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Mssql::build(Select::default().value(dt.raw())).unwrap(); diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index 68bc62ec617f..26d0f0d5fd65 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -8,7 +8,6 @@ use std::fmt::{self, Write}; /// A visitor to generate queries for the MySQL database. /// /// The returned parameter values can be used directly with the mysql crate. -#[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct Mysql<'a> { query: String, parameters: Vec<Value<'a>>, @@ -34,7 +33,6 @@ impl<'a> Mysql<'a> { } fn visit_numeric_comparison(&mut self, left: Expression<'a>, right: Expression<'a>, sign: &str) -> visitor::Result { - #[cfg(feature = "json")] fn json_to_quaint_value<'a>(json: serde_json::Value) -> crate::Result<Value<'a>> { match json { serde_json::Value::String(str) => Ok(Value::text(str)), @@ -61,7 +59,6 @@ impl<'a> Mysql<'a> { } match (left, right) { - #[cfg(feature = "json")] (left, right) if left.is_json_value() && right.is_fun_retuning_json() => { let quaint_value = json_to_quaint_value(left.into_json_value().unwrap())?; @@ -69,7 +66,7 @@ self.write(format!(" {sign} "))?; self.visit_expression(right)?; } - #[cfg(feature = "json")] + (left, right) if left.is_fun_retuning_json() && right.is_json_value() => { let quaint_value = json_to_quaint_value(right.into_json_value().unwrap())?; @@ -122,27 +119,27 @@ impl<'a> Visitor<'a> for Mysql<'a> { } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Float(d) => d.map(|f| match f { + let res = match &value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("x'{}'", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(b)), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Array(_) => { + ValueType::Text(t) => t.as_ref().map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.as_ref().map(|e| self.write(e)), + ValueType::Bytes(b) => b.as_ref().map(|b| self.write(format!("x'{}'", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(b)), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in MySQL."; let kind = ErrorKind::conversion(msg); @@ -151,25 +148,21 @@ impl<'a> Visitor<'a> for Mysql<'a> { return Err(builder.build()); } - #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), - #[cfg(feature = "json")] -
Value::Json(j) => match j { + + ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), + + ValueType::Json(j) => match j { Some(ref j) => { let s = serde_json::to_string(&j)?; Some(self.write(format!("CONVERT('{s}', JSON)"))) } None => None, }, - #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] - Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] - Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), - Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), + ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), + ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), + ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), + ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), }; match res { @@ -298,8 +291,20 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.write(" OFFSET ")?; self.visit_parameterized(offset) } - (None, Some(Value::Int32(Some(offset)))) if offset < 1 => Ok(()), - (None, Some(Value::Int64(Some(offset)))) if offset < 1 => Ok(()), + ( + None, + Some(Value { + typed: ValueType::Int32(Some(offset)), + .. + }), + ) if offset < 1 => Ok(()), + ( + None, + Some(Value { + typed: ValueType::Int64(Some(offset)), + .. + }), + ) if offset < 1 => Ok(()), (None, Some(offset)) => { self.write(" LIMIT ")?; self.visit_parameterized(Value::from(9_223_372_036_854_775_807i64))?; @@ -321,7 +326,6 @@ impl<'a> Visitor<'a> for Mysql<'a> { } fn visit_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { - #[cfg(feature = "json")] { if right.is_json_expr() || left.is_json_expr() { self.surround_with("(", ")", |ref mut s| { @@ -345,15 +349,9 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.visit_regular_equality_comparison(left, right) } } - - #[cfg(not(feature = "json"))] - { - self.visit_regular_equality_comparison(left, right) - } } fn visit_not_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { - #[cfg(feature = "json")] { if right.is_json_expr() || left.is_json_expr() { self.surround_with("(", ")", |ref mut s| { @@ -377,14 +375,9 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.visit_regular_difference_comparison(left, right) } } - - #[cfg(not(feature = "json"))] - { - self.visit_regular_difference_comparison(left, right) - } } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result { if json_extract.extract_as_string { self.write("JSON_UNQUOTE(")?; @@ -409,7 +402,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result { self.write("JSON_CONTAINS(")?; self.visit_expression(left)?; @@ -424,7 +417,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = 
"mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result { self.write("(")?; self.write("JSON_TYPE")?; @@ -438,27 +431,27 @@ impl<'a> Visitor<'a> for Mysql<'a> { match json_type { JsonType::Array => { - self.visit_expression(Value::text("ARRAY").into())?; + self.visit_expression(Expression::from(Value::text("ARRAY")))?; } JsonType::Boolean => { - self.visit_expression(Value::text("BOOLEAN").into())?; + self.visit_expression(Expression::from(Value::text("BOOLEAN")))?; } JsonType::Number => { - self.visit_expression(Value::text("INTEGER").into())?; + self.visit_expression(Expression::from(Value::text("INTEGER")))?; self.write(" OR JSON_TYPE(")?; self.visit_expression(left)?; self.write(")")?; self.write(" = ")?; - self.visit_expression(Value::text("DOUBLE").into())?; + self.visit_expression(Expression::from(Value::text("DOUBLE")))?; } JsonType::Object => { - self.visit_expression(Value::text("OBJECT").into())?; + self.visit_expression(Expression::from(Value::text("OBJECT")))?; } JsonType::String => { - self.visit_expression(Value::text("STRING").into())?; + self.visit_expression(Expression::from(Value::text("STRING")))?; } JsonType::Null => { - self.visit_expression(Value::text("NULL").into())?; + self.visit_expression(Expression::from(Value::text("NULL")))?; } JsonType::ColumnRef(column) => { self.write("JSON_TYPE")?; @@ -536,7 +529,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { self.write("JSON_EXTRACT(")?; self.visit_expression(*extract.expr.clone())?; @@ -549,7 +542,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { self.write("JSON_EXTRACT(")?; self.visit_expression(*extract.expr)?; @@ -560,7 +553,7 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result { self.write("JSON_UNQUOTE(")?; self.visit_expression(*json_unquote.expr)?; @@ -727,7 +720,6 @@ mod tests { ); } - #[cfg(feature = "json")] #[test] fn equality_with_a_json_value() { let expected = expected_values( @@ -742,7 +734,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn difference_with_a_json_value() { let expected = expected_values( @@ -760,7 +751,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Mysql::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Mysql::build(Select::default().value(ValueType::Text(None).raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } @@ -788,7 +779,7 @@ mod tests { #[test] fn test_raw_bytes() { - let (sql, params) = Mysql::build(Select::default().value(Value::bytes(vec![1, 2, 3]).raw())).unwrap(); + let (sql, params) = Mysql::build(Select::default().value(ValueType::bytes(vec![1, 2, 3]).raw())).unwrap(); assert_eq!("SELECT x'010203'", sql); assert!(params.is_empty()); } @@ -806,7 +797,7 @@ mod tests { #[test] fn 
test_raw_char() { - let (sql, params) = Mysql::build(Select::default().value(Value::character('a').raw())).unwrap(); + let (sql, params) = Mysql::build(Select::default().value(ValueType::character('a').raw())).unwrap(); assert_eq!("SELECT 'a'", sql); assert!(params.is_empty()); } @@ -859,7 +850,7 @@ } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Mysql::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); assert_eq!("SELECT CONVERT('{\"foo\":\"bar\"}', JSON)", sql); @@ -867,7 +858,6 @@ } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Mysql::build(Select::default().value(uuid.raw())).unwrap(); @@ -878,7 +868,6 @@ } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Mysql::build(Select::default().value(dt.raw())).unwrap(); @@ -915,9 +904,9 @@ } #[test] - #[cfg(feature = "json")] + fn test_json_negation() { - let conditions = ConditionTree::not("json".equals(Value::Json(Some(serde_json::Value::Null)))); + let conditions = ConditionTree::not("json".equals(ValueType::Json(Some(serde_json::Value::Null)))); let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap(); assert_eq!( @@ -927,9 +916,9 @@ } #[test] - #[cfg(feature = "json")] + fn test_json_not_negation() { - let conditions = ConditionTree::not("json".not_equals(Value::Json(Some(serde_json::Value::Null)))); + let conditions = ConditionTree::not("json".not_equals(ValueType::Json(Some(serde_json::Value::Null)))); let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap(); assert_eq!( @@ -944,7 +933,7 @@ let table_2 = "table2"; let join = table_2.alias("j").on(("j", "id").equals(Column::from(("t1", "id2")))); - let a = table_1.clone().alias("t1"); + let a = table_1.alias("t1"); let selection = Select::from_table(a).column(("t1", "id")).inner_join(join); let id1 = Column::from((table_1, "id")); @@ -965,7 +954,7 @@ let table_2 = "table2"; let join = table_2.alias("j").on(("j", "id").equals(Column::from(("t1", "id2")))); - let a = table_1.clone().alias("t1"); + let a = table_1.alias("t1"); let selection = Select::from_table(a).column(("t1", "id")).inner_join(join); let id1 = Column::from((table_1, "id")); diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 8dc02180881b..fda8a6132037 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -2,13 +2,15 @@ use crate::{ ast::*, visitor::{self, Visitor}, }; -use std::fmt::{self, Write}; +use std::{ + fmt::{self, Write}, + ops::Deref, +}; /// A visitor to generate queries for the PostgreSQL database. /// /// The returned parameter values implement the `ToSql` trait from postgres and /// can be used directly with the database. -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct Postgres<'a> { query: String, parameters: Vec<Value<'a>>, @@ -47,6 +49,98 @@ impl<'a> Visitor<'a> for Postgres<'a> { self.write(self.parameters.len()) } + fn visit_parameterized_enum(&mut self, variant: EnumVariant<'a>, name: Option<EnumName<'a>>) -> visitor::Result { + self.add_parameter(variant.into_text()); + + // Since enums are user-defined custom types, tokio-postgres fires an additional query + // when parameterizing values of type enum to know which custom type the value refers to.
+ // Casting the enum value to `TEXT` avoids this roundtrip since `TEXT` is a builtin type. + if let Some(enum_name) = name { + self.surround_with("CAST(", ")", |ref mut s| { + s.parameter_substitution()?; + s.write("::text")?; + s.write(" AS ")?; + if let Some(schema_name) = enum_name.schema_name { + s.surround_with_backticks(schema_name.deref())?; + s.write(".")? + } + s.surround_with_backticks(enum_name.name.deref()) + })?; + } else { + self.parameter_substitution()?; + } + + Ok(()) + } + + fn visit_parameterized_enum_array( + &mut self, + variants: Vec<EnumVariant<'a>>, + name: Option<EnumName<'a>>, + ) -> visitor::Result { + let len = variants.len(); + + // Since enums are user-defined custom types, tokio-postgres fires an additional query + // when parameterizing values of type enum to know which custom type the value refers to. + // Casting the enum value to `TEXT` avoids this roundtrip since `TEXT` is a builtin type. + if let Some(enum_name) = name.clone() { + self.surround_with("ARRAY[", "]", |s| { + for (i, variant) in variants.into_iter().enumerate() { + s.add_parameter(variant.into_text()); + s.parameter_substitution()?; + s.write("::text")?; + + if i < (len - 1) { + s.write(", ")?; + } + } + + Ok(()) + })?; + + self.write("::")?; + if let Some(schema_name) = enum_name.schema_name { + self.surround_with_backticks(schema_name.deref())?; + self.write(".")? + } + self.surround_with_backticks(enum_name.name.deref())?; + self.write("[]")?; + } else { + self.visit_parameterized(Value::array( + variants.into_iter().map(|variant| variant.into_enum(name.clone())), + ))?; + } + + Ok(()) + } + + /// A database column identifier + fn visit_column(&mut self, column: Column<'a>) -> visitor::Result { + match column.table { + Some(table) => { + self.visit_table(table, false)?; + self.write(".")?; + self.delimited_identifiers(&[&*column.name])?; + } + _ => self.delimited_identifiers(&[&*column.name])?, + }; + + if column.is_enum && column.is_selected { + if column.is_list { + self.write("::text[]")?; + } else { + self.write("::text")?; + } + } + + if let Some(alias) = column.alias { + self.write(" AS ")?; + self.delimited_identifiers(&[&*alias])?; + } + + Ok(()) + } + fn visit_limit_and_offset(&mut self, limit: Option<Value<'a>>, offset: Option<Value<'a>>) -> visitor::Result { match (limit, offset) { (Some(limit), Some(offset)) => { @@ -69,32 +163,32 @@ } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("E'{}'", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(b)), - Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Float(d) => d.map(|f| match f { + let res = match &value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Text(t) => t.as_ref().map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.as_ref().map(|e| self.write(e)), + ValueType::Bytes(b) => b.as_ref().map(|b| self.write(format!("E'{}'", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(b)), + ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + 
ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Array(ary) => ary.map(|ary| { + ValueType::Array(ary) => ary.as_ref().map(|ary| { self.surround_with("'{", "}'", |ref mut s| { let len = ary.len(); - for (i, item) in ary.into_iter().enumerate() { + for (i, item) in ary.iter().enumerate() { s.write(item)?; if i < len - 1 { @@ -105,18 +199,41 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) }) }), - #[cfg(feature = "json")] - Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), - #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), - #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] - Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] - Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + ValueType::EnumArray(variants, name) => variants.as_ref().map(|variants| { + self.surround_with("ARRAY[", "]", |ref mut s| { + let len = variants.len(); + + for (i, item) in variants.iter().enumerate() { + s.surround_with("'", "'", |t| t.write(item))?; + + if i < len - 1 { + s.write(",")?; + } + } + + Ok(()) + })?; + + if let Some(enum_name) = name { + self.write("::")?; + if let Some(schema_name) = &enum_name.schema_name { + self.surround_with_backticks(schema_name.deref())?; + self.write(".")? + } + self.surround_with_backticks(enum_name.name.deref())?; + } + + Ok(()) + }), + ValueType::Json(j) => j + .as_ref() + .map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), + + ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), + ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), + ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), + ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), + ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), }; match res { @@ -229,14 +346,12 @@ impl<'a> Visitor<'a> for Postgres<'a> { fn visit_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { // LHS must be cast to json/xml-text if the right is a json/xml-text value and vice versa. let right_cast = match left { - #[cfg(feature = "json")] _ if left.is_json_value() => "::jsonb", _ if left.is_xml_value() => "::text", _ => "", }; let left_cast = match right { - #[cfg(feature = "json")] _ if right.is_json_value() => "::jsonb", _ if right.is_xml_value() => "::text", _ => "", @@ -254,14 +369,12 @@ impl<'a> Visitor<'a> for Postgres<'a> { fn visit_not_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result { // LHS must be cast to json/xml-text if the right is a json/xml-text value and vice versa. 
let right_cast = match left { - #[cfg(feature = "json")] _ if left.is_json_value() => "::jsonb", _ if left.is_xml_value() => "::text", _ => "", }; let left_cast = match right { - #[cfg(feature = "json")] _ if right.is_json_value() => "::jsonb", _ if right.is_xml_value() => "::text", _ => "", @@ -276,7 +389,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result { match json_extract.path { #[cfg(feature = "mysql")] @@ -316,7 +429,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result { self.write("(")?; self.visit_expression(*json_unquote.expr)?; @@ -326,7 +439,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result { if not { self.write("( NOT ")?; @@ -343,7 +456,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { self.write("(")?; self.visit_expression(*extract.expr)?; @@ -353,7 +466,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { self.write("(")?; self.visit_expression(*extract.expr)?; @@ -363,7 +476,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result { self.write("JSONB_TYPEOF")?; self.write("(")?; @@ -723,7 +836,6 @@ mod tests { assert_eq!(expected_sql, sql); } - #[cfg(feature = "json")] #[test] fn equality_with_a_json_value() { let expected = expected_values( @@ -738,7 +850,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn equality_with_a_lhs_json_value() { // A bit artificial, but checks if the ::jsonb casting is done correctly on the right side as well. 
@@ -755,7 +866,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn difference_with_a_json_value() { let expected = expected_values( @@ -771,7 +881,6 @@ mod tests { assert_eq!(expected.1, params); } - #[cfg(feature = "json")] #[test] fn difference_with_a_lhs_json_value() { let expected = expected_values( @@ -849,7 +958,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Postgres::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Postgres::build(Select::default().value(Value::null_text().raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } @@ -901,7 +1010,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Postgres::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); @@ -910,7 +1019,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Postgres::build(Select::default().value(uuid.raw())).unwrap(); @@ -921,7 +1029,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Postgres::build(Select::default().value(dt.raw())).unwrap(); @@ -937,6 +1044,18 @@ mod tests { assert_eq!(r#"SELECT "foo".* FROM "foo" WHERE "bar" ILIKE $1"#, sql); } + #[test] + fn test_raw_enum_array() { + let enum_array = Value::enum_array_with_name( + vec![EnumVariant::new("A"), EnumVariant::new("B")], + EnumName::new("Alphabet", Some("foo")), + ); + let (sql, params) = Postgres::build(Select::default().value(enum_array.raw())).unwrap(); + + assert_eq!("SELECT ARRAY['A','B']::\"foo\".\"Alphabet\"", sql); + assert!(params.is_empty()); + } + #[test] fn test_like_cast_to_string() { let expected = expected_values( diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 91d3240df67d..9c15ef651694 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -10,7 +10,6 @@ use std::fmt::{self, Write}; /// /// The returned parameter values implement the `ToSql` trait from rusqlite and /// can be used directly with the database. 
-#[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] pub struct Sqlite<'a> { query: String, parameters: Vec>, @@ -74,27 +73,27 @@ impl<'a> Visitor<'a> for Sqlite<'a> { } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("x'{}'", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(b)), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Float(d) => d.map(|f| match f { + let res = match &value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Text(t) => t.as_ref().map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.as_ref().map(|e| self.write(e)), + ValueType::Bytes(b) => b.as_ref().map(|b| self.write(format!("x'{}'", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(b)), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Array(_) => { + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in SQLite."; let kind = ErrorKind::conversion(msg); @@ -103,25 +102,21 @@ impl<'a> Visitor<'a> for Sqlite<'a> { return Err(builder.build()); } - #[cfg(feature = "json")] - Value::Json(j) => match j { + + ValueType::Json(j) => match j { Some(ref j) => { let s = serde_json::to_string(j)?; Some(self.write(format!("'{s}'"))) } None => None, }, - #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), - #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] - Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] - Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] - Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), - Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), + + ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), + ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), + ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), + ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), + ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), }; match res { @@ -279,12 +274,12 @@ impl<'a> Visitor<'a> for Sqlite<'a> { }) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result { unimplemented!("JSON filtering is not 
yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains( &mut self, _left: Expression<'a>, @@ -294,7 +289,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result { unimplemented!("JSON_TYPE is not yet supported on SQLite") } @@ -319,17 +314,17 @@ impl<'a> Visitor<'a> for Sqlite<'a> { unimplemented!("Full-text search is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, _extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, _extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } @@ -435,11 +430,11 @@ mod tests { #[test] fn test_aliased_null() { let expected_sql = "SELECT ? 
AS `test`"; - let query = Select::default().value(val!(Value::Text(None)).alias("test")); + let query = Select::default().value(val!(Value::null_text()).alias("test")); let (sql, params) = Sqlite::build(query).unwrap(); assert_eq!(expected_sql, sql); - assert_eq!(vec![Value::Text(None)], params); + assert_eq!(vec![Value::null_text()], params); } #[test] @@ -864,7 +859,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Sqlite::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Sqlite::build(Select::default().value(Value::null_text().raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } @@ -916,7 +911,7 @@ mod tests { } #[test] - #[cfg(feature = "json")] + fn test_raw_json() { let (sql, params) = Sqlite::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap(); assert_eq!("SELECT '{\"foo\":\"bar\"}'", sql); @@ -924,7 +919,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Sqlite::build(Select::default().value(uuid.raw())).unwrap(); @@ -935,7 +929,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Sqlite::build(Select::default().value(dt.raw())).unwrap(); diff --git a/query-engine/black-box-tests/Cargo.toml b/query-engine/black-box-tests/Cargo.toml index 8bea1998d7c1..056ee2bcdb43 100644 --- a/query-engine/black-box-tests/Cargo.toml +++ b/query-engine/black-box-tests/Cargo.toml @@ -14,3 +14,4 @@ tokio.workspace = true user-facing-errors.workspace = true insta = "1.7.1" enumflags2 = "0.7" +query-engine-metrics = {path = "../metrics"} diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index e3826d9cafe2..3397de75af99 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -15,6 +15,7 @@ mod smoke_tests { } #[connector_test] + #[rustfmt::skip] async fn expected_metrics_rendered(r: Runner) -> TestResult<()> { let mut qe_cmd = query_engine_cmd(r.prisma_dml(), "57582"); qe_cmd.arg("--enable-metrics"); @@ -54,17 +55,57 @@ mod smoke_tests { .text() .await .unwrap(); + + // I would have loved to use insta in here and check the snapshot but the order of the metrics is not guaranteed + // And I opted for the manual checking of invariant data that provided enough confidence instead - assert!(metrics.contains("prisma_client_queries_total counter")); - assert!(metrics.contains("prisma_datasource_queries_total counter")); - assert!(metrics.contains("prisma_pool_connections_open counter")); - assert!(metrics.contains("prisma_client_queries_active gauge")); - assert!(metrics.contains("prisma_client_queries_wait gauge")); - assert!(metrics.contains("prisma_pool_connections_busy gauge")); - assert!(metrics.contains("prisma_pool_connections_idle gauge")); - assert!(metrics.contains("prisma_pool_connections_opened_total gauge")); - assert!(metrics.contains("prisma_datasource_queries_duration_histogram_ms_bucket")); - }) - .await + // counters + assert_eq!(metrics.matches("# HELP prisma_client_queries_total The total number of Prisma Client queries executed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_total The total number of datasource queries executed").count(), 1); + 
assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_closed_total The total number of pool connections closed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_closed_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_opened_total The total number of pool connections opened").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_opened_total counter").count(), 1); + + // gauges + assert_eq!(metrics.matches("# HELP prisma_client_queries_active The number of currently active Prisma Client queries").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_active gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for a free connection").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_busy gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_idle The number of pool connections that are not busy running a query").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_idle gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_open The number of pool connections currently open").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_open gauge").count(), 1); + + // histograms + assert_eq!(metrics.matches("# HELP prisma_client_queries_duration_histogram_ms The distribution of the time Prisma Client queries took to run end to end").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_duration_histogram_ms histogram").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait_histogram_ms The distribution of the time all datasource queries spent waiting for a free connection").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait_histogram_ms histogram").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_duration_histogram_ms The distribution of the time datasource queries took to run").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_duration_histogram_ms histogram").count(), 1); + + // Check that exist as many metrics as being accepted + let accepted_metric_count = query_engine_metrics::ACCEPT_LIST.len(); + let displayed_metric_count = metrics.matches("# TYPE").count(); + let non_prisma_metric_count = displayed_metric_count - metrics.matches("# TYPE prisma").count(); + + assert_eq!(displayed_metric_count, accepted_metric_count); + assert_eq!(non_prisma_metric_count, 0); + + }).await } } diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index e6821193a628..97d19467879a 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -1,6 +1,5 @@ # Query Engine Test Kit - A Full Guide -The test kit is a (currently incomplete) port of the Scala test kit, located in `../connector-test-kit`. -It's fully focused on integration testing the query engine through request-response assertions. 
+The test kit is focused on integration testing the query engine through request-response assertions.

## Test organization

@@ -35,8 +34,10 @@ Contains the main bulk of logic to make tests run, which is mostly invisible to
Tests are executed in the context of *one* _connector_ (with version) and _runner_. Some tests may only be specified to run for a subset of connectors or versions, in which case they will be skipped. Testing all connectors at once is not supported; CI, for example, will run all the different connectors and versions concurrently in separate runs.

### Configuration
+
Tests must be configured to run. There's a set of env vars that is always useful to have and an optional one.
Always useful to have:
+
```shell
export WORKSPACE_ROOT=/path/to/engines/repository/root
```
@@ -54,6 +55,7 @@ As previously stated, the above can be omitted in favor of the `.test_config` co
  "version": "10"
}
```
+
The config file must be either in the current working folder from which you invoke a test run or in `$WORKSPACE_ROOT`. It's recommended to use the file-based config as it's easier to switch between providers with an open IDE (reloading env vars would usually require reloading the IDE). The workspace root makefile contains a series of convenience commands to set up different connector test configs, e.g. `make dev-postgres10` sets up the correct test config file for the tests to pick up.
@@ -63,6 +65,7 @@ On the note of docker containers: Most connectors require an endpoint to run aga
If you choose to set up the databases yourself, please note the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) to set up user, password and database for the test user.

### Running
+
Note that by default tests run concurrently.
- VSCode should automatically detect tests and display `run test`.
@@ -71,6 +74,36 @@ Note that by default tests run concurrently.
- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test.
- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index.
+#### Running tests through driver adapters
+
+The query engine is able to delegate query execution to JavaScript through driver adapters.
+This means that, instead of drivers implemented in Rust, a layer of adapters over Node.js
+drivers is the code that actually communicates with the databases. See the [`adapter-*` packages in prisma/prisma](https://github.com/prisma/prisma/tree/main/packages).
+
+To run tests through a driver adapter, you should also configure the following environment variables:
+
+* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries. This is a Node.js process running a program that reads the queries to run from STDIN and returns responses to STDOUT. The connector kit follows a JSON-RPC protocol for this communication.
+* `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter.
+* `DRIVER_ADAPTER_CONFIG`: a JSON string with the configuration for the driver adapter. This is adapter specific.
See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples of how to configure the driver adapters.
+
+Example:
+
+```shell
+export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"
+export DRIVER_ADAPTER=neon
+export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }'
+```
+
+We have provided helpers to run the query-engine tests with driver adapters. These helpers set all the required environment
+variables for you:
+
+```shell
+DRIVER_ADAPTER=$adapter make test-qe
+```
+
+Where `$adapter` is one of the supported adapters: `neon`, `planetscale`, `libsql`.
+
+
## Authoring tests

The following is an example of how to write a new test suite, as extending or changing an existing one follows the same rules and considerations.
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs
index 0db7983e3d83..54f2cd040ca9 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs
@@ -2,7 +2,7 @@ use chrono::{DateTime, FixedOffset};
 use prisma_value::encode_bytes;
 use query_tests_setup::{TestError, TestResult};
 
-pub fn fmt_query_raw(query: &str, params: Vec<RawParam>) -> String {
+pub fn fmt_query_raw(query: &str, params: impl IntoIterator<Item = RawParam>) -> String {
     let params = params_to_json(params);
     let params = serde_json::to_string(&params).unwrap();
@@ -13,7 +13,7 @@
     )
 }
 
-pub fn fmt_execute_raw(query: &str, params: Vec<RawParam>) -> String {
+pub fn fmt_execute_raw(query: &str, params: impl IntoIterator<Item = RawParam>) -> String {
     let params = params_to_json(params);
     let params = serde_json::to_string(&params).unwrap();
@@ -66,7 +66,7 @@ impl RawParam {
     }
 }
 
-fn params_to_json(params: Vec<RawParam>) -> Vec<serde_json::Value> {
+fn params_to_json(params: impl IntoIterator<Item = RawParam>) -> Vec<serde_json::Value> {
     params.into_iter().map(serde_json::Value::from).collect::<Vec<_>>()
 }
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs
index e45cef8ac306..9aa34a943560 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs
@@ -213,7 +213,7 @@
         Ok(())
     }
 
-    #[connector_test]
+    #[connector_test(exclude(JS))]
     async fn batch_queries_failure(mut runner: Runner) -> TestResult<()> {
         // Tx expires after five seconds.
         let tx_id = runner.start_tx(5000, 5000, None).await?;
@@ -256,7 +256,7 @@
         Ok(())
     }
 
-    #[connector_test]
+    #[connector_test(exclude(JS))]
     async fn tx_expiration_failure_cycle(mut runner: Runner) -> TestResult<()> {
         // Tx expires after one second.
let tx_id = runner.start_tx(5000, 1000, None).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index 260fd58e61fd..77a56f46c34b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -9,7 +9,7 @@ mod metrics { use query_engine_tests::*; use serde_json::Value; - #[connector_test] + #[connector_test(exclude(Js))] async fn metrics_are_recorded(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 1 }) { id }}"#), @@ -40,7 +40,7 @@ mod metrics { Ok(()) } - #[connector_test] + #[connector_test(exclude(Js))] async fn metrics_tx_do_not_go_negative(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, None).await?; runner.set_active_tx(tx_id.clone()); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index d4aa49116ea0..8ea08acc85da 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -66,7 +66,7 @@ mod one2one_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -167,7 +167,7 @@ mod one2one_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -270,7 +270,7 @@ mod one2many_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -371,7 +371,7 @@ mod one2many_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs index 270bb927c265..b0e566ffcb55 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -68,7 +68,7 @@ mod one2one_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -171,7 +171,7 @@ mod one2one_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -276,7 +276,7 @@ mod one2many_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -379,7 +379,7 @@ mod one2many_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs index f3f05163eeb2..8a2cbc7f24a2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs @@ -19,6 +19,7 @@ mod prisma_16760; mod prisma_17103; mod prisma_18517; mod prisma_20799; +mod prisma_21369; mod prisma_5952; mod prisma_6173; mod prisma_7010; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs new file mode 100644 index 000000000000..f25a83629dac --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs @@ -0,0 +1,17 @@ +use query_engine_tests::*; + +#[test_suite(schema(generic), exclude(MongoDb))] +mod prisma_21369 { + #[connector_test] + async fn select_null_works(runner: Runner) -> TestResult<()> { + let query = fmt_query_raw("SELECT NULL AS result", []); + let result = run_query!(runner, query); + + assert_eq!( + result, + r#"{"data":{"queryRaw":[{"result":{"prisma__type":"null","prisma__value":null}}]}}"# + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs index a3a7c7d34a9b..8c6e24db67ea 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs @@ -72,7 +72,7 @@ mod transactional { let batch_results = runner.batch(queries, true, None).await?; let batch_request_idx = batch_results.errors().get(0).unwrap().batch_request_idx(); - assert_eq!(batch_request_idx, Some(1usize)); + assert_eq!(batch_request_idx, Some(1)); Ok(()) } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs index acc4fb0ec3b0..5a2ddc350d06 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs @@ -122,6 +122,44 @@ mod json { Ok(()) } + #[connector_test(capabilities(AdvancedJsonNullability))] + async fn json_null_must_not_be_confused_with_literal_string(runner: Runner) -> TestResult<()> { + create_row(&runner, r#"{ id: 1, json: "\"null\"" }"#).await?; + + match runner.protocol() { + query_engine_tests::EngineProtocol::Graphql => { + let res = run_query!(runner, r#"{ findManyTestModel { json } }"#); + + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"json":"\"null\""}]}}"### + ); + } + query_engine_tests::EngineProtocol::Json => { + let res = runner + .query_json( + r#"{ + "modelName": "TestModel", + "action": "findMany", + "query": { + "selection": 
{ + "json": true + } + } + }"#, + ) + .await?; + + insta::assert_snapshot!( + res.to_string(), + @r###"{"data":{"findManyTestModel":[{"json":{"$type":"Json","value":"\"null\""}}]}}"### + ); + } + } + + Ok(()) + } + async fn create_test_data(runner: &Runner) -> TestResult<()> { create_row(runner, r#"{ id: 1, json: "{}" }"#).await?; create_row(runner, r#"{ id: 2, json: "{\"a\":\"b\"}" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs index 8dec5383fd0a..dea57eb1e0e4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs @@ -368,7 +368,7 @@ mod ext_rel_filters { test_data(&runner).await?; insta::assert_snapshot!( - run_query!(&runner, r#"{ findManyAlbum(where: { Tracks: { some: { OR:[{ MediaType: {is: { Name: { equals: "MediaType1" }}}}, { Genre: { is: { Name: { equals: "Genre2" }}}}]}}}) { Title }}"#), + run_query!(&runner, r#"{ findManyAlbum(where: { Tracks: { some: { OR:[{ MediaType: {is: { Name: { equals: "MediaType1" }}}}, { Genre: { is: { Name: { equals: "Genre2" }}}}]}}}, orderBy: { Title: asc }) { Title }}"#), @r###"{"data":{"findManyAlbum":[{"Title":"Album1"},{"Title":"Album3"},{"Title":"Album4"},{"Title":"Album5"}]}}"### ); @@ -507,7 +507,7 @@ mod ext_rel_filters { test_data(&runner).await?; insta::assert_snapshot!( - run_query!(&runner, r#"{ findManyGenre(where: { Tracks: { some: {} }}) { Name }}"#), + run_query!(&runner, r#"{ findManyGenre(where: { Tracks: { some: {} }}, orderBy: { Name: asc }) { Name }}"#), @r###"{"data":{"findManyGenre":[{"Name":"Genre1"},{"Name":"Genre2"},{"Name":"Genre3"}]}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs index e47c98c4cf2b..2f50edbe2628 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs @@ -276,6 +276,142 @@ mod many_relation { Ok(()) } + fn schema_2() -> String { + let schema = indoc! { + r#" + model Blog { + #id(id, Int, @id) + name String + posts Post[] + } + + model Post { + #id(id, Int, @id) + + blog_id Int + blog Blog @relation(fields: [blog_id], references: [id]) + + comment Comment? + } + + model Comment { + #id(id, Int, @id) + popularity Int + + postId Int @unique + post Post @relation(fields: [postId], references: [id]) + } + "# + }; + + schema.to_owned() + } + + // 2 levels to-many/to-one relation filter, all combinations. 
+ #[connector_test(schema(schema_2))] + async fn l2_m_1_rel_all(runner: Runner) -> TestResult<()> { + // Seed + run_query!( + &runner, + r#"mutation { createOneBlog(data: { + id: 1, + name: "blog1", + posts: { + create: [ + { id: 1, comment: { create: { id: 1, popularity: 10 } } }, + { id: 2, comment: { create: { id: 2, popularity: 50 } } }, + { id: 3, comment: { create: { id: 3, popularity: 100 } } }, + ] + } + }) { id } } + "# + ); + + run_query!( + &runner, + r#"mutation { createOneBlog(data: { + id: 2, + name: "blog2", + posts: { + create: [ + { id: 4, comment: { create: { id: 4, popularity: 1000 } } }, + { id: 5, comment: { create: { id: 5, popularity: 1000 } } }, + ] + } + }) { id } } + "# + ); + + // posts without comment + run_query!( + &runner, + r#"mutation { createOneBlog(data: { + id: 3, + name: "blog3", + posts: { + create: [ + { id: 6 }, + { id: 7 }, + ] + } + }) { id } } + "# + ); + + // blog without posts + run_query!( + &runner, + r#"mutation { createOneBlog(data: { id: 4, name: "blog4" }) { id } } "# + ); + + // some / is + insta::assert_snapshot!( + run_query!(&runner, r#"query { findManyBlog(where: { posts: { some: { comment: { is: { popularity: { lt: 1000 } } } } } }) { name }}"#), + @r###"{"data":{"findManyBlog":[{"name":"blog1"}]}}"### + ); + + // some / isNot + // TODO: Investigate why MongoDB returns a different result + match_connector_result!( + &runner, + r#"query { findManyBlog(where: { posts: { some: { comment: { isNot: { popularity: { gt: 100 } } } } } }) { name }}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"},{"name":"blog3"}]}}"#] + ); + + // none / is + insta::assert_snapshot!( + run_query!(&runner, r#"query { findManyBlog(where: { posts: { none: { comment: { is: { popularity: { lt: 1000 } } } } } }) { name }}"#), + @r###"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog3"},{"name":"blog4"}]}}"### + ); + + // none / isNot + // TODO: Investigate why MongoDB returns a different result + match_connector_result!( + &runner, + r#"query { findManyBlog(where: { posts: { none: { comment: { isNot: { popularity: { gt: 100 } } } } } }) { name }}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog3"},{"name":"blog4"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog4"}]}}"#] + ); + + // every / is + insta::assert_snapshot!( + run_query!(&runner, r#"query { findManyBlog(where: { posts: { every: { comment: { is: { popularity: { gte: 1000 } } } } } }) { name }}"#), + @r###"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog4"}]}}"### + ); + + // every / isNot + // TODO: Investigate why MongoDB returns a different result + match_connector_result!( + &runner, + r#"query { findManyBlog(where: { posts: { every: { comment: { isNot: { popularity: { gte: 1000 } } } } } }) { name }}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"},{"name":"blog4"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"},{"name":"blog3"},{"name":"blog4"}]}}"#] + ); + + Ok(()) + } + // Note: Only the original author knows why this is considered crazy. 
#[connector_test] async fn crazy_filters(runner: Runner) -> TestResult<()> { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs index e0d74adcaaf2..cca380f81138 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs @@ -9,26 +9,26 @@ mod one_relation { let schema = indoc! { r#" model Blog { - #id(id, String, @id, @default(cuid())) + #id(blogId, String, @id, @default(cuid())) name String post Post? } model Post { - #id(id, String, @id, @default(cuid())) + #id(postId, String, @id, @default(cuid())) title String popularity Int blogId String? @unique - blog Blog? @relation(fields: [blogId], references: [id]) + blog Blog? @relation(fields: [blogId], references: [blogId]) comment Comment? } model Comment { - #id(id, String, @id, @default(cuid())) + #id(commentId, String, @id, @default(cuid())) text String likes Int postId String? @unique - post Post? @relation(fields: [postId], references: [id]) + post Post? @relation(fields: [postId], references: [postId]) } "# }; @@ -52,6 +52,10 @@ mod one_relation { #[connector_test] async fn l1_one_rel(runner: Runner) -> TestResult<()> { test_data(&runner).await?; + run_query!( + &runner, + r#"mutation { createOneBlog( data: { name: "blog 4" } ) { name } }"# + ); insta::assert_snapshot!( run_query!(&runner, r#"query { findManyPost(where: { title: { equals: "post 2" }}) { title }}"#), @@ -59,12 +63,12 @@ mod one_relation { ); insta::assert_snapshot!( - run_query!(&runner, r#"{findManyPost(where:{blog:{is:{name:{equals: "blog 1"}}}}) { title }}"#), + run_query!(&runner, r#"{findManyPost(where:{blog:{is:{ name:{equals: "blog 1"}}}}) { title }}"#), @r###"{"data":{"findManyPost":[{"title":"post 1"}]}}"### ); insta::assert_snapshot!( - run_query!(&runner, r#"{findManyBlog(where: { post: { is:{popularity: { gte: 100 }}}}){name}}"#), + run_query!(&runner, r#"{findManyBlog(where: { post: { is:{ popularity: { gte: 100 }}}}){name}}"#), @r###"{"data":{"findManyBlog":[{"name":"blog 2"},{"name":"blog 3"}]}}"### ); @@ -73,9 +77,11 @@ mod one_relation { @r###"{"data":{"findManyBlog":[{"name":"blog 3"}]}}"### ); - insta::assert_snapshot!( - run_query!(&runner, r#"{findManyBlog(where: { post: { isNot:{popularity: { gte: 500 }}}}){name}}"#), - @r###"{"data":{"findManyBlog":[{"name":"blog 1"},{"name":"blog 2"}]}}"### + match_connector_result!( + &runner, + r#"{findManyBlog(where: { post: { isNot:{ popularity: { gte: 500 }}}}){name}}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog 1"},{"name":"blog 2"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog 1"},{"name":"blog 2"},{"name":"blog 4"}]}}"#] ); runner @@ -317,6 +323,89 @@ mod one_relation { Ok(()) } + // https://github.com/prisma/prisma/issues/21356 + fn schema_21356() -> String { + let schema = indoc! { + r#"model User { + #id(id, Int, @id) + name String? + + posts Post[] + + userId Int + userId2 Int + @@unique([userId, userId2]) + } + + model Post { + #id(id, Int, @id) + title String? + + userId Int? + userId_2 Int? + author User? 
@relation(fields: [userId, userId_2], references: [userId, userId2]) + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_21356))] + async fn repro_21356(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { createOneUser(data: { id: 1, userId: 1, userId2: 1, name: "Bob", posts: { create: { id: 1, title: "Hello" } } }) { id } }"# + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManyUser(where: { posts: { some: { author: { name: "Bob" } } } }) { id } }"#), + @r###"{"data":{"findManyUser":[{"id":1}]}}"### + ); + + Ok(()) + } + + // https://github.com/prisma/prisma/issues/21366 + fn schema_21366() -> String { + let schema = indoc! { + r#"model device { + #id(id, Int, @id) + + device_id String @unique + current_state device_state? @relation(fields: [device_id], references: [device_id], onDelete: NoAction) + } + + model device_state { + #id(id, Int, @id) + + device_id String @unique + device device[] + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_21366))] + async fn repro_21366(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOnedevice(data: { id: 1, current_state: { create: { id: 1, device_id: "1" } } }) { + id + } + } + "# + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManydevice_state(where: { device: { some: { device_id: "1" } } }) { id } }"#), + @r###"{"data":{"findManydevice_state":[{"id":1}]}}"### + ); + + Ok(()) + } + async fn test_data(runner: &Runner) -> TestResult<()> { runner .query(indoc! { r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs index ab6ddbdcd304..79c511edbc52 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs @@ -70,8 +70,9 @@ mod json { // TODO: This specific query currently cannot be sent from the JS client. // The client _always_ sends an array as plain json and never as an array of json. // We're temporarily ignoring it for the JSON protocol because we can't differentiate a list of json values from a json array. + // Similarly, this does not currently work with driver adapters. // https://github.com/prisma/prisma/issues/18019 - if runner.protocol().is_graphql() { + if runner.protocol().is_graphql() && !runner.is_external_executor() { match_connector_result!( &runner, r#"mutation { @@ -161,8 +162,9 @@ mod json { // TODO: This specific query currently cannot be sent from the JS client. // The client _always_ sends an array as plain json and never as an array of json. // We're temporarily ignoring it for the JSON protocol because we can't differentiate a list of json values from a json array. + // Similarly, this does not currently work with driver adapters. 
// https://github.com/prisma/prisma/issues/18019 - if runner.protocol().is_graphql() { + if runner.protocol().is_graphql() && !runner.is_external_executor() { match_connector_result!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs index 55cdb5a85bed..1247b3e27bea 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs @@ -353,21 +353,6 @@ mod json_create { #[connector_test(capabilities(AdvancedJsonNullability))] async fn create_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - createOneTestModel(data: { id: 1, json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index f82e217bb670..35a044b1473d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -360,23 +360,6 @@ mod json_create_many { #[connector_test(capabilities(AdvancedJsonNullability))] async fn create_many_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - createManyTestModel(data: [ - { id: 1, json: null }, - ]) { - count - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs index c2ed7f92cb42..4fe0726a3cc5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs @@ -764,21 +764,6 @@ mod json_update { #[connector_test(capabilities(AdvancedJsonNullability))] async fn update_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. 
- if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - updateOneTestModel(where: { id: 1 }, data: { json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs index fd0068761a55..7e969e21cdce 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs @@ -386,21 +386,6 @@ mod json_update_many { #[connector_test(capabilities(AdvancedJsonNullability))] async fn update_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - updateManyTestModel(where: { id: 1 }, data: { json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml index be09bc26ac40..088a0d4b2d34 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml @@ -11,6 +11,7 @@ qe-setup = { path = "../qe-setup" } request-handlers = { path = "../../request-handlers" } tokio.workspace = true query-core = { path = "../../core" } +sql-query-connector = { path = "../../connectors/sql-query-connector" } query-engine = { path = "../../query-engine"} psl.workspace = true user-facing-errors = { path = "../../../libs/user-facing-errors" } @@ -31,6 +32,7 @@ hyper = { version = "0.14", features = ["full"] } indexmap = { version = "1.0", features = ["serde-1"] } query-engine-metrics = {path = "../../metrics"} quaint.workspace = true +jsonrpc-core = "17" # Only this version is vetted, upgrade only after going through the code, # as this is a small crate with little user base. diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index 8807b4e0dbd8..4af4e763298a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -20,6 +20,21 @@ pub struct TestConfig { #[serde(rename = "version")] connector_version: Option, + /// An external process to execute the test queries and produced responses for assertion + /// Used when testing driver adapters, this process is expected to be a javascript process + /// loading the library engine (as a library, or WASM modules) and providing it with a + /// driver adapter. 
+ /// Env key: `EXTERNAL_TEST_EXECUTOR` + external_test_executor: Option, + + /// The driver adapter to use when running tests, will be forwarded to the external test + /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process + driver_adapter: Option, + + /// The driver adapter configuration to forward as a stringified JSON object to the external + /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor + driver_adapter_config: Option, + /// Indicates whether or not the tests are running in CI context. /// Env key: `BUILDKITE` #[serde(default)] @@ -35,13 +50,22 @@ const CONFIG_LOAD_FAILED: &str = r####" Test config can come from the environment, or a config file. -♻️ Environment +♻️ Environment variables + +Be sure to have WORKSPACE_ROOT set to the root of the prisma-engines +repository. -Set the following env vars: +Set the following vars to denote the connector under test - TEST_CONNECTOR - TEST_CONNECTOR_VERSION (optional) +And optionally, to test driver adapters + +- EXTERNAL_TEST_EXECUTOR +- DRIVER_ADAPTER +- DRIVER_ADAPTER_CONFIG (optional, not required by all driver adapters) + 📁 Config file Use the Makefile. @@ -51,8 +75,9 @@ fn exit_with_message(msg: &str) -> ! { use std::io::{stderr, Write}; let stderr = stderr(); let mut sink = stderr.lock(); + sink.write_all(b"Error in the test configuration:\n").unwrap(); sink.write_all(msg.as_bytes()).unwrap(); - sink.write_all(b"\n").unwrap(); + sink.write_all(b"Aborting test process\n").unwrap(); std::process::exit(1) } @@ -60,31 +85,44 @@ fn exit_with_message(msg: &str) -> ! { impl TestConfig { /// Loads a configuration. File-based config has precedence over env config. pub(crate) fn load() -> Self { - let config = match Self::from_file().or_else(Self::from_env) { + let mut config = match Self::from_file().or_else(Self::from_env) { Some(config) => config, None => exit_with_message(CONFIG_LOAD_FAILED), }; + config.fill_defaults(); config.validate(); config.log_info(); + config } + #[rustfmt::skip] fn log_info(&self) { println!("******************************"); println!("* Test run information:"); println!( "* Connector: {} {}", self.connector, - self.connector_version.as_ref().unwrap_or(&"".to_owned()) + self.connector_version().unwrap_or_default() ); println!("* CI? {}", self.is_ci); + if self.external_test_executor.as_ref().is_some() { + println!("* External test executor: {}", self.external_test_executor().unwrap_or_default()); + println!("* Driver adapter: {}", self.driver_adapter().unwrap_or_default()); + println!("* Driver adapter url override: {}", self.json_stringify_driver_adapter_config()); + } println!("******************************"); } fn from_env() -> Option { let connector = std::env::var("TEST_CONNECTOR").ok(); let connector_version = std::env::var("TEST_CONNECTOR_VERSION").ok(); + let external_test_executor = std::env::var("EXTERNAL_TEST_EXECUTOR").ok(); + let driver_adapter = std::env::var("DRIVER_ADAPTER").ok(); + let driver_adapter_config = std::env::var("DRIVER_ADAPTER_CONFIG") + .map(|config| serde_json::from_str::(config.as_str()).ok()) + .unwrap_or_default(); // Just care for a set value for now. 
let is_ci = std::env::var("BUILDKITE").is_ok();
@@ -93,16 +131,18 @@ impl TestConfig {
             connector,
             connector_version,
             is_ci,
+            external_test_executor,
+            driver_adapter,
+            driver_adapter_config,
         })
     }
 
     fn from_file() -> Option<Self> {
         let current_dir = env::current_dir().ok();
-        let workspace_root = std::env::var("WORKSPACE_ROOT").ok().map(PathBuf::from);
 
         current_dir
             .and_then(|path| Self::try_path(config_path(path)))
-            .or_else(|| workspace_root.and_then(|path| Self::try_path(config_path(path))))
+            .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path))))
     }
 
     fn try_path(path: PathBuf) -> Option<Self> {
@@ -115,6 +155,33 @@ impl TestConfig {
         })
     }
 
+    /// If the loaded value for external_test_executor is "default" (case insensitive),
+    /// and the workspace_root is set, then use the default external test executor.
+    fn fill_defaults(&mut self) {
+        const DEFAULT_TEST_EXECUTOR: &str =
+            "query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh";
+
+        if self
+            .external_test_executor
+            .as_ref()
+            .filter(|s| s.eq_ignore_ascii_case("default"))
+            .is_some()
+        {
+            self.external_test_executor = Self::workspace_root()
+                .map(|path| path.join(DEFAULT_TEST_EXECUTOR))
+                .or_else(|| {
+                    exit_with_message(
+                        "WORKSPACE_ROOT needs to be correctly set to the root of the prisma-engines repository",
+                    )
+                })
+                .and_then(|path| path.to_str().map(|s| s.to_owned()));
+        }
+    }
+
+    fn workspace_root() -> Option<PathBuf> {
+        env::var("WORKSPACE_ROOT").ok().map(PathBuf::from)
+    }
+
     fn validate(&self) {
         if self.connector.is_empty() {
             exit_with_message("A test connector is required but was not set.");
@@ -138,6 +205,46 @@
             | Ok(ConnectorVersion::Sqlite) => (),
             Err(err) => exit_with_message(&err.to_string()),
         }
+
+        if let Some(file) = self.external_test_executor.as_ref() {
+            let path = PathBuf::from(file);
+            let md = path.metadata();
+            if !path.exists() || md.is_err() || !md.unwrap().is_file() {
+                exit_with_message(&format!("The external test executor path `{}` must be a file", file));
+            }
+            #[cfg(unix)]
+            {
+                use std::os::unix::fs::PermissionsExt;
+                let is_executable = match path.metadata() {
+                    Err(_) => false,
+                    Ok(md) => md.permissions().mode() & 0o111 != 0,
+                };
+                if !is_executable {
+                    exit_with_message(&format!(
+                        "The external test executor file `{}` must have permission to execute",
+                        file
+                    ));
+                }
+            }
+        }
+
+        if self.external_test_executor.is_some() && self.driver_adapter.is_none() {
+            exit_with_message(
+                "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.",
+            );
+        }
+
+        if self.driver_adapter.is_some() && self.external_test_executor.is_none() {
+            exit_with_message(
+                "When using a driver adapter, the external test executor (EXTERNAL_TEST_EXECUTOR env var) must be set.",
+            );
+        }
+
+        if self.driver_adapter.is_none() && self.driver_adapter_config.is_some() {
+            exit_with_message(
+                "When using a driver adapter config, the driver adapter (DRIVER_ADAPTER env var) must be set.",
+            );
+        }
     }
 
     pub fn connector(&self) -> &str {
@@ -145,13 +252,28 @@
     }
 
     pub(crate) fn connector_version(&self) -> Option<&str> {
-        self.connector_version.as_ref().map(AsRef::as_ref)
+        self.connector_version.as_deref()
     }
 
     pub fn is_ci(&self) -> bool {
         self.is_ci
     }
 
+    pub fn external_test_executor(&self) -> Option<&str> {
+        self.external_test_executor.as_deref()
+    }
+
+    pub fn driver_adapter(&self) -> Option<&str> {
+        self.driver_adapter.as_deref()
+    }
+
+    pub fn json_stringify_driver_adapter_config(&self) -> String {
self.driver_adapter_config + .as_ref() + .map(|value| value.to_string()) + .unwrap_or("{}".to_string()) + } + pub fn test_connector(&self) -> TestResult<(ConnectorTag, ConnectorVersion)> { let version = ConnectorVersion::try_from((self.connector(), self.connector_version()))?; let tag = match version { @@ -166,6 +288,23 @@ impl TestConfig { Ok((tag, version)) } + + #[rustfmt::skip] + pub fn for_external_executor(&self) -> Vec<(String, String)> { + vec!( + ( + "DRIVER_ADAPTER".to_string(), + self.driver_adapter.clone().unwrap_or_default()), + ( + "DRIVER_ADAPTER_CONFIG".to_string(), + self.json_stringify_driver_adapter_config() + ), + ( + "PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), + "1".to_string(), + ), + ) + } } fn config_path(mut path: PathBuf) -> PathBuf { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs new file mode 100644 index 000000000000..2ec8513baeda --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs @@ -0,0 +1,14 @@ +mod external_process; + +use super::*; +use external_process::*; +use serde::de::DeserializeOwned; +use std::{collections::HashMap, sync::atomic::AtomicU64}; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; + +pub(crate) async fn executor_process_request( + method: &str, + params: serde_json::Value, +) -> Result> { + EXTERNAL_PROCESS.request(method, params).await +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs new file mode 100644 index 000000000000..583d5058c62e --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -0,0 +1,185 @@ +use super::*; +use once_cell::sync::Lazy; +use serde::de::DeserializeOwned; +use std::{fmt::Display, io::Write as _, sync::atomic::Ordering}; +use tokio::sync::{mpsc, oneshot}; + +type Result = std::result::Result>; + +#[derive(Debug)] +struct GenericError(String); + +impl Display for GenericError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl std::error::Error for GenericError {} + +pub(crate) struct ExecutorProcess { + task_handle: mpsc::Sender, + request_id_counter: AtomicU64, +} + +fn exit_with_message(status_code: i32, message: &str) -> ! { + let stdout = std::io::stdout(); + stdout.lock().write_all(message.as_bytes()).unwrap(); + std::process::exit(status_code) +} + +impl ExecutorProcess { + fn new() -> Result { + let (sender, receiver) = mpsc::channel::(300); + + let handle = std::thread::spawn(|| match start_rpc_thread(receiver) { + Ok(()) => (), + Err(err) => { + exit_with_message(1, &err.to_string()); + } + }); + + std::thread::spawn(move || { + if let Err(e) = handle.join() { + exit_with_message( + 1, + &format!( + "rpc thread panicked with: {}", + e.downcast::().unwrap_or_default() + ), + ); + } + }); + + Ok(ExecutorProcess { + task_handle: sender, + request_id_counter: Default::default(), + }) + } + + /// Convenient façade. Allocates more than necessary, but this is only for testing. 
+ #[tracing::instrument(skip(self))] + pub(crate) async fn request(&self, method: &str, params: serde_json::Value) -> Result { + let (sender, receiver) = oneshot::channel(); + let params = if let serde_json::Value::Object(params) = params { + params + } else { + panic!("params aren't an object") + }; + let method_call = jsonrpc_core::MethodCall { + jsonrpc: Some(jsonrpc_core::Version::V2), + method: method.to_owned(), + params: jsonrpc_core::Params::Map(params), + id: jsonrpc_core::Id::Num(self.request_id_counter.fetch_add(1, Ordering::Relaxed)), + }; + + self.task_handle.send((method_call, sender)).await?; + let raw_response = receiver.await?; + tracing::debug!(%raw_response); + let response = serde_json::from_value(raw_response)?; + Ok(response) + } +} + +pub(super) static EXTERNAL_PROCESS: Lazy = + Lazy::new(|| match std::thread::spawn(ExecutorProcess::new).join() { + Ok(Ok(process)) => process, + Ok(Err(err)) => exit_with_message(1, &format!("Failed to start node process. Details: {err}")), + Err(err) => { + let err = err.downcast_ref::().map(ToOwned::to_owned).unwrap_or_default(); + exit_with_message(1, &format!("Panic while trying to start node process.\nDetails: {err}")) + } + }); + +type ReqImpl = (jsonrpc_core::MethodCall, oneshot::Sender); + +fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { + use std::process::Stdio; + use tokio::process::Command; + + let path = crate::CONFIG + .external_test_executor() + .unwrap_or_else(|| exit_with_message(1, "start_rpc_thread() error: external test executor is not set")); + + tokio::runtime::Builder::new_current_thread() + .enable_io() + .build() + .unwrap() + .block_on(async move { + let process = match Command::new(path) + .envs(CONFIG.for_external_executor()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()) + .spawn() + { + Ok(process) => process, + Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process: `{path}`. Details: {err}\n")), + }; + + let mut stdout = BufReader::new(process.stdout.unwrap()).lines(); + let mut stdin = process.stdin.unwrap(); + let mut pending_requests: HashMap> = + HashMap::new(); + + loop { + tokio::select! { + line = stdout.next_line() => { + match line { + // Two error modes in here: the external process can response with + // something that is not a jsonrpc response (basically any normal logging + // output), or it can respond with a jsonrpc response that represents a + // failure. + Ok(Some(line)) => // new response + { + match serde_json::from_str::(&line) { + Ok(response) => { + let sender = pending_requests.remove(response.id()).unwrap(); + match response { + jsonrpc_core::Output::Success(success) => { + // The other end may be dropped if the whole + // request future was dropped and not polled to + // completion, so we ignore send errors here. + _ = sender.send(success.result); + } + jsonrpc_core::Output::Failure(err) => { + panic!("error response from jsonrpc: {err:?}") + } + } + } + Err(err) => { + tracing::error!(%err, "error when decoding response from child node process. 
Response was: `{}`", &line); + continue + } + }; + } + Ok(None) => // end of the stream + { + exit_with_message(1, "child node process stdout closed") + } + Err(err) => // log it + { + tracing::error!(%err, "Error when reading from child node process"); + } + } + } + request = receiver.recv() => { + match request { + None => // channel closed + { + exit_with_message(1, "The json-rpc client channel was closed"); + } + Some((request, response_sender)) => { + pending_requests.insert(request.id.clone(), response_sender); + let mut req = serde_json::to_vec(&request).unwrap(); + req.push(b'\n'); + stdin.write_all(&req).await.unwrap(); + } + } + } + } + } + }); + + Ok(()) +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index 035acedb5696..d92bb5e96314 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -1,4 +1,5 @@ mod cockroachdb; +mod js; mod mongodb; mod mysql; mod postgres; @@ -6,10 +7,12 @@ mod sql_server; mod sqlite; mod vitess; -pub use mysql::*; +pub use mysql::MySqlVersion; pub(crate) use cockroachdb::*; +pub(crate) use js::*; pub(crate) use mongodb::*; +pub(crate) use mysql::*; pub(crate) use postgres::*; pub(crate) use sql_server::*; pub(crate) use sqlite::*; @@ -24,7 +27,7 @@ pub trait ConnectorTagInterface { /// The name of the datamodel provider for this connector. /// Must match valid datamodel provider strings. - fn datamodel_provider(&self) -> &'static str; + fn datamodel_provider(&self) -> &str; /// Returns the renderer to be used for templating the datamodel (the models portion). fn datamodel_renderer(&self) -> Box; @@ -35,7 +38,7 @@ pub trait ConnectorTagInterface { /// Defines where relational constraints are handled: /// - "prisma" is handled in the Query Engine core /// - "foreignKeys" lets the database handle them - fn relation_mode(&self) -> &'static str { + fn relation_mode(&self) -> &str { "foreignKeys" } } @@ -299,10 +302,14 @@ pub(crate) fn should_run( .any(|only| ConnectorVersion::try_from(*only).unwrap().matches_pattern(&version)); } - if exclude - .iter() - .any(|excl| ConnectorVersion::try_from(*excl).unwrap().matches_pattern(&version)) - { + if CONFIG.external_test_executor().is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { + println!("Excluded test execution for JS driver adapters. Skipping test"); + return false; + }; + + if exclude.iter().any(|excl| { + ConnectorVersion::try_from(*excl).map_or(false, |connector_version| connector_version.matches_pattern(&version)) + }) { println!("Connector excluded. 
Skipping test."); return false; } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs index 79fb457015be..041c63f9dd45 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs @@ -6,6 +6,9 @@ use thiserror::Error; #[allow(clippy::large_enum_variant)] #[derive(Debug, Error)] pub enum TestError { + #[error("Handler Error: {0}")] + RequestHandlerError(request_handlers::HandlerError), + #[error("Parse error: {0}")] ParseError(String), @@ -26,6 +29,9 @@ pub enum TestError { #[error("Raw execute error: {0}")] RawExecute(QuaintError), + + #[error("External process error: {0}")] + External(#[from] Box), } impl TestError { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index 0cdf3d1d3ef6..2e79581a0c78 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -43,7 +43,12 @@ pub static ENGINE_PROTOCOL: Lazy = Lazy::new(|| std::env::var("PRISMA_ENGINE_PROTOCOL").unwrap_or_else(|_| "graphql".to_owned())); /// Teardown of a test setup. -async fn teardown_project(datamodel: &str, db_schemas: &[&str]) -> TestResult<()> { +async fn teardown_project(datamodel: &str, db_schemas: &[&str], schema_id: Option) -> TestResult<()> { + if let Some(schema_id) = schema_id { + let params = serde_json::json!({ "schemaId": schema_id }); + executor_process_request::("teardown", params).await?; + } + Ok(qe_setup::teardown(datamodel, db_schemas).await?) } @@ -167,7 +172,9 @@ fn run_relation_link_test_impl( test_fn(&runner, &dm).await.unwrap(); - teardown_project(&datamodel, Default::default()).await.unwrap(); + teardown_project(&datamodel, Default::default(), runner.schema_id()) + .await + .unwrap(); } .with_subscriber(test_tracing_subscriber( ENV_LOG_LEVEL.to_string(), @@ -275,10 +282,13 @@ fn run_connector_test_impl( ) .await .unwrap(); + let schema_id = runner.schema_id(); test_fn(runner).await.unwrap(); - crate::teardown_project(&datamodel, db_schemas).await.unwrap(); + crate::teardown_project(&datamodel, db_schemas, schema_id) + .await + .unwrap(); } .with_subscriber(test_tracing_subscriber( ENV_LOG_LEVEL.to_string(), diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs index 83855fde1c59..4c85e70ac7c6 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs @@ -1,15 +1,66 @@ +use query_core::constants::custom_types; use request_handlers::{GQLError, PrismaResponse}; +use serde::{Deserialize, Serialize}; +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct SimpleGqlErrorResponse { + #[serde(skip_serializing_if = "Vec::is_empty")] + errors: Vec, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct SimpleGqlResponse { + #[serde(skip_serializing_if = "SimpleGqlResponse::data_is_empty")] + #[serde(default)] + data: serde_json::Value, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + errors: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default)] + extensions: Option, +} + +impl SimpleGqlResponse { + fn data_is_empty(data: &serde_json::Value) -> bool { + match data { + 
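// Editor's sketch of the serde behaviour that the simplified response types above rely on,
// using hypothetical mini types: an `errors` field is omitted when empty and defaults to
// empty when absent, and an untagged enum picks its variant by shape (a `batchResult` key
// selects the batch form). Assumes the serde derive feature and serde_json.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct MiniResponse {
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    errors: Vec<String>,
    data: serde_json::Value,
}

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum MiniEnvelope {
    Batch {
        #[serde(rename = "batchResult")]
        batch_result: Vec<MiniResponse>,
    },
    Single(MiniResponse),
}

fn main() {
    // `errors` is absent in the input, so the `default` attribute kicks in on deserialize.
    let single: MiniEnvelope = serde_json::from_str(r#"{"data":{"id":1}}"#).unwrap();
    let batch: MiniEnvelope = serde_json::from_str(r#"{"batchResult":[{"data":{"id":2}}]}"#).unwrap();
    println!("{single:?} / {batch:?}");
}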
serde_json::Value::Object(o) => o.is_empty(), + serde_json::Value::Null => true, + _ => false, + } + } +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +struct SimpleGqlBatchResponse { + batch_result: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + errors: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + extensions: Option, +} -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(untagged)] +enum Response { + Error(SimpleGqlErrorResponse), + Multi(SimpleGqlBatchResponse), + Single(SimpleGqlResponse), +} + +#[derive(Debug, Deserialize, PartialEq)] +#[serde(transparent)] pub struct QueryResult { - response: PrismaResponse, + response: Response, } impl QueryResult { pub fn failed(&self) -> bool { match self.response { - PrismaResponse::Single(ref s) => s.errors().next().is_some(), - PrismaResponse::Multi(ref m) => m.errors().next().is_some(), + Response::Error(ref s) => !s.errors.is_empty(), + Response::Single(ref s) => !s.errors.is_empty(), + Response::Multi(ref m) => !(m.errors.is_empty() && m.batch_result.iter().all(|res| res.errors.is_empty())), } } @@ -70,8 +121,13 @@ impl QueryResult { pub fn errors(&self) -> Vec<&GQLError> { match self.response { - PrismaResponse::Single(ref s) => s.errors().collect(), - PrismaResponse::Multi(ref m) => m.errors().collect(), + Response::Error(ref s) => s.errors.iter().collect(), + Response::Single(ref s) => s.errors.iter().collect(), + Response::Multi(ref m) => m + .errors + .iter() + .chain(m.batch_result.iter().flat_map(|res| res.errors.iter())) + .collect(), } } @@ -82,6 +138,20 @@ impl QueryResult { pub fn to_string_pretty(&self) -> String { serde_json::to_string_pretty(&self.response).unwrap() } + + /// Transform a JSON protocol response to a GraphQL protocol response, by removing the type + /// tags. 
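// Editor's sketch of the "detag" idea documented above: the JSON protocol wraps scalar
// values in objects carrying a type tag plus the raw value, and test comparisons want the
// plain value back. The literal key names "$type" and "value" are assumptions made for this
// sketch; the real code reads them from query_core::constants::custom_types. Assumes serde_json.
fn strip_type_tags(val: &mut serde_json::Value) {
    match val {
        serde_json::Value::Object(obj) => {
            if obj.len() == 2 && obj.contains_key("$type") && obj.contains_key("value") {
                // Replace the wrapper object with its inner value, recursing into it first.
                let mut inner = obj.remove("value").unwrap();
                strip_type_tags(&mut inner);
                *val = inner;
            } else {
                for v in obj.values_mut() {
                    strip_type_tags(v);
                }
            }
        }
        serde_json::Value::Array(items) => items.iter_mut().for_each(strip_type_tags),
        _ => {}
    }
}

fn main() {
    let mut v = serde_json::json!({ "createdAt": { "$type": "DateTime", "value": "2023-01-01T00:00:00Z" } });
    strip_type_tags(&mut v);
    assert_eq!(v, serde_json::json!({ "createdAt": "2023-01-01T00:00:00Z" }));
}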
+ pub(crate) fn detag(&mut self) { + match &mut self.response { + Response::Error(_) => (), + Response::Single(res) => detag_value(&mut res.data), + Response::Multi(res) => { + for res in &mut res.batch_result { + detag_value(&mut res.data) + } + } + } + } } impl ToString for QueryResult { @@ -92,6 +162,149 @@ impl ToString for QueryResult { impl From for QueryResult { fn from(response: PrismaResponse) -> Self { - Self { response } + match response { + PrismaResponse::Single(res) => QueryResult { + response: Response::Single(SimpleGqlResponse { + data: serde_json::to_value(res.data).unwrap(), + errors: res.errors, + extensions: (!res.extensions.is_empty()).then(|| serde_json::to_value(&res.extensions).unwrap()), + }), + }, + PrismaResponse::Multi(reses) => QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: reses + .batch_result + .into_iter() + .map(|res| SimpleGqlResponse { + data: serde_json::to_value(&res.data).unwrap(), + errors: res.errors, + extensions: (!res.extensions.is_empty()) + .then(|| serde_json::to_value(&res.extensions).unwrap()), + }) + .collect(), + errors: reses.errors, + extensions: (!reses.extensions.is_empty()) + .then(|| serde_json::to_value(&reses.extensions).unwrap()), + }), + }, + } + } +} + +fn detag_value(val: &mut serde_json::Value) { + match val { + serde_json::Value::Object(obj) => { + if obj.len() == 2 && obj.contains_key(custom_types::TYPE) && obj.contains_key(custom_types::VALUE) { + let mut new_val = obj.remove(custom_types::VALUE).unwrap(); + detag_value(&mut new_val); + *val = new_val; + } else { + for elem in obj.values_mut() { + detag_value(elem); + } + } + } + serde_json::Value::Array(arr) => { + for elem in arr { + detag_value(elem) + } + } + _ => (), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn test_deserializing_successful_batch_response() { + let response = "{\"batchResult\":[{\"data\":{\"findUniqueTestModelOrThrow\":{\"id\":1}}},{\"data\":{\"findUniqueTestModelOrThrow\":{\"id\":2}}}]}"; + let result: QueryResult = serde_json::from_str(response).unwrap(); + + let expected = QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: vec![ + SimpleGqlResponse { + data: json!({ + "findUniqueTestModelOrThrow": { + "id": 1, + }, + }), + errors: vec![], + extensions: None, + }, + SimpleGqlResponse { + data: json!({ + "findUniqueTestModelOrThrow": { + "id": 2, + }, + }), + errors: vec![], + extensions: None, + }, + ], + errors: vec![], + extensions: None, + }), + }; + assert_eq!(result, expected); + } + + #[test] + fn test_deserializing_error_batch_response() { + let response = r#" +{ + "batchResult":[ + { + "data":{ + "findUniqueTestModelOrThrow":{ + "id":2 + } + } + }, + { + "errors":[ + { + "error":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.", + "user_facing_error":{ + "is_panic":false, + "message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.", + "meta":{ + "cause":"Expected a record, found none." 
+ }, + "error_code":"P2025" + } + } + ] + } + ] +}"#; + let result: QueryResult = serde_json::from_str(response).unwrap(); + + let expected = QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: vec![ + SimpleGqlResponse { + data: json!({"findUniqueTestModelOrThrow": {"id": 2}}), + errors: vec![], + extensions: None, + }, + SimpleGqlResponse { + data: serde_json::Value::Null, + errors: vec![GQLError::from_user_facing_error(user_facing_errors::KnownError { + message: "An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.".to_string(), + meta: json!({"cause": "Expected a record, found none."}), + error_code: std::borrow::Cow::from("P2025"), + }.into())], + extensions: None, + }, + ], + errors: vec![], + extensions: None, + }), + }; + assert_eq!(result, expected); } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs index 0486c291e8ba..0eee2d9e6cb6 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs @@ -1,4 +1,4 @@ -use crate::TestResult; +use crate::{TestError, TestResult}; use indexmap::IndexMap; use itertools::Itertools; use prisma_models::PrismaValue; @@ -18,24 +18,28 @@ pub struct JsonRequest; impl JsonRequest { /// Translates a GraphQL query to a JSON query. This is used to keep the same test-suite running on both protocols. pub fn from_graphql(gql: &str, query_schema: &QuerySchema) -> TestResult { - let operation = GraphQLProtocolAdapter::convert_query_to_operation(gql, None).unwrap(); - let operation_name = operation.name(); - let schema_field = query_schema - .find_query_field(operation_name) - .unwrap_or_else(|| query_schema.find_mutation_field(operation_name).unwrap()); - let model_name = schema_field - .model() - .map(|m| query_schema.internal_data_model.walk(m).name().to_owned()); - let query_tag = schema_field.query_tag().unwrap().to_owned(); - let selection = operation.into_selection(); - - let output = JsonSingleQuery { - model_name, - action: Action::new(query_tag), - query: graphql_selection_to_json_field_query(selection, &schema_field), - }; - - Ok(output) + match GraphQLProtocolAdapter::convert_query_to_operation(gql, None) { + Ok(operation) => { + let operation_name = operation.name(); + let schema_field = query_schema + .find_query_field(operation_name) + .unwrap_or_else(|| query_schema.find_mutation_field(operation_name).unwrap()); + let model_name = schema_field + .model() + .map(|m| query_schema.internal_data_model.walk(m).name().to_owned()); + let query_tag = schema_field.query_tag().unwrap().to_owned(); + let selection = operation.into_selection(); + + let output = JsonSingleQuery { + model_name, + action: Action::new(query_tag), + query: graphql_selection_to_json_field_query(selection, &schema_field), + }; + + Ok(output) + } + Err(err) => Err(TestError::RequestHandlerError(err)), + } } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index e9fce19c2c15..03e2dce5c5e0 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -1,8 +1,12 @@ mod json_adapter; pub use 
json_adapter::*; +use serde::Deserialize; -use crate::{ConnectorTag, ConnectorVersion, QueryResult, TestLogCapture, TestResult, ENGINE_PROTOCOL}; +use crate::{ + executor_process_request, ConnectorTag, ConnectorVersion, QueryResult, TestError, TestLogCapture, TestResult, + ENGINE_PROTOCOL, +}; use colored::Colorize; use query_core::{ protocol::EngineProtocol, @@ -11,18 +15,76 @@ use query_core::{ }; use query_engine_metrics::MetricRegistry; use request_handlers::{ - load_executor, BatchTransactionOption, ConnectorMode, GraphqlBody, JsonBatchQuery, JsonBody, JsonSingleQuery, - MultiQuery, RequestBody, RequestHandler, + BatchTransactionOption, ConnectorMode, GraphqlBody, JsonBatchQuery, JsonBody, JsonSingleQuery, MultiQuery, + RequestBody, RequestHandler, +}; +use serde_json::json; +use std::{ + env, + sync::{atomic::AtomicUsize, Arc}, }; -use std::{env, sync::Arc}; pub type TxResult = Result<(), user_facing_errors::Error>; pub(crate) type Executor = Box; +#[derive(Deserialize, Debug)] +struct Empty {} + +#[derive(Deserialize, Debug)] +#[serde(untagged)] +enum TransactionEndResponse { + Error(user_facing_errors::Error), + Ok(Empty), +} + +impl From for TxResult { + fn from(value: TransactionEndResponse) -> Self { + match value { + TransactionEndResponse::Ok(_) => Ok(()), + TransactionEndResponse::Error(error) => Err(error), + } + } +} + +pub enum RunnerExecutor { + // Builtin is a runner that uses the query engine in-process, issuing queries against a + // `core::InterpretingExecutor` that uses the particular connector under test in the test suite. + Builtin(Executor), + + // External is a runner that uses an external process that responds to queries piped to its STDIN + // in JsonRPC format. In particular this is used to test the query engine against a node process + // running a library engine configured to use a javascript driver adapter to connect to a database. + // + // In this struct variant, usize represents the index of the schema used for the test suite to + // execute queries against. When the suite starts, a message with the schema and the id is sent to + // the external process, which will create a new instance of the library engine configured to + // access that schema. + // + // Everytime a query is sent to the external process, it's provided the id of the schema, so the + // process knows how to associate the query to the instance of the library engine that will dispatch + // it. + External(usize), +} + +impl RunnerExecutor { + async fn new_external(url: &str, schema: &str) -> TestResult { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + let id = COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + + executor_process_request( + "initializeSchema", + json!({ "schema": schema, "schemaId": id, "url": url }), + ) + .await?; + + Ok(RunnerExecutor::External(id)) + } +} + /// Direct engine runner. 
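// Editor's sketch of the external-executor handshake described above: each test run takes a
// fresh schema id from a process-wide counter and announces the datamodel to the external
// process before issuing any query. The payload field names (schema, schemaId, url) match
// the ones used above; the datamodel and URL below are placeholders.
use std::sync::atomic::{AtomicUsize, Ordering};

static SCHEMA_COUNTER: AtomicUsize = AtomicUsize::new(0);

fn initialize_schema_params(schema: &str, url: &str) -> (usize, serde_json::Value) {
    let id = SCHEMA_COUNTER.fetch_add(1, Ordering::Relaxed);
    (id, serde_json::json!({ "schema": schema, "schemaId": id, "url": url }))
}

fn main() {
    let (id, params) = initialize_schema_params("model A { id Int @id }", "postgres://localhost/test");
    // In the real test kit this body is sent via the "initializeSchema" JSON-RPC method.
    println!("initializeSchema #{id}: {params}");
}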
pub struct Runner { - executor: Executor, + executor: RunnerExecutor, query_schema: QuerySchemaRef, version: ConnectorVersion, connector_tag: ConnectorTag, @@ -34,6 +96,13 @@ pub struct Runner { } impl Runner { + pub(crate) fn schema_id(&self) -> Option { + match self.executor { + RunnerExecutor::Builtin(_) => None, + RunnerExecutor::External(schema_id) => Some(schema_id), + } + } + pub fn prisma_dml(&self) -> &str { self.query_schema.internal_data_model.schema.db.source() } @@ -49,18 +118,22 @@ impl Runner { qe_setup::setup(&datamodel, db_schemas).await?; let protocol = EngineProtocol::from(&ENGINE_PROTOCOL.to_string()); - let schema = psl::parse_schema(datamodel).unwrap(); + let schema = psl::parse_schema(&datamodel).unwrap(); let data_source = schema.configuration.datasources.first().unwrap(); let url = data_source.load_url(|key| env::var(key).ok()).unwrap(); - let connector_mode = ConnectorMode::Rust; - let executor = load_executor( - connector_mode, - data_source, - schema.configuration.preview_features(), - &url, - ) - .await?; + let executor = match crate::CONFIG.external_test_executor() { + Some(_) => RunnerExecutor::new_external(&url, &datamodel).await?, + None => RunnerExecutor::Builtin( + request_handlers::load_executor( + ConnectorMode::Rust, + data_source, + schema.configuration.preview_features(), + &url, + ) + .await?, + ), + }; let query_schema: QuerySchemaRef = Arc::new(schema::build(Arc::new(schema), true)); Ok(Self { @@ -82,9 +155,33 @@ impl Runner { { let query = query.into(); + let executor = match &self.executor { + RunnerExecutor::Builtin(e) => e, + RunnerExecutor::External(schema_id) => match JsonRequest::from_graphql(&query, self.query_schema()) { + Ok(json_query) => { + let response_str: String = + executor_process_request("query", json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) })).await?; + let mut response: QueryResult = serde_json::from_str(&response_str).unwrap(); + response.detag(); + return Ok(response); + } + // Conversion from graphql to JSON might fail, and in that case we should consider the error + // (a Handler error) as an error response. 
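// Editor's sketch of the request body built for the "query" method above: the translated
// JSON query plus the schema id, with `txId` serialized as null when the runner holds no
// open interactive transaction. The concrete query values here are placeholders.
fn query_params(json_query: serde_json::Value, schema_id: usize, tx_id: Option<&str>) -> serde_json::Value {
    serde_json::json!({ "query": json_query, "schemaId": schema_id, "txId": tx_id })
}

fn main() {
    let body = query_params(
        serde_json::json!({ "modelName": "User", "action": "findMany", "query": {} }),
        0,
        None,
    );
    assert_eq!(body["txId"], serde_json::Value::Null);
    println!("{body}");
}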
+ Err(TestError::RequestHandlerError(err)) => { + let gql_err = request_handlers::GQLError::from_handler_error(err); + let gql_res = request_handlers::GQLResponse::from(gql_err); + let prisma_res = request_handlers::PrismaResponse::Single(gql_res); + let mut response = QueryResult::from(prisma_res); + response.detag(); + return Ok(response); + } + Err(err) => return Err(err), + }, + }; + tracing::debug!("Querying: {}", query.clone().green()); - let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol); + let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol); let request_body = match self.protocol { EngineProtocol::Json => { @@ -127,7 +224,20 @@ impl Runner { println!("{}", query.bright_green()); - let handler = RequestHandler::new(&*self.executor, &self.query_schema, EngineProtocol::Json); + let executor = match &self.executor { + RunnerExecutor::Builtin(e) => e, + RunnerExecutor::External(_) => { + let response_str: String = executor_process_request( + "query", + json!({ "query": query, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }), + ) + .await?; + let response: QueryResult = serde_json::from_str(&response_str).unwrap(); + return Ok(response); + } + }; + + let handler = RequestHandler::new(&**executor, &self.query_schema, EngineProtocol::Json); let serialized_query: JsonSingleQuery = serde_json::from_str(&query).unwrap(); let request_body = RequestBody::Json(JsonBody::Single(serialized_query)); @@ -164,7 +274,12 @@ impl Runner { transaction: bool, isolation_level: Option, ) -> TestResult { - let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol); + let executor = match &self.executor { + RunnerExecutor::External(_) => todo!(), + RunnerExecutor::Builtin(e) => e, + }; + + let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol); let body = RequestBody::Json(JsonBody::Batch(JsonBatchQuery { batch: queries .into_iter() @@ -184,7 +299,32 @@ impl Runner { transaction: bool, isolation_level: Option, ) -> TestResult { - let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol); + let executor = match &self.executor { + RunnerExecutor::External(schema_id) => { + // Translate the GraphQL query to JSON + let batch = queries + .into_iter() + .map(|query| JsonRequest::from_graphql(&query, self.query_schema())) + .collect::>>() + .unwrap(); + let transaction = match transaction { + true => Some(BatchTransactionOption { isolation_level }), + false => None, + }; + let json_query = JsonBody::Batch(JsonBatchQuery { batch, transaction }); + let response_str: String = executor_process_request( + "query", + json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }) + ).await?; + + let mut response: QueryResult = serde_json::from_str(&response_str).unwrap(); + response.detag(); + return Ok(response); + } + RunnerExecutor::Builtin(e) => e, + }; + + let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol); let body = match self.protocol { EngineProtocol::Json => { // Translate the GraphQL query to JSON @@ -227,31 +367,74 @@ impl Runner { isolation_level: Option, ) -> TestResult { let tx_opts = TransactionOptions::new(max_acquisition_millis, valid_for_millis, isolation_level); - - let id = self - .executor - .start_tx(self.query_schema.clone(), self.protocol, tx_opts) - .await?; - Ok(id) + match &self.executor { + RunnerExecutor::Builtin(executor) => { + let id = executor + 
.start_tx(self.query_schema.clone(), self.protocol, tx_opts) + .await?; + Ok(id) + } + RunnerExecutor::External(schema_id) => { + #[derive(Deserialize, Debug)] + #[serde(untagged)] + enum StartTransactionResponse { + Ok { id: String }, + Error(user_facing_errors::Error), + } + let response: StartTransactionResponse = + executor_process_request("startTx", json!({ "schemaId": schema_id, "options": tx_opts })).await?; + + match response { + StartTransactionResponse::Ok { id } => Ok(id.into()), + StartTransactionResponse::Error(err) => { + Err(crate::TestError::InteractiveTransactionError(err.message().into())) + } + } + } + } } pub async fn commit_tx(&self, tx_id: TxId) -> TestResult { - let res = self.executor.commit_tx(tx_id).await; + match &self.executor { + RunnerExecutor::Builtin(executor) => { + let res = executor.commit_tx(tx_id).await; + + if let Err(error) = res { + Ok(Err(error.into())) + } else { + Ok(Ok(())) + } + } + RunnerExecutor::External(schema_id) => { + let response: TransactionEndResponse = + executor_process_request("commitTx", json!({ "schemaId": schema_id, "txId": tx_id.to_string() })) + .await?; - if let Err(error) = res { - Ok(Err(error.into())) - } else { - Ok(Ok(())) + Ok(response.into()) + } } } pub async fn rollback_tx(&self, tx_id: TxId) -> TestResult { - let res = self.executor.rollback_tx(tx_id).await; - - if let Err(error) = res { - Ok(Err(error.into())) - } else { - Ok(Ok(())) + match &self.executor { + RunnerExecutor::Builtin(executor) => { + let res = executor.rollback_tx(tx_id).await; + + if let Err(error) = res { + Ok(Err(error.into())) + } else { + Ok(Ok(())) + } + } + RunnerExecutor::External(schema_id) => { + let response: TransactionEndResponse = executor_process_request( + "rollbackTx", + json!({ "schemaId": schema_id, "txId": tx_id.to_string() }), + ) + .await?; + + Ok(response.into()) + } } } @@ -276,7 +459,18 @@ impl Runner { } pub async fn get_logs(&mut self) -> Vec { - self.log_capture.get_logs().await + let mut logs = self.log_capture.get_logs().await; + match &self.executor { + RunnerExecutor::Builtin(_) => logs, + RunnerExecutor::External(schema_id) => { + let mut external_logs: Vec = + executor_process_request("getLogs", json!({ "schemaId": schema_id })) + .await + .unwrap(); + logs.append(&mut external_logs); + logs + } + } } pub fn connector_version(&self) -> &ConnectorVersion { @@ -286,4 +480,8 @@ impl Runner { pub fn protocol(&self) -> EngineProtocol { self.protocol } + + pub fn is_external_executor(&self) -> bool { + matches!(self.executor, RunnerExecutor::External(_)) + } } diff --git a/query-engine/connector-test-kit-rs/test-configs/libsql-sqlite b/query-engine/connector-test-kit-rs/test-configs/libsql-sqlite new file mode 100644 index 000000000000..9638e3a22840 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/libsql-sqlite @@ -0,0 +1,5 @@ +{ + "connector": "sqlite", + "driver_adapter": "libsql", + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 b/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 new file mode 100644 index 000000000000..0097d8c91f57 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 @@ -0,0 +1,7 @@ +{ + "connector": "postgres", + "version": "13", + "driver_adapter": "neon:ws", + "driver_adapter_config": { "proxyUrl": "127.0.0.1:5488/v1" }, + "external_test_executor": "default" +} \ No newline at end of file diff --git 
a/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 b/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 new file mode 100644 index 000000000000..00f0c75ed736 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 @@ -0,0 +1,6 @@ +{ + "connector": "postgres", + "version": "13", + "driver_adapter": "pg", + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 new file mode 100644 index 000000000000..48c89c79427c --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 @@ -0,0 +1,7 @@ +{ + "connector": "vitess", + "version": "8.0", + "driver_adapter": "planetscale", + "driver_adapter_config": { "proxyUrl": "http://root:root@127.0.0.1:8085" }, + "external_test_executor": "default" +} diff --git a/query-engine/connector-test-kit-rs/test-configs/postgres13 b/query-engine/connector-test-kit-rs/test-configs/postgres13 index 84fb5e1c04f1..f7b61cb4f888 100644 --- a/query-engine/connector-test-kit-rs/test-configs/postgres13 +++ b/query-engine/connector-test-kit-rs/test-configs/postgres13 @@ -1,3 +1,4 @@ { "connector": "postgres", - "version": "13"} \ No newline at end of file + "version": "13" +} \ No newline at end of file diff --git a/query-engine/connectors/mongodb-query-connector/Cargo.toml b/query-engine/connectors/mongodb-query-connector/Cargo.toml index d41210342107..c4a02eaa8643 100644 --- a/query-engine/connectors/mongodb-query-connector/Cargo.toml +++ b/query-engine/connectors/mongodb-query-connector/Cargo.toml @@ -22,6 +22,7 @@ tracing-futures = "0.2" uuid.workspace = true indexmap = "1.7" query-engine-metrics = {path = "../../metrics"} +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } [dependencies.prisma-models] path = "../../prisma-models" @@ -46,9 +47,6 @@ workspace = true [dependencies.serde] workspace = true -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] workspace = true diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs index b7667a45825f..4ea3d4590446 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs @@ -39,7 +39,7 @@ impl std::fmt::Display for AggregationType { impl GroupByBuilder { pub fn new() -> Self { - Self { ..Default::default() } + Default::default() } pub fn render(&self, by_fields: Vec) -> (Document, Option) { diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 5fe3052f2e8d..62d0be640761 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -25,6 +25,7 @@ uuid.workspace = true opentelemetry = { version = "0.17", features = ["tokio"] } tracing-opentelemetry = "0.17.3" quaint.workspace = true +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } [dependencies.connector-interface] package = "query-connector" @@ -44,9 +45,6 @@ version = "0.4" features = ["derive"] version = "1.0" -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] path = 
"../../../libs/user-facing-errors" diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs index 4fa10dde9483..34373eaf3d5b 100644 --- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs +++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs @@ -207,7 +207,7 @@ pub(crate) fn build( None => ConditionTree::NoCondition, Some(ref cursor) => { let cursor_fields: Vec<_> = cursor.as_scalar_fields().expect("Cursor fields contain non-scalars."); - let cursor_values: Vec<_> = cursor.db_values(); + let cursor_values: Vec<_> = cursor.db_values(ctx); let cursor_columns: Vec<_> = cursor_fields.as_slice().as_columns(ctx).collect(); let cursor_row = Row::from(cursor_columns); @@ -226,7 +226,7 @@ pub(crate) fn build( let order_subquery = order_by_defs .iter() .flat_map(|j| &j.joins) - .fold(order_subquery, |acc, join| acc.left_join(join.data.clone())); + .fold(order_subquery, |acc, join| acc.join(join.data.clone())); let len = definitions.len(); let reverse = query_arguments.needs_reversed_order(); @@ -469,7 +469,7 @@ fn cursor_order_def_aggregation_scalar( order_by: &OrderByScalarAggregation, order_by_def: &OrderByDefinition, ) -> CursorOrderDefinition { - let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::integer(0).into()]; + let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::int32(0).into()]; // We coalesce the order column to 0 when it's compared to the cmp table since the aggregations joins // might return NULL on relations that have no connected records @@ -493,7 +493,7 @@ fn cursor_order_def_aggregation_rel( // cf: part #2 of the SQL query above, when a field is nullable. let fks = foreign_keys_from_order_path(&order_by.path, &order_by_def.joins); - let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::integer(0).into()]; + let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::int32(0).into()]; // We coalesce the order column to 0 when it's compared to the cmp table since the aggregations joins // might return NULL on relations that have no connected records let order_column: Expression = coalesce(coalesce_exprs).into(); diff --git a/query-engine/connectors/sql-query-connector/src/database/js.rs b/query-engine/connectors/sql-query-connector/src/database/js.rs index 1dced9453fa3..0d4714871e59 100644 --- a/query-engine/connectors/sql-query-connector/src/database/js.rs +++ b/query-engine/connectors/sql-query-connector/src/database/js.rs @@ -11,40 +11,25 @@ use quaint::{ connector::{IsolationLevel, Transaction}, prelude::{Queryable as QuaintQueryable, *}, }; -use std::{ - collections::{hash_map::Entry, HashMap}, - sync::{Arc, Mutex}, -}; +use std::sync::{Arc, Mutex}; -/// Registry is the type for the global registry of driver adapters. -type Registry = HashMap; +static ACTIVE_DRIVER_ADAPTER: Lazy>> = Lazy::new(|| Mutex::new(None)); -/// REGISTRY is the global registry of Driver Adapters. 
-static REGISTRY: Lazy> = Lazy::new(|| Mutex::new(HashMap::new())); +fn active_driver_adapter(provider: &str) -> connector::Result { + let lock = ACTIVE_DRIVER_ADAPTER.lock().unwrap(); -fn registered_driver_adapter(provider: &str) -> connector::Result { - let lock = REGISTRY.lock().unwrap(); - lock.get(provider) + lock.as_ref() + .map(|conn_ref| conn_ref.to_owned()) .ok_or(ConnectorError::from_kind(ErrorKind::UnsupportedConnector(format!( "A driver adapter for {} was not registered", provider )))) - .map(|conn_ref| conn_ref.to_owned()) } -pub fn register_driver_adapter(provider: &str, connector: Arc) -> Result<(), String> { - let mut lock = REGISTRY.lock().unwrap(); - let entry = lock.entry(provider.to_string()); - match entry { - Entry::Occupied(_) => Err(format!( - "A driver adapter for {} was already registered, and cannot be overridden.", - provider - )), - Entry::Vacant(v) => { - v.insert(DriverAdapter { connector }); - Ok(()) - } - } +pub fn activate_driver_adapter(connector: Arc) { + let mut lock = ACTIVE_DRIVER_ADAPTER.lock().unwrap(); + + *lock = Some(DriverAdapter { connector }); } pub struct Js { @@ -69,7 +54,7 @@ impl FromSource for Js { url: &str, features: psl::PreviewFeatures, ) -> connector_interface::Result { - let connector = registered_driver_adapter(source.active_provider)?; + let connector = active_driver_adapter(source.active_provider)?; let connection_info = get_connection_info(url)?; Ok(Js { @@ -117,7 +102,7 @@ impl Connector for Js { // declaration, so finally I couldn't come up with anything better then wrapping a QuaintQueryable // in this object, and implementing TransactionCapable (and quaint::Queryable) explicitly for it. #[derive(Clone)] -struct DriverAdapter { +pub struct DriverAdapter { connector: Arc, } diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 6de72d5e744c..470628de1132 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -166,7 +166,7 @@ pub(crate) async fn get_related_m2m_record_ids( // [DTODO] To verify: We might need chunked fetch here (too many parameters in the query). 
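// Editor's sketch of the "single active adapter" pattern introduced above, which replaces
// the per-provider registry: one process-wide slot that is set once by activate() and cloned
// out by callers. `MockAdapter` is a stand-in type; the real code stores the driver-adapter
// wrapper behind an Arc. Assumes the once_cell crate, as used in the patched module.
use once_cell::sync::Lazy;
use std::sync::{Arc, Mutex};

#[derive(Debug)]
struct MockAdapter {
    provider: String,
}

static ACTIVE_ADAPTER: Lazy<Mutex<Option<Arc<MockAdapter>>>> = Lazy::new(|| Mutex::new(None));

fn activate(adapter: Arc<MockAdapter>) {
    *ACTIVE_ADAPTER.lock().unwrap() = Some(adapter);
}

fn active(provider: &str) -> Result<Arc<MockAdapter>, String> {
    // Cloning the Arc out of the slot keeps the lock held only briefly.
    ACTIVE_ADAPTER
        .lock()
        .unwrap()
        .clone()
        .ok_or_else(|| format!("A driver adapter for {provider} was not registered"))
}

fn main() {
    println!("{:?}", active("postgres")); // Err: nothing activated yet
    activate(Arc::new(MockAdapter { provider: "postgres".into() }));
    println!("{:?}", active("postgres")); // Ok(adapter)
}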
let select = Select::from_table(table) - .so_that(query_builder::in_conditions(&from_columns, from_record_ids)) + .so_that(query_builder::in_conditions(&from_columns, from_record_ids, ctx)) .columns(from_columns.into_iter().chain(to_columns.into_iter())); let parent_model_id = from_field.model().primary_identifier(); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs index 75657406d6ec..2270d6c6fefa 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs @@ -1,7 +1,7 @@ use super::read::get_single_record; use crate::column_metadata::{self, ColumnMetadata}; -use crate::filter_conversion::AliasedCondition; +use crate::filter::FilterBuilder; use crate::query_builder::write::{build_update_and_set_query, chunk_update_with_ids}; use crate::row::ToSqlRow; use crate::{Context, QueryExt, Queryable}; @@ -31,8 +31,9 @@ pub(crate) async fn update_one_with_selection( return get_single_record(conn, model, &filter, &selected_fields, &[], ctx).await; } - let update = build_update_and_set_query(model, args, Some(&selected_fields), ctx) - .so_that(build_update_one_filter(record_filter).aliased_condition_from(None, false, ctx)); + let cond = FilterBuilder::without_top_level_joins().visit_filter(build_update_one_filter(record_filter), ctx); + + let update = build_update_and_set_query(model, args, Some(&selected_fields), ctx).so_that(cond); let field_names: Vec<_> = selected_fields.db_names().collect(); let idents = selected_fields.type_identifiers_with_arities(); @@ -101,7 +102,7 @@ pub(crate) async fn update_many_from_filter( ctx: &Context<'_>, ) -> crate::Result { let update = build_update_and_set_query(model, args, None, ctx); - let filter_condition = record_filter.filter.aliased_condition_from(None, false, ctx); + let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.filter, ctx); let update = update.so_that(filter_condition); let count = conn.execute(update.into()).await?; @@ -117,7 +118,7 @@ pub(crate) async fn update_many_from_ids_and_filter( args: WriteArgs, ctx: &Context<'_>, ) -> crate::Result<(usize, Vec)> { - let filter_condition = record_filter.filter.aliased_condition_from(None, false, ctx); + let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.filter.clone(), ctx); let ids: Vec = conn.filter_selectors(model, record_filter, ctx).await?; if ids.is_empty() { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs index 23b14ea93a77..cfd473923ffc 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs @@ -1,6 +1,6 @@ use crate::{ column_metadata, - filter_conversion::AliasedCondition, + filter::FilterBuilder, model_extensions::AsColumns, query_builder::write::{build_update_and_set_query, create_record}, row::ToSqlRow, @@ -21,7 +21,7 @@ pub(crate) async fn native_upsert( let meta = column_metadata::create(&field_names, &idents); - let where_condition = upsert.filter().aliased_condition_from(None, false, ctx); + let where_condition = FilterBuilder::without_top_level_joins().visit_filter(upsert.filter().clone(), ctx); let update = 
build_update_and_set_query(upsert.model(), upsert.update().clone(), None, ctx).so_that(where_condition); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index f23b01a457d0..425f4ac1d4b3 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -1,6 +1,6 @@ use super::update::*; use crate::column_metadata; -use crate::filter_conversion::AliasedCondition; +use crate::filter::FilterBuilder; use crate::row::ToSqlRow; use crate::{ error::SqlError, model_extensions::*, query_builder::write, sql_trace::SqlTraceComment, Context, QueryExt, @@ -361,24 +361,24 @@ pub(crate) async fn delete_records( record_filter: RecordFilter, ctx: &Context<'_>, ) -> crate::Result { - let filter_condition = record_filter.clone().filter.aliased_condition_from(None, false, ctx); - let ids = conn.filter_selectors(model, record_filter, ctx).await?; - let ids: Vec<&SelectionResult> = ids.iter().collect(); - let count = ids.len(); + let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.clone().filter, ctx); - if count == 0 { - return Ok(count); - } + // If we have selectors, then we must chunk the mutation into multiple if necessary and add the ids to the filter. + let row_count = if record_filter.has_selectors() { + let ids: Vec<_> = record_filter.selectors.as_ref().unwrap().iter().collect(); + let mut row_count = 0; - let mut row_count = 0; - for delete in write::delete_many(model, ids.as_slice(), filter_condition, ctx) { - row_count += conn.execute(delete).await?; - } + for delete in write::delete_many_from_ids_and_filter(model, ids.as_slice(), filter_condition, ctx) { + row_count += conn.execute(delete).await?; + } - match usize::try_from(row_count) { - Ok(row_count) => Ok(row_count), - Err(_) => Ok(count), - } + row_count + } else { + conn.execute(write::delete_many_from_filter(model, filter_condition, ctx)) + .await? + }; + + Ok(row_count as usize) } /// Connect relations defined in `child_ids` to a parent defined in `parent_id`. diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/connectors/sql-query-connector/src/filter/alias.rs new file mode 100644 index 000000000000..61686929d400 --- /dev/null +++ b/query-engine/connectors/sql-query-connector/src/filter/alias.rs @@ -0,0 +1,75 @@ +use crate::{model_extensions::AsColumn, *}; + +use prisma_models::ScalarField; +use quaint::prelude::Column; + +#[derive(Clone, Copy, Debug)] +/// A distinction in aliasing to separate the parent table and the joined data +/// in the statement. +#[derive(Default)] +pub enum AliasMode { + #[default] + Table, + Join, +} + +#[derive(Clone, Copy, Debug, Default)] +/// Aliasing tool to count the nesting level to help with heavily nested +/// self-related queries. +pub(crate) struct Alias { + counter: usize, + mode: AliasMode, +} + +impl Alias { + /// Increment the alias as a new copy. + /// + /// Use when nesting one level down to a new subquery. `AliasMode` is + /// required due to the fact the current mode can be in `AliasMode::Join`. + pub fn inc(&self, mode: AliasMode) -> Self { + Self { + counter: self.counter + 1, + mode, + } + } + + /// Flip the alias to a different mode keeping the same nesting count. 
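// Editor's usage sketch for the aliasing scheme above: each nesting level bumps the counter,
// tables render as t{n}, joins as j{n}, and flip() switches the prefix without advancing the
// counter. This is a simplified stand-alone copy of the same behaviour for illustration, not
// the connector's types.
#[derive(Clone, Copy, Debug, Default)]
enum Mode {
    #[default]
    Table,
    Join,
}

#[derive(Clone, Copy, Debug, Default)]
struct MiniAlias {
    counter: usize,
    mode: Mode,
}

impl MiniAlias {
    fn inc(self, mode: Mode) -> Self {
        Self { counter: self.counter + 1, mode }
    }
    fn flip(self, mode: Mode) -> Self {
        Self { mode, ..self }
    }
    fn render(self) -> String {
        match self.mode {
            Mode::Table => format!("t{}", self.counter),
            Mode::Join => format!("j{}", self.counter),
        }
    }
}

fn main() {
    let root = MiniAlias::default();     // "t0": the outermost table
    let nested = root.inc(Mode::Table);  // "t1": one subquery down
    let joined = nested.flip(Mode::Join); // "j1": same level, join alias
    println!("{} {} {}", root.render(), nested.render(), joined.render());
}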
+ pub fn flip(&self, mode: AliasMode) -> Self { + Self { + counter: self.counter, + mode, + } + } + + /// A string representation of the current alias. The current mode can be + /// overridden by defining the `mode_override`. + pub fn to_string(&self, mode_override: Option) -> String { + match mode_override.unwrap_or(self.mode) { + AliasMode::Table => format!("t{}", self.counter), + AliasMode::Join => format!("j{}", self.counter), + } + } +} + +pub(crate) trait AliasedColumn { + /// Conversion to a column. Column will point to the given alias if provided, otherwise the fully qualified path. + /// + /// Alias should be used only when nesting, making the top level queries + /// more explicit. + fn aliased_col(self, alias: Option, ctx: &Context<'_>) -> Column<'static>; +} + +impl AliasedColumn for &ScalarField { + fn aliased_col(self, alias: Option, ctx: &Context<'_>) -> Column<'static> { + self.as_column(ctx).aliased_col(alias, ctx) + } +} + +impl AliasedColumn for Column<'static> { + fn aliased_col(self, alias: Option, _ctx: &Context<'_>) -> Column<'static> { + match alias { + Some(alias) => self.table(alias.to_string(None)), + None => self, + } + } +} diff --git a/query-engine/connectors/sql-query-connector/src/filter/mod.rs b/query-engine/connectors/sql-query-connector/src/filter/mod.rs new file mode 100644 index 000000000000..bbf3557b16b7 --- /dev/null +++ b/query-engine/connectors/sql-query-connector/src/filter/mod.rs @@ -0,0 +1,42 @@ +mod alias; +mod visitor; + +use connector_interface::Filter; +use quaint::prelude::*; +use visitor::*; + +use crate::{context::Context, join_utils::AliasedJoin}; + +pub(crate) struct FilterBuilder {} +pub(crate) struct FilterBuilderWithJoins {} +pub(crate) struct FilterBuilderWithoutJoins {} + +impl FilterBuilder { + pub(crate) fn with_top_level_joins() -> FilterBuilderWithJoins { + FilterBuilderWithJoins {} + } + + pub(crate) fn without_top_level_joins() -> FilterBuilderWithoutJoins { + FilterBuilderWithoutJoins {} + } +} + +impl FilterBuilderWithJoins { + /// Visits a filter and return additional top-level joins that need to be manually dealt with. + pub(crate) fn visit_filter( + &self, + filter: Filter, + ctx: &Context, + ) -> (ConditionTree<'static>, Option>) { + FilterVisitor::with_top_level_joins().visit_filter(filter, ctx) + } +} + +impl FilterBuilderWithoutJoins { + /// Visits a filter without any top-level joins. Can be safely used in any context. 
+ pub(crate) fn visit_filter(&self, filter: Filter, ctx: &Context) -> ConditionTree<'static> { + let (cond, _) = FilterVisitor::without_top_level_joins().visit_filter(filter, ctx); + + cond + } +} diff --git a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs similarity index 61% rename from query-engine/connectors/sql-query-connector/src/filter_conversion.rs rename to query-engine/connectors/sql-query-connector/src/filter/visitor.rs index a95df0ce5aa1..6ab32f89735f 100644 --- a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -1,208 +1,366 @@ +use super::alias::*; +use crate::join_utils::{compute_one2m_join, AliasedJoin}; use crate::{model_extensions::*, Context}; + use connector_interface::filter::*; use prisma_models::prelude::*; +use psl::datamodel_connector::ConnectorCapability; use quaint::ast::concat; use quaint::ast::*; use std::convert::TryInto; -#[derive(Clone, Copy, Debug)] -/// A distinction in aliasing to separate the parent table and the joined data -/// in the statement. -#[derive(Default)] -pub enum AliasMode { - #[default] - Table, - Join, +pub(crate) trait FilterVisitorExt { + fn visit_filter(&mut self, filter: Filter, ctx: &Context<'_>) + -> (ConditionTree<'static>, Option>); + fn visit_relation_filter( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>); + fn visit_scalar_filter(&mut self, filter: ScalarFilter, ctx: &Context<'_>) -> ConditionTree<'static>; + fn visit_scalar_list_filter(&mut self, filter: ScalarListFilter, ctx: &Context<'_>) -> ConditionTree<'static>; + fn visit_one_relation_is_null_filter( + &mut self, + filter: OneRelationIsNullFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>); + fn visit_aggregation_filter(&mut self, filter: AggregationFilter, ctx: &Context<'_>) -> ConditionTree<'static>; } -#[derive(Clone, Copy, Debug, Default)] -/// Aliasing tool to count the nesting level to help with heavily nested -/// self-related queries. -pub(crate) struct Alias { - counter: usize, - mode: AliasMode, +#[derive(Debug, Clone, Default)] +pub(crate) struct FilterVisitor { + /// The last alias that's been rendered. + last_alias: Option, + /// The parent alias, used when rendering nested filters so that a child filter can refer to its join. + parent_alias: Option, + /// Whether filters can return top-level joins. + with_top_level_joins: bool, + /// Whether this visitor traverses nested filters. + is_nested: bool, + /// Whether the visitor is in a NOT clause. + reverse: bool, } -impl Alias { - /// Increment the alias as a new copy. - /// - /// Use when nesting one level down to a new subquery. `AliasMode` is - /// required due to the fact the current mode can be in `AliasMode::Join`. - pub fn inc(&self, mode: AliasMode) -> Self { +impl FilterVisitor { + pub fn with_top_level_joins() -> Self { Self { - counter: self.counter + 1, - mode, + with_top_level_joins: true, + ..Default::default() } } - /// Flip the alias to a different mode keeping the same nesting count. - pub fn flip(&self, mode: AliasMode) -> Self { + pub fn without_top_level_joins() -> Self { Self { - counter: self.counter, - mode, - } - } - - /// A string representation of the current alias. The current mode can be - /// overridden by defining the `mode_override`. 
- pub fn to_string(&self, mode_override: Option) -> String { - match mode_override.unwrap_or(self.mode) { - AliasMode::Table => format!("t{}", self.counter), - AliasMode::Join => format!("j{}", self.counter), + with_top_level_joins: false, + ..Default::default() } } -} -#[derive(Clone)] -pub struct ConditionState { - reverse: bool, - alias: Option, -} + /// Returns the next join/table alias by increasing the counter of the last alias. + fn next_alias(&mut self, mode: AliasMode) -> Alias { + let next_alias = self.last_alias.unwrap_or_default().inc(mode); + self.last_alias = Some(next_alias); -impl ConditionState { - fn new(alias: Option, reverse: bool) -> Self { - Self { reverse, alias } + next_alias } - fn invert_reverse(self) -> Self { - Self::new(self.alias, !self.reverse) + /// Returns the parent alias, if there's one set, so that nested filters can refer to the parent join/table. + fn parent_alias(&self) -> Option { + self.parent_alias } - fn alias(&self) -> Option { - self.alias + /// A top-level join can be rendered if we're explicitly allowing it or if we're in a nested visitor. + fn can_render_join(&self) -> bool { + self.with_top_level_joins || self.is_nested } + /// Returns whether the visitor is in a NOT clause. fn reverse(&self) -> bool { self.reverse } -} -pub(crate) trait AliasedCondition { - /// Conversion to a query condition tree. Columns will point to the given - /// alias if provided, otherwise using the fully qualified path. - /// - /// Alias should be used only when nesting, making the top level queries - /// more explicit. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static>; - - fn aliased_condition_from(&self, alias: Option, reverse: bool, ctx: &Context<'_>) -> ConditionTree<'static> - where - Self: Sized + Clone, - { - self.clone().aliased_cond(ConditionState::new(alias, reverse), ctx) + fn invert_reverse(&mut self, f: impl FnOnce(&mut Self) -> T) -> T { + self.reverse = !self.reverse; + let res = f(self); + self.reverse = !self.reverse; + res } -} -trait AliasedSelect { - /// Conversion to a select. Columns will point to the given - /// alias if provided, otherwise using the fully qualified path. - /// - /// Alias should be used only when nesting, making the top level queries - /// more explicit. - fn aliased_sel(self, alias: Option, ctx: &Context<'_>) -> Select<'static>; -} + fn update_last_alias(&mut self, nested_visitor: &Self) -> &mut Self { + if let Some(alias) = nested_visitor.last_alias { + self.last_alias = Some(alias); + } -trait AliasedColumn { - /// Conversion to a column. Column will point to the given alias if provided, otherwise the fully qualified path. - /// - /// Alias should be used only when nesting, making the top level queries - /// more explicit. 
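// Editor's sketch of the flag-toggling helper used by the visitor above for NOT filters: the
// `reverse` flag is flipped before visiting the nested filter and flipped back afterwards, so
// a double negation cancels out naturally. Stand-alone illustration only.
#[derive(Default)]
struct Visitor {
    reverse: bool,
}

impl Visitor {
    fn invert_reverse<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
        self.reverse = !self.reverse;
        let res = f(self);
        self.reverse = !self.reverse;
        res
    }
}

fn main() {
    let mut v = Visitor::default();
    // Simulate NOT { NOT { ... } }: the innermost visit sees reverse == false again.
    let seen_inner = v.invert_reverse(|outer| outer.invert_reverse(|inner| inner.reverse));
    assert!(!seen_inner);
    assert!(!v.reverse); // restored after both closures return
}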
- fn aliased_col(self, alias: Option, ctx: &Context<'_>) -> Column<'static>; -} + self + } -impl AliasedColumn for &ScalarFieldRef { - fn aliased_col(self, alias: Option, ctx: &Context<'_>) -> Column<'static> { - self.as_column(ctx).aliased_col(alias, ctx) + fn create_nested_visitor(&self, parent_alias: Alias) -> Self { + let mut nested_visitor = self.clone(); + nested_visitor.is_nested = true; + nested_visitor.parent_alias = Some(parent_alias); + + nested_visitor + } + + fn visit_nested_filter(&mut self, parent_alias: Alias, f: impl FnOnce(&mut Self) -> T) -> T { + let mut nested_visitor = self.create_nested_visitor(parent_alias); + let res = f(&mut nested_visitor); + // Ensures the alias counter is updated after building the nested filter so that we don't render duplicate aliases. + self.update_last_alias(&nested_visitor); + + res } -} -impl AliasedColumn for Column<'static> { - fn aliased_col(self, alias: Option, _ctx: &Context<'_>) -> Column<'static> { - match alias { - Some(alias) => self.table(alias.to_string(None)), - None => self, + fn visit_relation_filter_select( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ModelProjection, Select<'static>) { + let is_many_to_many = filter.field.relation().is_many_to_many(); + // HACK: This is temporary. A fix should be done in Quaint instead of branching out here. + // See https://www.notion.so/prismaio/Spec-Faulty-Tuple-Join-on-SQL-Server-55b8232fb44f4a6cb4d3f36428f17bac + // for more info + let support_row_in = filter + .field + .dm + .schema + .connector + .capabilities() + .contains(ConnectorCapability::RowIn); + let has_compound_fields = filter.field.linking_fields().into_inner().len() > 1; + + // If the relation is an M2M relation we don't have a choice but to join + // If the connector does not support (a, b) IN (SELECT c, d) and there are several linking fields, then we must use a join. + // Hint: SQL Server does not support `ROW() IN ()`. 
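// Editor's sketch of the strategy choice made right below: the join-based subselect is only
// required for m2m relations, or when the connector cannot express (a, b) IN (SELECT ...)
// (the RowIn capability) and the relation links through more than one field. Plain booleans
// stand in for the real capability and linking-field lookups.
fn needs_join_subselect(is_many_to_many: bool, supports_row_in: bool, compound_linking_fields: bool) -> bool {
    is_many_to_many || (!supports_row_in && compound_linking_fields)
}

fn main() {
    // SQL Server (no ROW IN) with a compound FK must take the join path...
    assert!(needs_join_subselect(false, false, true));
    // ...while a connector with ROW IN and a single-column FK can use (col) IN (SELECT ...).
    assert!(!needs_join_subselect(false, true, false));
}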
+ if is_many_to_many || (!support_row_in && has_compound_fields) { + self.visit_relation_filter_select_no_row(filter, ctx) + } else { + self.visit_relation_filter_select_with_row(filter, ctx) } } + + /// Traverses a relation filter using this rough SQL structure: + /// + /// ```sql + /// (parent.id) IN ( + /// SELECT id FROM parent + /// INNER JOIN child ON (child.parent_id = parent.id) + /// WHERE + /// ) + /// ``` + /// We need this in two cases: + /// - For M2M relations, as we need to traverse the join table so the join is not superfluous + /// - SQL Server because it does not support (a, b) IN (subselect) + fn visit_relation_filter_select_no_row( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ModelProjection, Select<'static>) { + let alias = self.next_alias(AliasMode::Table); + let condition = filter.condition; + let table = filter.field.as_table(ctx); + let ids = ModelProjection::from(filter.field.model().primary_identifier()); + + let selected_identifier: Vec = filter + .field + .identifier_columns(ctx) + .map(|col| col.aliased_col(Some(alias), ctx)) + .collect(); + + let join_columns: Vec = filter + .field + .join_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .collect(); + + let related_table = filter.field.related_model().as_table(ctx); + let related_join_columns: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx)) + .collect(); + + let (nested_conditions, nested_joins) = self + .visit_nested_filter(alias.flip(AliasMode::Join), |nested_visitor| { + nested_visitor.visit_filter(*filter.nested_filter, ctx) + }); + + let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect()); + let nested_conditons = selected_identifier + .clone() + .into_iter() + .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); + + let join = related_table + .alias(alias.to_string(Some(AliasMode::Join))) + .on(Row::from(related_join_columns).equals(Row::from(join_columns))); + + let select = Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table)))) + .columns(selected_identifier) + .inner_join(join) + .so_that(nested_conditons); + + let select = if let Some(nested_joins) = nested_joins { + nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data)) + } else { + select + }; + + (ids, select) + } + + /// Traverses a relation filter using this rough SQL structure: + /// + /// ```sql + /// (parent.id1, parent.id2) IN ( + /// SELECT id1, id2 FROM child + /// WHERE + /// ) + /// ``` + fn visit_relation_filter_select_with_row( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ModelProjection, Select<'static>) { + let alias = self.next_alias(AliasMode::Table); + let condition = filter.condition; + let linking_fields = ModelProjection::from(filter.field.linking_fields()); + + let related_table = filter.field.related_model().as_table(ctx); + // Select linking fields to match the linking fields of the parent record + let related_columns: Vec<_> = filter + .field + .related_field() + .join_columns(ctx) + .map(|col| col.aliased_col(Some(alias), ctx)) + .collect(); + + let (nested_conditions, nested_joins) = + self.visit_nested_filter(alias, |this| this.visit_filter(*filter.nested_filter, ctx)); + let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect()); + + let conditions = related_columns + .clone() + .into_iter() + .fold(nested_conditions, |acc, column| 
acc.and(column.is_not_null())); + + let select = Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table)))) + .columns(related_columns) + .so_that(conditions); + + let select = if let Some(nested_joins) = nested_joins { + nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data)) + } else { + select + }; + + (linking_fields, select) + } } -impl AliasedCondition for Filter { - /// Conversion from a `Filter` to a query condition tree. Aliased when in a nested `SELECT`. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - match self { +impl FilterVisitorExt for FilterVisitor { + fn visit_filter( + &mut self, + filter: Filter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>) { + match filter { Filter::And(mut filters) => match filters.len() { - n if n == 0 => ConditionTree::NoCondition, - n if n == 1 => filters.pop().unwrap().aliased_cond(state, ctx), + 0 => (ConditionTree::NoCondition, None), + 1 => self.visit_filter(filters.pop().unwrap(), ctx), _ => { - let exprs = filters - .into_iter() - .map(|f| f.aliased_cond(state.clone(), ctx)) - .map(Expression::from) - .collect(); + let mut exprs = Vec::with_capacity(filters.len()); + let mut top_level_joins = vec![]; + + for filter in filters { + let (conditions, nested_joins) = self.visit_filter(filter, ctx); + + exprs.push(Expression::from(conditions)); - ConditionTree::And(exprs) + if let Some(nested_joins) = nested_joins { + top_level_joins.extend(nested_joins); + } + } + + (ConditionTree::And(exprs), Some(top_level_joins)) } }, Filter::Or(mut filters) => match filters.len() { - n if n == 0 => ConditionTree::NegativeCondition, - n if n == 1 => filters.pop().unwrap().aliased_cond(state, ctx), + 0 => (ConditionTree::NegativeCondition, None), + 1 => self.visit_filter(filters.pop().unwrap(), ctx), _ => { - let exprs = filters - .into_iter() - .map(|f| f.aliased_cond(state.clone(), ctx)) - .map(Expression::from) - .collect(); + let mut exprs = Vec::with_capacity(filters.len()); + let mut top_level_joins = vec![]; + + for filter in filters { + let (conditions, nested_joins) = self.visit_filter(filter, ctx); - ConditionTree::Or(exprs) + exprs.push(Expression::from(conditions)); + + if let Some(nested_joins) = nested_joins { + top_level_joins.extend(nested_joins); + } + } + + (ConditionTree::Or(exprs), Some(top_level_joins)) } }, Filter::Not(mut filters) => match filters.len() { - n if n == 0 => ConditionTree::NoCondition, - n if n == 1 => filters.pop().unwrap().aliased_cond(state.invert_reverse(), ctx).not(), + 0 => (ConditionTree::NoCondition, None), + 1 => { + let (cond, joins) = self.invert_reverse(|this| this.visit_filter(filters.pop().unwrap(), ctx)); + + (cond.not(), joins) + } _ => { - let exprs = filters - .into_iter() - .map(|f| f.aliased_cond(state.clone().invert_reverse(), ctx).not()) - .map(Expression::from) - .collect(); + let mut exprs = Vec::with_capacity(filters.len()); + let mut top_level_joins = vec![]; + + for filter in filters { + let (conditions, nested_joins) = self.invert_reverse(|this| this.visit_filter(filter, ctx)); + let inverted_conditions = conditions.not(); - ConditionTree::And(exprs) + exprs.push(Expression::from(inverted_conditions)); + + if let Some(nested_joins) = nested_joins { + top_level_joins.extend(nested_joins); + } + } + + (ConditionTree::And(exprs), Some(top_level_joins)) } }, - Filter::Scalar(filter) => filter.aliased_cond(state, ctx), - Filter::OneRelationIsNull(filter) => filter.aliased_cond(state, ctx), - 
Filter::Relation(filter) => filter.aliased_cond(state, ctx), + Filter::Scalar(filter) => (self.visit_scalar_filter(filter, ctx), None), + Filter::OneRelationIsNull(filter) => self.visit_one_relation_is_null_filter(filter, ctx), + Filter::Relation(filter) => self.visit_relation_filter(filter, ctx), Filter::BoolFilter(b) => { if b { - ConditionTree::NoCondition + (ConditionTree::NoCondition, None) } else { - ConditionTree::NegativeCondition + (ConditionTree::NegativeCondition, None) } } - Filter::Aggregation(filter) => filter.aliased_cond(state, ctx), - Filter::ScalarList(filter) => filter.aliased_cond(state, ctx), - Filter::Empty => ConditionTree::NoCondition, + Filter::Aggregation(filter) => (self.visit_aggregation_filter(filter, ctx), None), + Filter::ScalarList(filter) => (self.visit_scalar_list_filter(filter, ctx), None), + Filter::Empty => (ConditionTree::NoCondition, None), Filter::Composite(_) => unimplemented!("SQL connectors do not support composites yet."), } } -} -impl AliasedCondition for ScalarFilter { - /// Conversion from a `ScalarFilter` to a query condition tree. Aliased when in a nested `SELECT`. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - match self.condition { + fn visit_scalar_filter(&mut self, filter: ScalarFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + match filter.condition { ScalarCondition::Search(_, _) | ScalarCondition::NotSearch(_, _) => { - let mut projections = match self.condition.clone() { + let mut projections = match filter.condition.clone() { ScalarCondition::Search(_, proj) => proj, ScalarCondition::NotSearch(_, proj) => proj, _ => unreachable!(), }; - projections.push(self.projection); + projections.push(filter.projection); let columns: Vec = projections .into_iter() .map(|p| match p { - ScalarProjection::Single(field) => field.aliased_col(state.alias(), ctx), + ScalarProjection::Single(field) => field.aliased_col(self.parent_alias(), ctx), ScalarProjection::Compound(_) => { unreachable!("Full-text search does not support compound fields") } @@ -213,232 +371,223 @@ impl AliasedCondition for ScalarFilter { convert_scalar_filter( comparable, - self.condition, - state.reverse(), - self.mode, + filter.condition, + self.reverse(), + filter.mode, &[], - state.alias(), + self.parent_alias(), false, ctx, ) } - _ => scalar_filter_aliased_cond(self, state.alias(), state.reverse(), ctx), + _ => scalar_filter_aliased_cond(filter, self.parent_alias(), self.reverse(), ctx), } } -} - -fn scalar_filter_aliased_cond( - sf: ScalarFilter, - alias: Option, - reverse: bool, - ctx: &Context<'_>, -) -> ConditionTree<'static> { - match sf.projection { - ScalarProjection::Single(field) => { - let comparable: Expression = field.aliased_col(alias, ctx).into(); - convert_scalar_filter(comparable, sf.condition, reverse, sf.mode, &[field], alias, false, ctx) - } - ScalarProjection::Compound(fields) => { - let columns: Vec> = fields - .clone() - .into_iter() - .map(|field| field.aliased_col(alias, ctx)) - .collect(); - - convert_scalar_filter( - Row::from(columns).into(), - sf.condition, - reverse, - sf.mode, - &fields, - alias, - false, - ctx, - ) - } - } -} + fn visit_relation_filter( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>) { + let parent_alias = self.parent_alias().map(|a| a.to_string(None)); + + match &filter.condition { + // { to_one: { isNot: { ... 
} } } + RelationCondition::NoRelatedRecord if self.can_render_join() && !filter.field.is_list() => { + let alias = self.next_alias(AliasMode::Join); + + let linking_fields_null: Vec<_> = + ModelProjection::from(filter.field.related_model().primary_identifier()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_null()) + .map(Expression::from) + .collect(); + let null_filter = ConditionTree::And(linking_fields_null); -impl AliasedCondition for ScalarListFilter { - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let comparable: Expression = self.field.aliased_col(state.alias(), ctx).into(); + let join = compute_one2m_join( + &filter.field, + alias.to_string(None).as_str(), + parent_alias.as_deref(), + ctx, + ); - convert_scalar_list_filter(comparable, self.condition, &self.field, state.alias(), ctx) - } -} + let mut output_joins = vec![join]; -fn convert_scalar_list_filter( - comparable: Expression<'static>, - cond: ScalarListCondition, - field: &ScalarFieldRef, - alias: Option, - ctx: &Context<'_>, -) -> ConditionTree<'static> { - let condition = match cond { - ScalarListCondition::Contains(ConditionValue::Value(val)) => { - comparable.compare_raw("@>", convert_list_pv(field, vec![val])) - } - ScalarListCondition::Contains(ConditionValue::FieldRef(field_ref)) => { - let field_ref_expr: Expression = field_ref.aliased_col(alias, ctx).into(); + let (conditions, nested_joins) = self.visit_nested_filter(alias, |nested_visitor| { + nested_visitor + .invert_reverse(|nested_visitor| nested_visitor.visit_filter(*filter.nested_filter, ctx)) + }); - // This code path is only reachable for connectors with `ScalarLists` capability - field_ref_expr.equals(comparable.any()) - } - ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { - comparable.compare_raw("@>", convert_list_pv(field, vals)) - } - ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => { - comparable.compare_raw("@>", field_ref.aliased_col(alias, ctx)) - } - ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { - comparable.compare_raw("&&", convert_list_pv(field, vals)) - } - ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { - comparable.compare_raw("&&", field_ref.aliased_col(alias, ctx)) - } - ScalarListCondition::IsEmpty(true) => comparable.compare_raw("=", Value::Array(Some(vec![])).raw()), - ScalarListCondition::IsEmpty(false) => comparable.compare_raw("<>", Value::Array(Some(vec![])).raw()), - }; + if let Some(nested_joins) = nested_joins { + output_joins.extend(nested_joins); + } - ConditionTree::single(condition) -} + (conditions.not().or(null_filter), Some(output_joins)) + } + // { to_one: { is: { ... } } } + RelationCondition::ToOneRelatedRecord if self.can_render_join() && !filter.field.is_list() => { + let alias = self.next_alias(AliasMode::Join); + + let linking_fields_not_null: Vec<_> = + ModelProjection::from(filter.field.related_model().primary_identifier()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_not_null()) + .map(Expression::from) + .collect(); + let not_null_filter = ConditionTree::And(linking_fields_not_null); -impl AliasedCondition for RelationFilter { - /// Conversion from a `RelationFilter` to a query condition tree. Aliased when in a nested `SELECT`. 
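
To make the join-based branches above easier to follow, here is a rough sketch of the SQL they aim to produce for a to-one relation filter. Every identifier in it (the Parent/Child tables, the `childId` column, the `j1` alias) is an assumption made for the illustration, not something taken from this change:

```sql
-- Hypothetical schema: Parent(id, childId) referencing Child(id, name).
-- { child: { is: { name: 'x' } } } is intended to come out roughly as:
SELECT "Parent"."id" FROM "Parent"
LEFT JOIN "Child" AS "j1" ON "j1"."id" = "Parent"."childId"
WHERE "j1"."name" = 'x' AND "j1"."id" IS NOT NULL;

-- { child: { isNot: { name: 'x' } } } negates the nested condition and also
-- accepts parents with no related record at all:
SELECT "Parent"."id" FROM "Parent"
LEFT JOIN "Child" AS "j1" ON "j1"."id" = "Parent"."childId"
WHERE (NOT ("j1"."name" = 'x')) OR "j1"."id" IS NULL;
```

The `IS NOT NULL` / `IS NULL` checks on the joined primary key correspond to the `not_null_filter` and `null_filter` built above; because the join is a LEFT JOIN, they are what distinguishes "related record exists and matches" from "no related record".
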
- fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let ids = ModelProjection::from(self.field.model().primary_identifier()).as_columns(ctx); - let columns: Vec> = ids.map(|col| col.aliased_col(state.alias(), ctx)).collect(); + let join = compute_one2m_join( + &filter.field, + alias.to_string(None).as_str(), + parent_alias.as_deref(), + ctx, + ); + let mut output_joins = vec![join]; - let condition = self.condition; - let sub_select = self.aliased_sel(state.alias().map(|a| a.inc(AliasMode::Table)), ctx); + let (conditions, nested_joins) = self.visit_nested_filter(alias, |nested_visitor| { + nested_visitor.visit_filter(*filter.nested_filter, ctx) + }); - let comparison = match condition { - RelationCondition::AtLeastOneRelatedRecord => Row::from(columns).in_selection(sub_select), - RelationCondition::EveryRelatedRecord => Row::from(columns).not_in_selection(sub_select), - RelationCondition::NoRelatedRecord => Row::from(columns).not_in_selection(sub_select), - RelationCondition::ToOneRelatedRecord => Row::from(columns).in_selection(sub_select), - }; + if let Some(nested_joins) = nested_joins { + output_joins.extend(nested_joins); + }; - comparison.into() - } -} + (conditions.and(not_null_filter), Some(output_joins)) + } -impl AliasedSelect for RelationFilter { - /// The subselect part of the `RelationFilter` `ConditionTree`. - fn aliased_sel<'a>(self, alias: Option, ctx: &Context<'_>) -> Select<'static> { - let alias = alias.unwrap_or_default(); - let condition = self.condition; + _ => { + let condition = filter.condition; + let (ids, sub_select) = self.visit_relation_filter_select(filter, ctx); + let columns: Vec> = ids + .as_columns(ctx) + .map(|col| col.aliased_col(self.parent_alias(), ctx)) + .collect(); - let table = self.field.as_table(ctx); - let selected_identifier: Vec = self - .field - .identifier_columns(ctx) - .map(|col| col.aliased_col(Some(alias), ctx)) - .collect(); + let comparison = match condition { + RelationCondition::AtLeastOneRelatedRecord => Row::from(columns).in_selection(sub_select), + RelationCondition::EveryRelatedRecord => Row::from(columns).not_in_selection(sub_select), + RelationCondition::NoRelatedRecord => Row::from(columns).not_in_selection(sub_select), + RelationCondition::ToOneRelatedRecord => Row::from(columns).in_selection(sub_select), + }; - let join_columns: Vec = self - .field - .join_columns(ctx) - .map(|c| c.aliased_col(Some(alias), ctx)) - .collect(); + (comparison.into(), None) + } + } + } - let related_table = self.field.related_model().as_table(ctx); - let related_join_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields()) - .as_columns(ctx) - .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx)) - .collect(); + fn visit_one_relation_is_null_filter( + &mut self, + filter: OneRelationIsNullFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>) { + let parent_alias = self.parent_alias(); + let parent_alias_string = parent_alias.as_ref().map(|a| a.to_string(None)); + + // If the relation is inlined, we simply check whether the linking fields are null. 
+ // + // ```sql + // SELECT "Parent"."id" FROM "Parent" + // WHERE "Parent"."childId" IS NULL; + // ``` + if filter.field.is_inlined_on_enclosing_model() { + let conditions: Vec<_> = ModelProjection::from(filter.field.linking_fields()) + .as_columns(ctx) + .map(|c| c.opt_table(parent_alias_string.clone())) + .map(|c| c.is_null()) + .map(Expression::from) + .collect(); - let nested_conditions = self - .nested_filter - .aliased_condition_from(Some(alias.flip(AliasMode::Join)), false, ctx) - .invert_if(condition.invert_of_subselect()); + return (ConditionTree::And(conditions), None); + } - let conditions = selected_identifier - .clone() - .into_iter() - .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); + // If the relation is not inlined and we can use joins, then we join the relation and check whether the related linking fields are null. + // + // ```sql + // SELECT "Parent"."id" FROM "Parent" + // LEFT JOIN "Child" AS "j1" ON ("j1"."parentId" = "Parent"."id") + // WHERE "j1"."parentId" IS NULL OFFSET; + // ``` + if self.can_render_join() { + let alias = self.next_alias(AliasMode::Join); + + let conditions: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_null()) + .map(Expression::from) + .collect(); - let join = related_table - .alias(alias.to_string(Some(AliasMode::Join))) - .on(Row::from(related_join_columns).equals(Row::from(join_columns))); + let join = compute_one2m_join( + &filter.field, + alias.to_string(None).as_str(), + parent_alias_string.as_deref(), + ctx, + ); - Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table)))) - .columns(selected_identifier) - .inner_join(join) - .so_that(conditions) - } -} + return (ConditionTree::And(conditions), Some(vec![join])); + } -impl AliasedCondition for OneRelationIsNullFilter { - /// Conversion from a `OneRelationIsNullFilter` to a query condition tree. Aliased when in a nested `SELECT`. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let alias = state.alias().map(|a| a.to_string(None)); + // Otherwise, we use a NOT IN clause and a subselect to find the related records that are nulls. 
+ // + // ```sql + // SELECT "Parent"."id" FROM "Parent" + // WHERE ("Parent".id) NOT IN ( + // SELECT "Child"."parentId" FROM "Child" WHERE "Child"."parentId" IS NOT NULL + // ) + // ``` + let relation = filter.field.relation(); + let table = relation.as_table(ctx); + let relation_table = match parent_alias { + Some(ref alias) => table.alias(alias.to_string(None)), + None => table, + }; - let condition = if self.field.relation_is_inlined_in_parent() { - self.field + let columns_not_null = + filter + .field + .related_field() .as_columns(ctx) .fold(ConditionTree::NoCondition, |acc, column| { - let column_is_null = column.opt_table(alias.clone()).is_null(); + let column_is_not_null = column.opt_table(parent_alias_string.clone()).is_not_null(); match acc { - ConditionTree::NoCondition => column_is_null.into(), - cond => cond.and(column_is_null), + ConditionTree::NoCondition => column_is_not_null.into(), + cond => cond.and(column_is_not_null), } - }) - } else { - let relation = self.field.relation(); - let table = relation.as_table(ctx); - let relation_table = match alias { - Some(ref alias) => table.alias(alias.to_string()), - None => table, - }; - - let columns_not_null = - self.field - .related_field() - .as_columns(ctx) - .fold(ConditionTree::NoCondition, |acc, column| { - let column_is_not_null = column.opt_table(alias.clone()).is_not_null(); - - match acc { - ConditionTree::NoCondition => column_is_not_null.into(), - cond => cond.and(column_is_not_null), - } - }); - - // If the table is aliased, we need to use that alias in the SELECT too - // eg: SELECT .x FROM table AS - let columns: Vec<_> = self - .field - .related_field() - .scalar_fields() - .iter() - .map(|f| f.as_column(ctx).opt_table(alias.clone())) - .collect(); + }); - let sub_select = Select::from_table(relation_table) - .columns(columns) - .and_where(columns_not_null); + // If the table is aliased, we need to use that alias in the SELECT too + // eg: SELECT .x FROM table AS + let columns: Vec<_> = filter + .field + .related_field() + .scalar_fields() + .iter() + .map(|f| f.as_column(ctx).opt_table(parent_alias_string.clone())) + .collect(); - let id_columns: Vec> = ModelProjection::from(self.field.linking_fields()) - .as_columns(ctx) - .map(|c| c.opt_table(alias.clone())) - .collect(); + let sub_select = Select::from_table(relation_table) + .columns(columns) + .and_where(columns_not_null); - Row::from(id_columns).not_in_selection(sub_select).into() - }; + let id_columns: Vec> = ModelProjection::from(filter.field.linking_fields()) + .as_columns(ctx) + .map(|c| c.opt_table(parent_alias_string.clone())) + .collect(); - ConditionTree::single(condition) + ( + ConditionTree::single(Row::from(id_columns).not_in_selection(sub_select)), + None, + ) } -} -impl AliasedCondition for AggregationFilter { - /// Conversion from an `AggregationFilter` to a query condition tree. Aliased when in a nested `SELECT`. 
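
The aggregation branch that follows maps `_count`, `_avg`, `_sum`, `_min` and `_max` filters onto SQL aggregate functions. As a hedged illustration of where those conditions end up when used as a `having` filter on a grouped query (table and column names are invented for the sketch):

```sql
-- Hypothetical groupBy on "User"."country" with having: { _count: { id: { gt: 10 } } }
SELECT "User"."country", COUNT("User"."id")
FROM "User"
GROUP BY "User"."country"
HAVING COUNT("User"."id") > 10;
```
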
- fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let alias = state.alias(); - let reverse = state.reverse(); - match self { + fn visit_aggregation_filter(&mut self, filter: AggregationFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + let alias = self.parent_alias(); + let reverse = self.reverse(); + + match filter { AggregationFilter::Count(filter) => aggregate_conditions(*filter, alias, reverse, |x| count(x).into(), ctx), AggregationFilter::Average(filter) => aggregate_conditions(*filter, alias, reverse, |x| avg(x).into(), ctx), AggregationFilter::Sum(filter) => aggregate_conditions(*filter, alias, reverse, |x| sum(x).into(), ctx), @@ -446,6 +595,74 @@ impl AliasedCondition for AggregationFilter { AggregationFilter::Max(filter) => aggregate_conditions(*filter, alias, reverse, |x| max(x).into(), ctx), } } + + fn visit_scalar_list_filter(&mut self, filter: ScalarListFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + let comparable: Expression = filter.field.aliased_col(self.parent_alias(), ctx).into(); + let cond = filter.condition; + let field = &filter.field; + let alias = self.parent_alias(); + + let condition = match cond { + ScalarListCondition::Contains(ConditionValue::Value(val)) => { + comparable.compare_raw("@>", convert_list_pv(field, vec![val], ctx)) + } + ScalarListCondition::Contains(ConditionValue::FieldRef(field_ref)) => { + let field_ref_expr: Expression = field_ref.aliased_col(alias, ctx).into(); + + // This code path is only reachable for connectors with `ScalarLists` capability + field_ref_expr.equals(comparable.any()) + } + ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { + comparable.compare_raw("@>", convert_list_pv(field, vals, ctx)) + } + ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => { + comparable.compare_raw("@>", field_ref.aliased_col(alias, ctx)) + } + ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { + comparable.compare_raw("&&", convert_list_pv(field, vals, ctx)) + } + ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { + comparable.compare_raw("&&", field_ref.aliased_col(alias, ctx)) + } + ScalarListCondition::IsEmpty(true) => comparable.compare_raw("=", ValueType::Array(Some(vec![])).raw()), + ScalarListCondition::IsEmpty(false) => comparable.compare_raw("<>", ValueType::Array(Some(vec![])).raw()), + }; + + ConditionTree::single(condition) + } +} + +fn scalar_filter_aliased_cond( + sf: ScalarFilter, + alias: Option, + reverse: bool, + ctx: &Context<'_>, +) -> ConditionTree<'static> { + match sf.projection { + ScalarProjection::Single(field) => { + let comparable: Expression = field.aliased_col(alias, ctx).into(); + + convert_scalar_filter(comparable, sf.condition, reverse, sf.mode, &[field], alias, false, ctx) + } + ScalarProjection::Compound(fields) => { + let columns: Vec> = fields + .clone() + .into_iter() + .map(|field| field.aliased_col(alias, ctx)) + .collect(); + + convert_scalar_filter( + Row::from(columns).into(), + sf.condition, + reverse, + sf.mode, + &fields, + alias, + false, + ctx, + ) + } + } } fn aggregate_conditions( @@ -458,10 +675,7 @@ fn aggregate_conditions( where T: Fn(Column) -> Expression, { - let sf = match filter { - Filter::Scalar(sf) => sf, - _ => unimplemented!(), - }; + let sf = filter.into_scalar().unwrap(); match sf.projection { ScalarProjection::Compound(_) => { @@ -702,13 +916,13 @@ fn default_scalar_filter( let mut sql_values = 
Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } comparable.in_selection(sql_values) } - _ => comparable.in_selection(convert_pvs(fields, values)), + _ => comparable.in_selection(convert_pvs(fields, values, ctx)), }, ScalarCondition::In(ConditionListValue::FieldRef(field_ref)) => { // This code path is only reachable for connectors with `ScalarLists` capability @@ -719,13 +933,13 @@ fn default_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } comparable.not_in_selection(sql_values) } - _ => comparable.not_in_selection(convert_pvs(fields, values)), + _ => comparable.not_in_selection(convert_pvs(fields, values, ctx)), }, ScalarCondition::NotIn(ConditionListValue::FieldRef(field_ref)) => { // This code path is only reachable for connectors with `ScalarLists` capability @@ -854,7 +1068,7 @@ fn insensitive_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } @@ -885,7 +1099,7 @@ fn insensitive_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } @@ -951,7 +1165,7 @@ fn convert_value<'a>( ctx: &Context<'_>, ) -> Expression<'a> { match value.into() { - ConditionValue::Value(pv) => convert_pv(field, pv), + ConditionValue::Value(pv) => convert_pv(field, pv, ctx), ConditionValue::FieldRef(field_ref) => field_ref.aliased_col(alias, ctx).into(), } } @@ -963,29 +1177,29 @@ fn convert_first_value<'a>( ctx: &Context<'_>, ) -> Expression<'a> { match value.into() { - ConditionValue::Value(pv) => convert_pv(fields.first().unwrap(), pv), + ConditionValue::Value(pv) => convert_pv(fields.first().unwrap(), pv, ctx), ConditionValue::FieldRef(field_ref) => field_ref.aliased_col(alias, ctx).into(), } } -fn convert_pv<'a>(field: &ScalarFieldRef, pv: PrismaValue) -> Expression<'a> { - field.value(pv).into() +fn convert_pv<'a>(field: &ScalarFieldRef, pv: PrismaValue, ctx: &Context<'_>) -> Expression<'a> { + field.value(pv, ctx).into() } -fn convert_list_pv<'a>(field: &ScalarFieldRef, values: Vec) -> Expression<'a> { - Value::Array(Some(values.into_iter().map(|val| field.value(val)).collect())).into() +fn convert_list_pv<'a>(field: &ScalarFieldRef, values: Vec, ctx: &Context<'_>) -> Expression<'a> { + Expression::from(Value::array(values.into_iter().map(|val| field.value(val, ctx)))) } -fn convert_pvs<'a>(fields: &[ScalarFieldRef], values: Vec) -> Vec> { +fn convert_pvs<'a>(fields: &[ScalarFieldRef], values: Vec, ctx: &Context<'_>) -> Vec> { if fields.len() == values.len() { fields .iter() .zip(values) - .map(|(field, value)| field.value(value)) + .map(|(field, value)| field.value(value, ctx)) .collect() } else { let field = fields.first().unwrap(); - values.into_iter().map(|value| field.value(value)).collect() + values.into_iter().map(|value| field.value(value, ctx)).collect() } } @@ -1046,7 +1260,7 @@ impl JsonFilterExt for 
(Expression<'static>, Expression<'static>) { } // array_contains (value) (ConditionValue::Value(value), JsonTargetType::Array) => { - let contains = expr_json.clone().json_array_contains(convert_pv(field, value)); + let contains = expr_json.clone().json_array_contains(convert_pv(field, value, ctx)); if reverse { contains.or(expr_json.json_type_not_equals(JsonType::Array)).into() @@ -1104,7 +1318,7 @@ impl JsonFilterExt for (Expression<'static>, Expression<'static>) { } // array_starts_with (value) (ConditionValue::Value(value), JsonTargetType::Array) => { - let starts_with = expr_json.clone().json_array_begins_with(convert_pv(field, value)); + let starts_with = expr_json.clone().json_array_begins_with(convert_pv(field, value, ctx)); if reverse { starts_with.or(expr_json.json_type_not_equals(JsonType::Array)).into() @@ -1164,7 +1378,7 @@ impl JsonFilterExt for (Expression<'static>, Expression<'static>) { } // array_ends_with (value) (ConditionValue::Value(value), JsonTargetType::Array) => { - let ends_with = expr_json.clone().json_array_ends_into(convert_pv(field, value)); + let ends_with = expr_json.clone().json_array_ends_into(convert_pv(field, value, ctx)); if reverse { ends_with.or(expr_json.json_type_not_equals(JsonType::Array)).into() diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/connectors/sql-query-connector/src/join_utils.rs index dbec0e430951..4b4d2fc8aa24 100644 --- a/query-engine/connectors/sql-query-connector/src/join_utils.rs +++ b/query-engine/connectors/sql-query-connector/src/join_utils.rs @@ -1,4 +1,4 @@ -use crate::{filter_conversion::AliasedCondition, model_extensions::*, Context}; +use crate::{filter::FilterBuilder, model_extensions::*, Context}; use connector_interface::Filter; use prisma_models::*; use quaint::prelude::*; @@ -6,7 +6,7 @@ use quaint::prelude::*; #[derive(Debug, Clone)] pub(crate) struct AliasedJoin { // Actual join data to be passed to quaint - pub(crate) data: JoinData<'static>, + pub(crate) data: Join<'static>, // Alias used for the join. eg: LEFT JOIN ... AS pub(crate) alias: String, } @@ -22,7 +22,7 @@ pub(crate) fn compute_aggr_join( filter: Option, aggregator_alias: &str, join_alias: &str, - previous_join: Option<&AliasedJoin>, + previous_join: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { let join_alias = format!("{}_{}", join_alias, &rf.related_model().name()); @@ -65,7 +65,7 @@ fn compute_aggr_join_one2m( filter: Option, aggregator_alias: &str, join_alias: &str, - previous_join: Option<&AliasedJoin>, + previous_join: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { let (left_fields, right_fields) = if rf.is_inlined_on_enclosing_model() { @@ -77,9 +77,9 @@ fn compute_aggr_join_one2m( ) }; let select_columns = right_fields.iter().map(|f| f.as_column(ctx)); - let conditions: ConditionTree = filter - .map(|f| f.aliased_condition_from(None, false, ctx)) - .unwrap_or(ConditionTree::NoCondition); + let (conditions, joins) = filter + .map(|f| FilterBuilder::with_top_level_joins().visit_filter(f, ctx)) + .unwrap_or((ConditionTree::NoCondition, None)); // + SELECT Child. FROM Child WHERE let query = Select::from_table(rf.related_model().as_table(ctx)) @@ -98,11 +98,17 @@ fn compute_aggr_join_one2m( // + GROUP BY Child. 
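
As a rough picture of what `compute_aggr_join_one2m` is assembling here, the grouped subquery sketched in the comments above ends up attached to the parent query as a LEFT JOIN along these lines. The relation, column, and alias names are assumptions for the sketch, and the optional aggregated filter is omitted:

```sql
-- Hypothetical one-to-many relation User -> Post, counting related posts per user.
SELECT "User".*, "aggr"."post_count"
FROM "User"
LEFT JOIN (
  SELECT "Post"."authorId", COUNT(*) AS "post_count"
  FROM "Post"
  GROUP BY "Post"."authorId"
) AS "aggr" ON "User"."id" = "aggr"."authorId";
```
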
let query = right_fields.iter().fold(query, |acc, f| acc.group_by(f.as_column(ctx))); + let query = if let Some(joins) = joins { + joins.into_iter().fold(query, |acc, join| acc.join(join.data)) + } else { + query + }; + let pairs = left_fields.into_iter().zip(right_fields); let on_conditions: Vec = pairs .map(|(a, b)| { let col_a = match previous_join { - Some(prev_join) => Column::from((prev_join.alias.to_owned(), a.db_name().to_owned())), + Some(prev_join) => Column::from((prev_join.to_owned(), a.db_name().to_owned())), None => a.as_column(ctx), }; let col_b = Column::from((join_alias.to_owned(), b.db_name().to_owned())); @@ -120,7 +126,7 @@ fn compute_aggr_join_one2m( .on(ConditionTree::And(on_conditions)); AliasedJoin { - data: join, + data: Join::Left(join), alias: join_alias.to_owned(), } } @@ -141,7 +147,7 @@ fn compute_aggr_join_m2m( filter: Option, aggregator_alias: &str, join_alias: &str, - previous_join: Option<&AliasedJoin>, + previous_join: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { // m2m join table (_ParentToChild) @@ -155,15 +161,21 @@ fn compute_aggr_join_m2m( // Parent primary identifiers let parent_ids: ModelProjection = rf.model().primary_identifier().into(); // Rendered filters - let conditions: ConditionTree = filter - .map(|f| f.aliased_condition_from(None, false, ctx)) - .unwrap_or(ConditionTree::NoCondition); + let (conditions, joins) = filter + .map(|f| FilterBuilder::with_top_level_joins().visit_filter(f, ctx)) + .unwrap_or((ConditionTree::NoCondition, None)); // + SELECT _ParentToChild.ChildId FROM Child WHERE let query = Select::from_table(child_model.as_table(ctx)) .columns(m2m_child_columns.clone()) .so_that(conditions); + let query = if let Some(joins) = joins { + joins.into_iter().fold(query, |acc, join| acc.join(join.data)) + } else { + query + }; + let aggr_expr = match aggregation { AggregationType::Count => count(m2m_child_columns.clone()), }; @@ -196,7 +208,7 @@ fn compute_aggr_join_m2m( let on_conditions: Vec = pairs .map(|(a, b)| { let col_a = match previous_join { - Some(prev_join) => Column::from((prev_join.alias.to_owned(), a.db_name().to_owned())), + Some(prev_join) => Column::from((prev_join.to_owned(), a.db_name().to_owned())), None => a.as_column(ctx), }; let col_b = Column::from((join_alias.to_owned(), b.name.to_string())); @@ -216,48 +228,33 @@ fn compute_aggr_join_m2m( AliasedJoin { alias: join_alias.to_owned(), - data: join, + data: Join::Left(join), } } pub(crate) fn compute_one2m_join( - rf: &RelationFieldRef, - join_prefix: &str, - previous_join: Option<&AliasedJoin>, + field: &RelationFieldRef, + alias: &str, + parent_alias: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { - let (left_fields, right_fields) = if rf.is_inlined_on_enclosing_model() { - (rf.scalar_fields(), rf.referenced_fields()) - } else { - ( - rf.related_field().referenced_fields(), - rf.related_field().scalar_fields(), - ) - }; - - let right_table_alias = format!("{}_{}", join_prefix, rf.related_model().name()); - - let related_model = rf.related_model(); - let pairs = left_fields.into_iter().zip(right_fields); - - let on_conditions: Vec = pairs - .map(|(a, b)| { - let a_col = match previous_join { - Some(prev_join) => Column::from((prev_join.alias.to_owned(), a.db_name().to_owned())), - None => a.as_column(ctx), - }; + let join_columns: Vec = field + .join_columns(ctx) + .map(|c| c.opt_table(parent_alias.map(ToOwned::to_owned))) + .collect(); - let b_col = Column::from((right_table_alias.clone(), b.db_name().to_owned())); + let related_table = 
field.related_model().as_table(ctx); + let related_join_columns: Vec<_> = ModelProjection::from(field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.table(alias.to_owned())) + .collect(); - a_col.equals(b_col).into() - }) - .collect::>(); + let join = related_table + .alias(alias.to_owned()) + .on(Row::from(related_join_columns).equals(Row::from(join_columns))); AliasedJoin { - alias: right_table_alias.to_owned(), - data: related_model - .as_table(ctx) - .alias(right_table_alias) - .on(ConditionTree::And(on_conditions)), + alias: alias.to_owned(), + data: Join::Left(join), } } diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index 06aa1e376c4a..ed1528ded6b5 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -6,7 +6,7 @@ mod context; mod cursor_condition; mod database; mod error; -mod filter_conversion; +mod filter; mod join_utils; mod model_extensions; mod nested_aggregations; @@ -19,11 +19,11 @@ mod sql_trace; mod value; mod value_ext; -use self::{column_metadata::*, context::Context, filter_conversion::*, query_ext::QueryExt, row::*}; +use self::{column_metadata::*, context::Context, query_ext::QueryExt, row::*}; use quaint::prelude::Queryable; #[cfg(feature = "driver-adapters")] -pub use database::{register_driver_adapter, Js}; +pub use database::{activate_driver_adapter, Js}; pub use database::{FromSource, Mssql, Mysql, PostgreSql, Sqlite}; pub use error::SqlError; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs index 02ed3776b147..445bada9c45c 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs @@ -96,7 +96,10 @@ impl AsColumn for ScalarField { let full_table_name = super::table::db_name_with_schema(&self.container().as_model().unwrap(), ctx); let col = self.db_name().to_string(); - let column = Column::from((full_table_name, col)).type_family(self.type_family()); - column.default(quaint::ast::DefaultValue::Generated) + Column::from((full_table_name, col)) + .type_family(self.type_family()) + .set_is_enum(self.type_identifier().is_enum()) + .set_is_list(self.is_list()) + .default(quaint::ast::DefaultValue::Generated) } } diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index 429cff058241..7eb414dd92a8 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -1,46 +1,84 @@ +use crate::context::Context; use chrono::Utc; use prisma_models::{ScalarField, TypeIdentifier}; use prisma_value::PrismaValue; use quaint::{ - ast::Value, - prelude::{TypeDataLength, TypeFamily}, + ast::{EnumName, Value, ValueType}, + prelude::{EnumVariant, TypeDataLength, TypeFamily}, }; -pub trait ScalarFieldExt { - fn value<'a>(&self, pv: PrismaValue) -> Value<'a>; +pub(crate) trait ScalarFieldExt { + fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a>; fn type_family(&self) -> TypeFamily; } impl ScalarFieldExt for ScalarField { - fn value<'a>(&self, pv: PrismaValue) -> Value<'a> { - match (pv, self.type_identifier()) { + fn 
value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a> { + let value = match (pv, self.type_identifier()) { (PrismaValue::String(s), _) => s.into(), (PrismaValue::Float(f), _) => f.into(), (PrismaValue::Boolean(b), _) => b.into(), (PrismaValue::DateTime(d), _) => d.with_timezone(&Utc).into(), + (PrismaValue::Enum(e), TypeIdentifier::Enum(enum_id)) => { + let enum_walker = self.dm.clone().zip(enum_id); + let enum_name = enum_walker.db_name().to_owned(); + let schema_name = enum_walker + .schema_name() + .map(ToOwned::to_owned) + .or(Some(ctx.schema_name().to_owned())); + + Value::enum_variant_with_name(e, EnumName::new(enum_name, schema_name)) + } + (PrismaValue::List(vals), TypeIdentifier::Enum(enum_id)) => { + let enum_walker = self.dm.clone().zip(enum_id); + let variants: Vec<_> = vals + .into_iter() + .map(|val| val.into_string().unwrap()) + .map(EnumVariant::new) + .collect(); + + let enum_name = enum_walker.db_name().to_owned(); + let schema_name = enum_walker + .schema_name() + .map(ToOwned::to_owned) + .or(Some(ctx.schema_name().to_owned())); + + Value::enum_array_with_name(variants, EnumName::new(enum_name, schema_name)) + } (PrismaValue::Enum(e), _) => e.into(), (PrismaValue::Int(i), _) => i.into(), (PrismaValue::BigInt(i), _) => i.into(), (PrismaValue::Uuid(u), _) => u.to_string().into(), - (PrismaValue::List(l), _) => Value::Array(Some(l.into_iter().map(|x| self.value(x)).collect())), - (PrismaValue::Json(s), _) => Value::Json(Some(serde_json::from_str::(&s).unwrap())), - (PrismaValue::Bytes(b), _) => Value::Bytes(Some(b.into())), + (PrismaValue::List(l), _) => Value::array(l.into_iter().map(|x| self.value(x, ctx))), + (PrismaValue::Json(s), _) => Value::json(serde_json::from_str::(&s).unwrap()), + (PrismaValue::Bytes(b), _) => Value::bytes(b), (PrismaValue::Object(_), _) => unimplemented!(), (PrismaValue::Null, ident) => match ident { - TypeIdentifier::String => Value::Text(None), - TypeIdentifier::Float => Value::Numeric(None), - TypeIdentifier::Decimal => Value::Numeric(None), - TypeIdentifier::Boolean => Value::Boolean(None), - TypeIdentifier::Enum(_) => Value::Enum(None), - TypeIdentifier::Json => Value::Json(None), - TypeIdentifier::DateTime => Value::DateTime(None), - TypeIdentifier::UUID => Value::Uuid(None), - TypeIdentifier::Int => Value::Int32(None), - TypeIdentifier::BigInt => Value::Int64(None), - TypeIdentifier::Bytes => Value::Bytes(None), + TypeIdentifier::String => Value::null_text(), + TypeIdentifier::Float => Value::null_numeric(), + TypeIdentifier::Decimal => Value::null_numeric(), + TypeIdentifier::Boolean => Value::null_boolean(), + TypeIdentifier::Enum(enum_id) => { + let enum_walker = self.dm.clone().zip(enum_id); + let enum_name = enum_walker.db_name().to_owned(); + let schema_name = enum_walker + .schema_name() + .map(ToOwned::to_owned) + .or(Some(ctx.schema_name().to_owned())); + + ValueType::Enum(None, Some(EnumName::new(enum_name, schema_name))).into_value() + } + TypeIdentifier::Json => Value::null_json(), + TypeIdentifier::DateTime => Value::null_datetime(), + TypeIdentifier::UUID => Value::null_uuid(), + TypeIdentifier::Int => Value::null_int32(), + TypeIdentifier::BigInt => Value::null_int64(), + TypeIdentifier::Bytes => Value::null_bytes(), TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), }, - } + }; + + value.with_native_column_type(self.native_type().map(|nt| nt.name())) } fn type_family(&self) -> TypeFamily { @@ -81,10 +119,10 @@ pub fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { 
PrismaValue::Int(i) => i.into(), PrismaValue::BigInt(i) => i.into(), PrismaValue::Uuid(u) => u.to_string().into(), - PrismaValue::List(l) => Value::Array(Some(l.into_iter().map(convert_lossy).collect())), - PrismaValue::Json(s) => Value::Json(serde_json::from_str(&s).unwrap()), - PrismaValue::Bytes(b) => Value::Bytes(Some(b.into())), - PrismaValue::Null => Value::Int32(None), // Can't tell which type the null is supposed to be. + PrismaValue::List(l) => Value::array(l.into_iter().map(convert_lossy)), + PrismaValue::Json(s) => Value::json(serde_json::from_str(&s).unwrap()), + PrismaValue::Bytes(b) => Value::bytes(b), + PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. PrismaValue::Object(_) => unimplemented!(), } } diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs index 25e864ddd7c0..25d994b1d64d 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs @@ -1,10 +1,11 @@ use super::ScalarFieldExt; +use crate::context::Context; use prisma_models::{PrismaValue, SelectedField, SelectionResult}; use quaint::Value; pub(crate) trait SelectionResultExt { fn misses_autogen_value(&self) -> bool; - fn db_values<'a>(&self) -> Vec>; + fn db_values<'a>(&self, ctx: &Context<'_>) -> Vec>; fn add_autogen_value(&mut self, value: V) -> bool where @@ -30,11 +31,11 @@ impl SelectionResultExt for SelectionResult { false } - fn db_values<'a>(&self) -> Vec> { + fn db_values<'a>(&self, ctx: &Context<'_>) -> Vec> { self.pairs .iter() .map(|(selection, v)| match selection { - SelectedField::Scalar(sf) => sf.value(v.clone()), + SelectedField::Scalar(sf) => sf.value(v.clone(), ctx), SelectedField::Composite(_cf) => todo!(), // [Composites] todo }) .collect() diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/connectors/sql-query-connector/src/ordering.rs index 72485bacb2cb..cf49698405ef 100644 --- a/query-engine/connectors/sql-query-connector/src/ordering.rs +++ b/query-engine/connectors/sql-query-connector/src/ordering.rs @@ -115,7 +115,7 @@ impl OrderByBuilder { let (joins, order_column) = self.compute_joins_aggregation(order_by, ctx); let order_definition: OrderDefinition = match order_by.sort_aggregation { SortAggregation::Count => { - let exprs: Vec = vec![order_column.clone().into(), Value::integer(0).into()]; + let exprs: Vec = vec![order_column.clone().into(), Value::int32(0).into()]; // We coalesce the order by expr to 0 so that if there's no relation, // `COALESCE(NULL, 0)` will return `0`, thus preserving the order @@ -142,11 +142,12 @@ impl OrderByBuilder { .expect("An order by relation aggregation has to have at least one hop"); // Unwraps are safe because the SQL connector doesn't yet support any other type of orderBy hop but the relation hop. 
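
For orientation, the relation-count ordering built by `compute_joins_aggregation` is intended to come out roughly as below. All names, including the aggregator alias, are placeholders for the sketch; the `COALESCE(..., 0)` mirrors the earlier comment about preserving order when no related rows exist:

```sql
-- Hypothetical ordering of users by how many posts they have, most first.
SELECT "User".*
FROM "User"
LEFT JOIN (
  SELECT "Post"."authorId", COUNT(*) AS "aggregator"
  FROM "Post"
  GROUP BY "Post"."authorId"
) AS "orderby_1" ON "User"."id" = "orderby_1"."authorId"
ORDER BY COALESCE("orderby_1"."aggregator", 0) DESC;
```
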
- let mut joins = vec![]; + let mut joins: Vec = vec![]; + for (i, hop) in rest_hops.iter().enumerate() { let previous_join = if i > 0 { joins.get(i - 1) } else { None }; - - let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_join, ctx); + let previous_alias = previous_join.map(|j| j.alias.as_str()); + let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_alias, ctx); joins.push(join); } @@ -156,6 +157,8 @@ impl OrderByBuilder { _ => unreachable!("Order by relation aggregation other than count are not supported"), }; + let previous_alias = joins.last().map(|j| j.alias.as_str()); + // We perform the aggregation on the last join let last_aggr_join = compute_aggr_join( last_hop.as_relation_hop().unwrap(), @@ -163,7 +166,7 @@ impl OrderByBuilder { None, ORDER_AGGREGATOR_ALIAS, &self.join_prefix(), - joins.last(), + previous_alias, ctx, ); @@ -181,11 +184,12 @@ impl OrderByBuilder { order_by: &OrderByScalar, ctx: &Context<'_>, ) -> (Vec, Column<'static>) { - let mut joins = vec![]; + let mut joins: Vec = vec![]; for (i, hop) in order_by.path.iter().enumerate() { let previous_join = if i > 0 { joins.get(i - 1) } else { None }; - let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_join, ctx); + let previous_alias = previous_join.map(|j| j.alias.as_str()); + let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_alias, ctx); joins.push(join); } diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs index b121eb308249..f9a3d43905e3 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs @@ -1,6 +1,7 @@ pub(crate) mod read; pub(crate) mod write; +use crate::context::Context; use crate::model_extensions::SelectionResultExt; use prisma_models::SelectionResult; use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values}; @@ -10,6 +11,7 @@ const PARAMETER_LIMIT: usize = 2000; pub(super) fn chunked_conditions( columns: &[Column<'static>], records: &[&SelectionResult], + ctx: &Context<'_>, f: F, ) -> Vec> where @@ -19,7 +21,7 @@ where records .chunks(PARAMETER_LIMIT) .map(|chunk| { - let tree = in_conditions(columns, chunk.iter().copied()); + let tree = in_conditions(columns, chunk.iter().copied(), ctx); f(tree).into() }) .collect() @@ -28,11 +30,12 @@ where pub(super) fn in_conditions<'a>( columns: &'a [Column<'static>], results: impl IntoIterator, + ctx: &Context<'_>, ) -> ConditionTree<'static> { let mut values = Values::empty(); for result in results.into_iter() { - let vals: Vec<_> = result.db_values(); + let vals: Vec<_> = result.db_values(ctx); values.push(vals) } diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs index 2aa0a80169de..a5385f1dd56a 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs @@ -1,6 +1,6 @@ use crate::{ - cursor_condition, filter_conversion::AliasedCondition, model_extensions::*, nested_aggregations, - ordering::OrderByBuilder, sql_trace::SqlTraceComment, Context, + cursor_condition, filter::FilterBuilder, model_extensions::*, nested_aggregations, ordering::OrderByBuilder, + sql_trace::SqlTraceComment, 
Context, }; use connector_interface::{filter::Filter, AggregationSelection, QueryArguments, RelAggregationSelection}; use itertools::Itertools; @@ -65,10 +65,10 @@ impl SelectDefinition for QueryArguments { let limit = if self.ignore_take { None } else { self.take_abs() }; let skip = if self.ignore_skip { 0 } else { self.skip.unwrap_or(0) }; - let filter: ConditionTree = self + let (filter, filter_joins) = self .filter - .map(|f| f.aliased_condition_from(None, false, ctx)) - .unwrap_or(ConditionTree::NoCondition); + .map(|f| FilterBuilder::with_top_level_joins().visit_filter(f, ctx)) + .unwrap_or((ConditionTree::NoCondition, None)); let conditions = match (filter, cursor_condition) { (ConditionTree::NoCondition, cursor) => cursor, @@ -80,13 +80,21 @@ impl SelectDefinition for QueryArguments { let joined_table = order_by_definitions .iter() .flat_map(|j| &j.joins) - .fold(model.as_table(ctx), |acc, join| acc.left_join(join.clone().data)); + .fold(model.as_table(ctx), |acc, join| acc.join(join.clone().data)); // Add joins necessary to the nested aggregations let joined_table = aggregation_joins .joins .into_iter() - .fold(joined_table, |acc, join| acc.left_join(join.data)); + .fold(joined_table, |acc, join| acc.join(join.data)); + + let joined_table = if let Some(filter_joins) = filter_joins { + filter_joins + .into_iter() + .fold(joined_table, |acc, join| acc.join(join.data)) + } else { + joined_table + }; let select_ast = Select::from_table(joined_table) .so_that(conditions) @@ -116,7 +124,9 @@ where T: SelectDefinition, { let (select, additional_selection_set) = query.into_select(model, aggr_selections, ctx); - let select = columns.fold(select, |acc, col| acc.column(col)); + let select = columns + .map(|c| c.set_is_selected(true)) + .fold(select, |acc, col| acc.column(col)); let select = select.append_trace(&Span::current()).add_trace_id(ctx.trace_id); @@ -210,7 +220,7 @@ pub(crate) fn group_by_aggregate( let (base_query, _) = args.into_select(model, &[], ctx); let select_query = selections.iter().fold(base_query, |select, next_op| match next_op { - AggregationSelection::Field(field) => select.column(field.as_column(ctx)), + AggregationSelection::Field(field) => select.column(field.as_column(ctx).set_is_selected(true)), AggregationSelection::Count { all, fields } => { let select = fields.iter().fold(select, |select, next_field| { @@ -247,7 +257,11 @@ pub(crate) fn group_by_aggregate( ); match having { - Some(filter) => grouped.having(filter.aliased_condition_from(None, false, ctx)), + Some(filter) => { + let cond = FilterBuilder::without_top_level_joins().visit_filter(filter, ctx); + + grouped.having(cond) + } None => grouped, } } diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index c18cb9bd6613..b9356842b285 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -28,11 +28,11 @@ pub(crate) fn create_record( .try_into() .expect("Create calls can only use PrismaValue write expressions (right now)."); - insert.value(db_name.to_owned(), field.value(value)) + insert.value(db_name.to_owned(), field.value(value, ctx)) }); Insert::from(insert) - .returning(selected_fields.as_columns(ctx)) + .returning(selected_fields.as_columns(ctx).map(|c| c.set_is_selected(true))) .append_trace(&Span::current()) .add_trace_id(ctx.trace_id) } @@ -64,7 +64,7 @@ pub(crate) fn 
create_records_nonempty( .try_into() .expect("Create calls can only use PrismaValue write expressions (right now)."); - row.push(field.value(value).into()); + row.push(field.value(value, ctx).into()); } None => row.push(default_value()), @@ -124,12 +124,12 @@ pub(crate) fn build_update_and_set_query( let value: Expression = match val.try_into_scalar().unwrap() { ScalarWriteOperation::Field(_) => unimplemented!(), - ScalarWriteOperation::Set(rhs) => field.value(rhs).into(), + ScalarWriteOperation::Set(rhs) => field.value(rhs, ctx).into(), ScalarWriteOperation::Add(rhs) if field.is_list() => { let e: Expression = Column::from((table.clone(), name.clone())).into(); let vals: Vec<_> = match rhs { - PrismaValue::List(vals) => vals.into_iter().map(|val| field.value(val)).collect(), - _ => vec![field.value(rhs)], + PrismaValue::List(vals) => vals.into_iter().map(|val| field.value(val, ctx)).collect(), + _ => vec![field.value(rhs, ctx)], }; // Postgres only @@ -137,22 +137,22 @@ pub(crate) fn build_update_and_set_query( } ScalarWriteOperation::Add(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e + field.value(rhs).into() + e + field.value(rhs, ctx).into() } ScalarWriteOperation::Substract(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e - field.value(rhs).into() + e - field.value(rhs, ctx).into() } ScalarWriteOperation::Multiply(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e * field.value(rhs).into() + e * field.value(rhs, ctx).into() } ScalarWriteOperation::Divide(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e / field.value(rhs).into() + e / field.value(rhs, ctx).into() } ScalarWriteOperation::Unset(_) => unreachable!("Unset is not supported on SQL connectors"), @@ -164,7 +164,7 @@ pub(crate) fn build_update_and_set_query( let query = query.append_trace(&Span::current()).add_trace_id(ctx.trace_id); let query = if let Some(selected_fields) = selected_fields { - query.returning(selected_fields.as_columns(ctx)) + query.returning(selected_fields.as_columns(ctx).map(|c| c.set_is_selected(true))) } else { query }; @@ -183,14 +183,26 @@ pub(crate) fn chunk_update_with_ids( .as_columns(ctx) .collect(); - let query = super::chunked_conditions(&columns, ids, |conditions| { + let query = super::chunked_conditions(&columns, ids, ctx, |conditions| { update.clone().so_that(conditions.and(filter_condition.clone())) }); Ok(query) } -pub(crate) fn delete_many( +pub(crate) fn delete_many_from_filter( + model: &Model, + filter_condition: ConditionTree<'static>, + ctx: &Context<'_>, +) -> Query<'static> { + Delete::from_table(model.as_table(ctx)) + .so_that(filter_condition) + .append_trace(&Span::current()) + .add_trace_id(ctx.trace_id) + .into() +} + +pub(crate) fn delete_many_from_ids_and_filter( model: &Model, ids: &[&SelectionResult], filter_condition: ConditionTree<'static>, @@ -200,11 +212,8 @@ pub(crate) fn delete_many( .as_columns(ctx) .collect(); - super::chunked_conditions(&columns, ids, |conditions| { - Delete::from_table(model.as_table(ctx)) - .so_that(conditions.and(filter_condition.clone())) - .append_trace(&Span::current()) - .add_trace_id(ctx.trace_id) + super::chunked_conditions(&columns, ids, ctx, |conditions| { + delete_many_from_filter(model, conditions.and(filter_condition.clone()), ctx) }) } @@ -223,9 +232,9 @@ pub(crate) fn create_relation_table_records( let insert = Insert::multi_into(relation.as_table(ctx), columns); let 
insert: MultiRowInsert = child_ids.iter().fold(insert, |insert, child_id| { - let mut values: Vec<_> = parent_id.db_values(); + let mut values: Vec<_> = parent_id.db_values(ctx); - values.extend(child_id.db_values()); + values.extend(child_id.db_values(ctx)); insert.values(values) }); @@ -244,14 +253,14 @@ pub(crate) fn delete_relation_table_records( let mut parent_columns: Vec<_> = parent_field.related_field().m2m_columns(ctx); let child_columns: Vec<_> = parent_field.m2m_columns(ctx); - let parent_id_values = parent_id.db_values(); + let parent_id_values = parent_id.db_values(ctx); let parent_id_criteria = if parent_columns.len() > 1 { Row::from(parent_columns).equals(parent_id_values) } else { parent_columns.pop().unwrap().equals(parent_id_values) }; - let child_id_criteria = super::in_conditions(&child_columns, child_ids); + let child_id_criteria = super::in_conditions(&child_columns, child_ids, ctx); Delete::from_table(relation.as_table(ctx)) .so_that(parent_id_criteria.and(child_id_criteria)) diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index 8194a70fd6fa..2dba40dcb7fc 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -1,6 +1,7 @@ +use crate::filter::FilterBuilder; use crate::{ column_metadata, error::*, model_extensions::*, sql_trace::trace_parent_to_string, sql_trace::SqlTraceComment, - value_ext::IntoTypedJsonExtension, AliasedCondition, ColumnMetadata, Context, SqlRow, ToSqlRow, + value_ext::IntoTypedJsonExtension, ColumnMetadata, Context, SqlRow, ToSqlRow, }; use async_trait::async_trait; use connector_interface::{filter::Filter, RecordFilter}; @@ -126,12 +127,13 @@ impl QueryExt for Q { ) -> crate::Result> { let model_id: ModelProjection = model.primary_identifier().into(); let id_cols: Vec> = model_id.as_columns(ctx).collect(); + let condition = FilterBuilder::without_top_level_joins().visit_filter(filter, ctx); let select = Select::from_table(model.as_table(ctx)) .columns(id_cols) .append_trace(&Span::current()) .add_trace_id(ctx.trace_id) - .so_that(filter.aliased_condition_from(None, false, ctx)); + .so_that(condition); self.select_ids(select, model_id, ctx).await } diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 9dfd05751c56..250ee7d9420f 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -3,7 +3,7 @@ use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; use connector_interface::{coerce_null_to_zero_value, AggregationResult, AggregationSelection}; use prisma_models::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; -use quaint::{ast::Value, connector::ResultRow}; +use quaint::{connector::ResultRow, Value, ValueType}; use std::{io, str::FromStr}; use uuid::Uuid; @@ -92,12 +92,12 @@ impl ToSqlRow for ResultRow { for (i, p_value) in self.into_iter().enumerate().take(row_width) { let pv = match (meta[i].identifier(), meta[i].arity()) { - (type_identifier, FieldArity::List) => match p_value { + (type_identifier, FieldArity::List) => match p_value.typed { value if value.is_null() => Ok(PrismaValue::List(Vec::new())), - Value::Array(None) => Ok(PrismaValue::List(Vec::new())), - Value::Array(Some(l)) => l + ValueType::Array(None) => Ok(PrismaValue::List(Vec::new())), + 
ValueType::Array(Some(l)) => l .into_iter() - .map(|p_value| row_value_to_prisma_value(p_value, meta[i])) + .map(|val| row_value_to_prisma_value(val, meta[i])) .collect::>>() .map(PrismaValue::List), _ => { @@ -140,35 +140,35 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result }; Ok(match meta.identifier() { - TypeIdentifier::Boolean => match p_value { + TypeIdentifier::Boolean => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Int32(Some(i)) => PrismaValue::Boolean(i != 0), - Value::Int64(Some(i)) => PrismaValue::Boolean(i != 0), - Value::Boolean(Some(b)) => PrismaValue::Boolean(b), - Value::Bytes(Some(bytes)) if bytes.as_ref() == [0u8] => PrismaValue::Boolean(false), - Value::Bytes(Some(bytes)) if bytes.as_ref() == [1u8] => PrismaValue::Boolean(true), + ValueType::Int32(Some(i)) => PrismaValue::Boolean(i != 0), + ValueType::Int64(Some(i)) => PrismaValue::Boolean(i != 0), + ValueType::Boolean(Some(b)) => PrismaValue::Boolean(b), + ValueType::Bytes(Some(bytes)) if bytes.as_ref() == [0u8] => PrismaValue::Boolean(false), + ValueType::Bytes(Some(bytes)) if bytes.as_ref() == [1u8] => PrismaValue::Boolean(true), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Enum(_) => match p_value { + TypeIdentifier::Enum(_) => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Enum(Some(cow)) => PrismaValue::Enum(cow.into_owned()), - Value::Text(Some(cow)) => PrismaValue::Enum(cow.into_owned()), + ValueType::Enum(Some(cow), _) => PrismaValue::Enum(cow.into_owned()), + ValueType::Text(Some(cow)) => PrismaValue::Enum(cow.into_owned()), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Json => match p_value { + TypeIdentifier::Json => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Text(Some(json)) => PrismaValue::Json(json.into()), - Value::Json(Some(json)) => PrismaValue::Json(json.to_string()), + ValueType::Text(Some(json)) => PrismaValue::Json(json.into()), + ValueType::Json(Some(json)) => PrismaValue::Json(json.to_string()), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::UUID => match p_value { + TypeIdentifier::UUID => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Text(Some(uuid)) => PrismaValue::Uuid(Uuid::parse_str(&uuid)?), - Value::Uuid(Some(uuid)) => PrismaValue::Uuid(uuid), + ValueType::Text(Some(uuid)) => PrismaValue::Uuid(Uuid::parse_str(&uuid)?), + ValueType::Uuid(Some(uuid)) => PrismaValue::Uuid(uuid), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::DateTime => match p_value { + TypeIdentifier::DateTime => match p_value.typed { value if value.is_null() => PrismaValue::Null, value if value.is_integer() => { let ts = value.as_integer().unwrap(); @@ -179,47 +179,47 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result PrismaValue::DateTime(datetime.into()) } - Value::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), - Value::Text(Some(ref dt_string)) => { + ValueType::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), + ValueType::Text(Some(ref dt_string)) => { let dt = DateTime::parse_from_rfc3339(dt_string) .or_else(|_| DateTime::parse_from_rfc2822(dt_string)) .map_err(|_| create_error(&p_value))?; PrismaValue::DateTime(dt.with_timezone(&Utc).into()) } - Value::Date(Some(d)) => { + ValueType::Date(Some(d)) => { let dt = DateTime::::from_utc(d.and_hms_opt(0, 0, 0).unwrap(), Utc); PrismaValue::DateTime(dt.into()) } - Value::Time(Some(t)) => { + 
ValueType::Time(Some(t)) => { let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); let dt = DateTime::::from_utc(d.and_time(t), Utc); PrismaValue::DateTime(dt.into()) } _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Float | TypeIdentifier::Decimal => match p_value { + TypeIdentifier::Float | TypeIdentifier::Decimal => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Numeric(Some(f)) => PrismaValue::Float(f.normalized()), - Value::Double(Some(f)) => match f { + ValueType::Numeric(Some(f)) => PrismaValue::Float(f.normalized()), + ValueType::Double(Some(f)) => match f { f if f.is_nan() => return Err(create_error(&p_value)), f if f.is_infinite() => return Err(create_error(&p_value)), _ => PrismaValue::Float(BigDecimal::from_f64(f).unwrap().normalized()), }, - Value::Float(Some(f)) => match f { + ValueType::Float(Some(f)) => match f { f if f.is_nan() => return Err(create_error(&p_value)), f if f.is_infinite() => return Err(create_error(&p_value)), _ => PrismaValue::Float(BigDecimal::from_f32(f).unwrap().normalized()), }, - Value::Int32(Some(i)) => match BigDecimal::from_i32(i) { + ValueType::Int32(Some(i)) => match BigDecimal::from_i32(i) { Some(dec) => PrismaValue::Float(dec), None => return Err(create_error(&p_value)), }, - Value::Int64(Some(i)) => match BigDecimal::from_i64(i) { + ValueType::Int64(Some(i)) => match BigDecimal::from_i64(i) { Some(dec) => PrismaValue::Float(dec), None => return Err(create_error(&p_value)), }, - Value::Text(_) | Value::Bytes(_) => { + ValueType::Text(_) | ValueType::Bytes(_) => { let dec: BigDecimal = p_value .as_str() .expect("text/bytes as str") @@ -230,61 +230,61 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result } _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Int => match p_value { + TypeIdentifier::Int => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Int32(Some(i)) => PrismaValue::Int(i as i64), - Value::Int64(Some(i)) => PrismaValue::Int(i), - Value::Bytes(Some(bytes)) => PrismaValue::Int(interpret_bytes_as_i64(&bytes)), - Value::Text(Some(ref txt)) => { + ValueType::Int32(Some(i)) => PrismaValue::Int(i as i64), + ValueType::Int64(Some(i)) => PrismaValue::Int(i), + ValueType::Bytes(Some(bytes)) => PrismaValue::Int(interpret_bytes_as_i64(&bytes)), + ValueType::Text(Some(ref txt)) => { PrismaValue::Int(i64::from_str(txt.trim_start_matches('\0')).map_err(|_| create_error(&p_value))?) } - Value::Float(Some(f)) => { + ValueType::Float(Some(f)) => { sanitize_f32(f, "Int")?; PrismaValue::Int(big_decimal_to_i64(BigDecimal::from_f32(f).unwrap(), "Int")?) } - Value::Double(Some(f)) => { + ValueType::Double(Some(f)) => { sanitize_f64(f, "Int")?; PrismaValue::Int(big_decimal_to_i64(BigDecimal::from_f64(f).unwrap(), "Int")?) 
} - Value::Numeric(Some(dec)) => PrismaValue::Int(big_decimal_to_i64(dec, "Int")?), - Value::Boolean(Some(bool)) => PrismaValue::Int(bool as i64), + ValueType::Numeric(Some(dec)) => PrismaValue::Int(big_decimal_to_i64(dec, "Int")?), + ValueType::Boolean(Some(bool)) => PrismaValue::Int(bool as i64), other => to_prisma_value(other)?, }, - TypeIdentifier::BigInt => match p_value { + TypeIdentifier::BigInt => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Int32(Some(i)) => PrismaValue::BigInt(i as i64), - Value::Int64(Some(i)) => PrismaValue::BigInt(i), - Value::Bytes(Some(bytes)) => PrismaValue::BigInt(interpret_bytes_as_i64(&bytes)), - Value::Text(Some(ref txt)) => { + ValueType::Int32(Some(i)) => PrismaValue::BigInt(i as i64), + ValueType::Int64(Some(i)) => PrismaValue::BigInt(i), + ValueType::Bytes(Some(bytes)) => PrismaValue::BigInt(interpret_bytes_as_i64(&bytes)), + ValueType::Text(Some(ref txt)) => { PrismaValue::BigInt(i64::from_str(txt.trim_start_matches('\0')).map_err(|_| create_error(&p_value))?) } - Value::Float(Some(f)) => { + ValueType::Float(Some(f)) => { sanitize_f32(f, "BigInt")?; PrismaValue::BigInt(big_decimal_to_i64(BigDecimal::from_f32(f).unwrap(), "BigInt")?) } - Value::Double(Some(f)) => { + ValueType::Double(Some(f)) => { sanitize_f64(f, "BigInt")?; PrismaValue::BigInt(big_decimal_to_i64(BigDecimal::from_f64(f).unwrap(), "BigInt")?) } - Value::Numeric(Some(dec)) => PrismaValue::BigInt(big_decimal_to_i64(dec, "BigInt")?), - Value::Boolean(Some(bool)) => PrismaValue::BigInt(bool as i64), + ValueType::Numeric(Some(dec)) => PrismaValue::BigInt(big_decimal_to_i64(dec, "BigInt")?), + ValueType::Boolean(Some(bool)) => PrismaValue::BigInt(bool as i64), other => to_prisma_value(other)?, }, - TypeIdentifier::String => match p_value { + TypeIdentifier::String => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Uuid(Some(uuid)) => PrismaValue::String(uuid.to_string()), - Value::Json(Some(ref json_value)) => { + ValueType::Uuid(Some(uuid)) => PrismaValue::String(uuid.to_string()), + ValueType::Json(Some(ref json_value)) => { PrismaValue::String(serde_json::to_string(json_value).map_err(|_| create_error(&p_value))?) 
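
Both the Int and BigInt arms accept text by stripping leading NUL padding before parsing, via trim_start_matches('\0'). A tiny sketch of that path; parse_db_int is a hypothetical helper name, and the padding rationale is an assumption inferred from the hunk:

use std::str::FromStr;

// Hypothetical helper: leading NUL padding is stripped before parsing, as the
// hunk does with trim_start_matches('\0').
fn parse_db_int(txt: &str) -> Result<i64, String> {
    i64::from_str(txt.trim_start_matches('\0'))
        .map_err(|_| format!("could not convert `{txt}` to an i64"))
}

fn main() {
    assert_eq!(parse_db_int("\u{0}\u{0}42"), Ok(42));
    assert!(parse_db_int("forty-two").is_err());
}
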
} other => to_prisma_value(other)?, }, - TypeIdentifier::Bytes => match p_value { + TypeIdentifier::Bytes => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Bytes(Some(bytes)) => PrismaValue::Bytes(bytes.into()), + ValueType::Bytes(Some(bytes)) => PrismaValue::Bytes(bytes.into()), _ => return Err(create_error(&p_value)), }, TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs index 086314ed7419..4c31fc9eedb9 100644 --- a/query-engine/connectors/sql-query-connector/src/value.rs +++ b/query-engine/connectors/sql-query-connector/src/value.rs @@ -2,44 +2,44 @@ use crate::row::{sanitize_f32, sanitize_f64}; use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; use prisma_models::PrismaValue; -use quaint::Value; +use quaint::ValueType; -pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result { - let val = match quaint_value { - Value::Int32(i) => i.map(|i| PrismaValue::Int(i as i64)).unwrap_or(PrismaValue::Null), - Value::Int64(i) => i.map(PrismaValue::Int).unwrap_or(PrismaValue::Null), - Value::Float(Some(f)) => { +pub fn to_prisma_value<'a, T: Into>>(qv: T) -> crate::Result { + let val = match qv.into() { + ValueType::Int32(i) => i.map(|i| PrismaValue::Int(i as i64)).unwrap_or(PrismaValue::Null), + ValueType::Int64(i) => i.map(PrismaValue::Int).unwrap_or(PrismaValue::Null), + ValueType::Float(Some(f)) => { sanitize_f32(f, "BigDecimal")?; PrismaValue::Float(BigDecimal::from_f32(f).unwrap().normalized()) } - Value::Float(None) => PrismaValue::Null, + ValueType::Float(None) => PrismaValue::Null, - Value::Double(Some(f)) => { + ValueType::Double(Some(f)) => { sanitize_f64(f, "BigDecimal")?; PrismaValue::Float(BigDecimal::from_f64(f).unwrap().normalized()) } - Value::Double(None) => PrismaValue::Null, + ValueType::Double(None) => PrismaValue::Null, - Value::Numeric(d) => d + ValueType::Numeric(d) => d // chop the trailing zeroes off so javascript doesn't start rounding things wrong .map(|d| PrismaValue::Float(d.normalized())) .unwrap_or(PrismaValue::Null), - Value::Text(s) => s + ValueType::Text(s) => s .map(|s| PrismaValue::String(s.into_owned())) .unwrap_or(PrismaValue::Null), - Value::Enum(s) => s + ValueType::Enum(s, _) => s .map(|s| PrismaValue::Enum(s.into_owned())) .unwrap_or(PrismaValue::Null), - Value::Boolean(b) => b.map(PrismaValue::Boolean).unwrap_or(PrismaValue::Null), + ValueType::Boolean(b) => b.map(PrismaValue::Boolean).unwrap_or(PrismaValue::Null), - Value::Array(Some(v)) => { + ValueType::Array(Some(v)) => { let mut res = Vec::with_capacity(v.len()); for v in v.into_iter() { @@ -49,22 +49,33 @@ pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result { PrismaValue::List(res) } - Value::Array(None) => PrismaValue::Null, + ValueType::Array(None) => PrismaValue::Null, - Value::Json(val) => val + ValueType::EnumArray(Some(v), name) => { + let mut res = Vec::with_capacity(v.len()); + + for v in v.into_iter() { + res.push(to_prisma_value(ValueType::Enum(Some(v), name.clone()))?); + } + + PrismaValue::List(res) + } + ValueType::EnumArray(None, _) => PrismaValue::Null, + + ValueType::Json(val) => val .map(|val| PrismaValue::Json(val.to_string())) .unwrap_or(PrismaValue::Null), - Value::Uuid(uuid) => uuid.map(PrismaValue::Uuid).unwrap_or(PrismaValue::Null), + ValueType::Uuid(uuid) => uuid.map(PrismaValue::Uuid).unwrap_or(PrismaValue::Null), - 
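
The new EnumArray arm above converts arrays by re-wrapping each element as a scalar Enum value and recursing through the same converter (the Date and Time arms of the hunk continue below). A standalone sketch of that pattern with illustrative stand-in types, not quaint's or prisma-models' definitions:

use std::borrow::Cow;

// Illustrative stand-ins, not quaint's or prisma-models' types.
enum Val<'a> {
    Enum(Option<Cow<'a, str>>, Option<Cow<'a, str>>),
    EnumArray(Option<Vec<Cow<'a, str>>>, Option<Cow<'a, str>>),
}

#[derive(Debug, PartialEq)]
enum Out {
    Null,
    Enum(String),
    List(Vec<Out>),
}

fn convert(v: Val<'_>) -> Out {
    match v {
        Val::Enum(Some(s), _) => Out::Enum(s.into_owned()),
        Val::Enum(None, _) => Out::Null,
        // Each element is re-wrapped as a scalar Enum value and fed back
        // through the same converter, so arrays reuse the single-value path.
        Val::EnumArray(Some(items), name) => Out::List(
            items
                .into_iter()
                .map(|item| convert(Val::Enum(Some(item), name.clone())))
                .collect(),
        ),
        Val::EnumArray(None, _) => Out::Null,
    }
}

fn main() {
    let v = Val::EnumArray(Some(vec![Cow::from("Red"), Cow::from("Blue")]), None);
    assert_eq!(
        convert(v),
        Out::List(vec![Out::Enum("Red".into()), Out::Enum("Blue".into())])
    );
}
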
Value::Date(d) => d + ValueType::Date(d) => d .map(|d| { let dt = DateTime::::from_utc(d.and_hms_opt(0, 0, 0).unwrap(), Utc); PrismaValue::DateTime(dt.into()) }) .unwrap_or(PrismaValue::Null), - Value::Time(t) => t + ValueType::Time(t) => t .map(|t| { let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); let dt = DateTime::::from_utc(d.and_time(t), Utc); @@ -72,19 +83,19 @@ pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result { }) .unwrap_or(PrismaValue::Null), - Value::DateTime(dt) => dt + ValueType::DateTime(dt) => dt .map(|dt| PrismaValue::DateTime(dt.into())) .unwrap_or(PrismaValue::Null), - Value::Char(c) => c + ValueType::Char(c) => c .map(|c| PrismaValue::String(c.to_string())) .unwrap_or(PrismaValue::Null), - Value::Bytes(bytes) => bytes + ValueType::Bytes(bytes) => bytes .map(|b| PrismaValue::Bytes(b.into_owned())) .unwrap_or(PrismaValue::Null), - Value::Xml(s) => s + ValueType::Xml(s) => s .map(|s| PrismaValue::String(s.into_owned())) .unwrap_or(PrismaValue::Null), }; diff --git a/query-engine/connectors/sql-query-connector/src/value_ext.rs b/query-engine/connectors/sql-query-connector/src/value_ext.rs index 1d9a82427592..a84c9da0380b 100644 --- a/query-engine/connectors/sql-query-connector/src/value_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/value_ext.rs @@ -11,24 +11,24 @@ impl<'a> IntoTypedJsonExtension for quaint::Value<'a> { return "null".to_owned(); } - let type_name = match self { - quaint::Value::Int32(_) => "int", - quaint::Value::Int64(_) => "bigint", - quaint::Value::Float(_) => "float", - quaint::Value::Double(_) => "double", - quaint::Value::Text(_) => "string", - quaint::Value::Enum(_) => "enum", - quaint::Value::Bytes(_) => "bytes", - quaint::Value::Boolean(_) => "bool", - quaint::Value::Char(_) => "char", - quaint::Value::Numeric(_) => "decimal", - quaint::Value::Json(_) => "json", - quaint::Value::Xml(_) => "xml", - quaint::Value::Uuid(_) => "uuid", - quaint::Value::DateTime(_) => "datetime", - quaint::Value::Date(_) => "date", - quaint::Value::Time(_) => "time", - quaint::Value::Array(_) => "array", + let type_name = match self.typed { + quaint::ValueType::Int32(_) => "int", + quaint::ValueType::Int64(_) => "bigint", + quaint::ValueType::Float(_) => "float", + quaint::ValueType::Double(_) => "double", + quaint::ValueType::Text(_) => "string", + quaint::ValueType::Enum(_, _) => "enum", + quaint::ValueType::Bytes(_) => "bytes", + quaint::ValueType::Boolean(_) => "bool", + quaint::ValueType::Char(_) => "char", + quaint::ValueType::Numeric(_) => "decimal", + quaint::ValueType::Json(_) => "json", + quaint::ValueType::Xml(_) => "xml", + quaint::ValueType::Uuid(_) => "uuid", + quaint::ValueType::DateTime(_) => "datetime", + quaint::ValueType::Date(_) => "date", + quaint::ValueType::Time(_) => "time", + quaint::ValueType::Array(_) | quaint::ValueType::EnumArray(_, _) => "array", }; type_name.to_owned() @@ -37,12 +37,12 @@ impl<'a> IntoTypedJsonExtension for quaint::Value<'a> { fn as_typed_json(self) -> serde_json::Value { let type_name = self.type_name(); - let json_value = match self { - quaint::Value::Array(Some(values)) => { + let json_value = match self.typed { + quaint::ValueType::Array(Some(values)) => { serde_json::Value::Array(values.into_iter().map(|value| value.as_typed_json()).collect()) } - quaint::Value::Int64(Some(value)) => serde_json::Value::String(value.to_string()), - quaint::Value::Numeric(Some(decimal)) => serde_json::Value::String(decimal.normalized().to_string()), + quaint::ValueType::Int64(Some(value)) => 
serde_json::Value::String(value.to_string()), + quaint::ValueType::Numeric(Some(decimal)) => serde_json::Value::String(decimal.normalized().to_string()), x => serde_json::Value::from(x), }; diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index c9700bb85f19..caadf6cdba00 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -29,7 +29,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } tracing-opentelemetry = "0.17.4" user-facing-errors = { path = "../../libs/user-facing-errors" } uuid = "1" -cuid = "1.2" +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } schema = { path = "../schema" } lru = "0.7.7" enumflags2 = "0.7" diff --git a/query-engine/core/src/executor/mod.rs b/query-engine/core/src/executor/mod.rs index 35ed20ab0c55..ddbb7dfc8429 100644 --- a/query-engine/core/src/executor/mod.rs +++ b/query-engine/core/src/executor/mod.rs @@ -21,7 +21,7 @@ use crate::{ }; use async_trait::async_trait; use connector::Connector; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use tracing::Dispatch; #[async_trait] @@ -57,14 +57,14 @@ pub trait QueryExecutor: TransactionManager { fn primary_connector(&self) -> &(dyn Connector + Send + Sync); } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct TransactionOptions { /// Maximum wait time for tx acquisition in milliseconds. - #[serde(rename(deserialize = "max_wait"))] + #[serde(rename = "max_wait")] pub max_acquisition_millis: u64, /// Time in milliseconds after which the transaction rolls back automatically. - #[serde(rename(deserialize = "timeout"))] + #[serde(rename = "timeout")] pub valid_for_millis: u64, /// Isolation level to use for the transaction. @@ -72,7 +72,7 @@ pub struct TransactionOptions { /// An optional pre-defined transaction id. Some value might be provided in case we want to generate /// a new id at the beginning of the transaction - #[serde(skip_deserializing)] + #[serde(skip)] pub new_tx_id: Option, } diff --git a/query-engine/core/src/interactive_transactions/mod.rs b/query-engine/core/src/interactive_transactions/mod.rs index 79eba2bb82e5..ce125e8fa17e 100644 --- a/query-engine/core/src/interactive_transactions/mod.rs +++ b/query-engine/core/src/interactive_transactions/mod.rs @@ -1,5 +1,6 @@ use crate::CoreError; use connector::Transaction; +use serde::Deserialize; use std::fmt::Display; use tokio::time::{Duration, Instant}; @@ -37,7 +38,7 @@ pub(crate) use messages::*; /// the TransactionActorManager can reply with a helpful error message which explains that no operation can be performed on a closed transaction /// rather than an error message stating that the transaction does not exist. 
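
A trimmed sketch of why the serde attributes on TransactionOptions changed above: rename(deserialize = "...") only affects deserialization, while the struct now also derives Serialize, so the plain rename = "..." and skip forms keep both directions consistent. Field names mirror the hunk; everything else is illustrative and assumes serde and serde_json are available:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct TransactionOptionsSketch {
    // `rename = "..."` applies to both directions, unlike the previous
    // `rename(deserialize = "...")`, which only affected deserialization.
    #[serde(rename = "max_wait")]
    max_acquisition_millis: u64,
    #[serde(rename = "timeout")]
    valid_for_millis: u64,
    // `skip` replaces `skip_deserializing`, so the field is also omitted when
    // serializing; deserialization falls back to Default (None).
    #[serde(skip)]
    new_tx_id: Option<String>,
}

fn main() {
    let opts = TransactionOptionsSketch {
        max_acquisition_millis: 2_000,
        valid_for_millis: 5_000,
        new_tx_id: None,
    };
    let json = serde_json::to_string(&opts).unwrap();
    assert!(json.contains("max_wait") && json.contains("timeout"));
    let roundtripped: TransactionOptionsSketch = serde_json::from_str(&json).unwrap();
    assert_eq!(roundtripped.valid_for_millis, 5_000);
}
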
-#[derive(Debug, Clone, Hash, Eq, PartialEq)] +#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize)] pub struct TxId(String); const MINIMUM_TX_ID_LENGTH: usize = 24; diff --git a/query-engine/core/src/lib.rs b/query-engine/core/src/lib.rs index fb6806e44501..7970c96139b7 100644 --- a/query-engine/core/src/lib.rs +++ b/query-engine/core/src/lib.rs @@ -18,7 +18,10 @@ pub use self::{ query_document::*, telemetry::*, }; -pub use connector::{error::ConnectorError, Connector}; +pub use connector::{ + error::{ConnectorError, ErrorKind as ConnectorErrorKind}, + Connector, +}; mod error; mod interactive_transactions; diff --git a/query-engine/core/src/query_graph_builder/write/delete.rs b/query-engine/core/src/query_graph_builder/write/delete.rs index 91cfe088a089..a5bca5af7758 100644 --- a/query-engine/core/src/query_graph_builder/write/delete.rs +++ b/query-engine/core/src/query_graph_builder/write/delete.rs @@ -1,7 +1,7 @@ use super::*; use crate::{ query_ast::*, - query_graph::{QueryGraph, QueryGraphDependency}, + query_graph::{Node, QueryGraph, QueryGraphDependency}, ArgumentListLookup, FilteredQuery, ParsedField, }; use connector::filter::Filter; @@ -52,6 +52,7 @@ pub(crate) fn delete_record( )?; graph.add_result_node(&read_node); + Ok(()) } @@ -62,31 +63,45 @@ pub fn delete_many_records( model: Model, mut field: ParsedField<'_>, ) -> QueryGraphBuilderResult<()> { - graph.flag_transactional(); - let filter = match field.arguments.lookup(args::WHERE) { Some(where_arg) => extract_filter(where_arg.value.try_into()?, &model)?, None => Filter::empty(), }; let model_id = model.primary_identifier(); - let read_query = utils::read_ids_infallible(model.clone(), model_id, filter.clone()); - let record_filter = filter.into(); + let record_filter = filter.clone().into(); let delete_many = WriteQuery::DeleteManyRecords(DeleteManyRecords { model: model.clone(), record_filter, }); - let read_query_node = graph.create_node(read_query); let delete_many_node = graph.create_node(Query::Write(delete_many)); - utils::insert_emulated_on_delete(graph, query_schema, &model, &read_query_node, &delete_many_node)?; + if query_schema.relation_mode().is_prisma() { + graph.flag_transactional(); - graph.create_edge( - &read_query_node, - &delete_many_node, - QueryGraphDependency::ExecutionOrder, - )?; + let read_query = utils::read_ids_infallible(model.clone(), model_id.clone(), filter); + let read_query_node = graph.create_node(read_query); + + utils::insert_emulated_on_delete(graph, query_schema, &model, &read_query_node, &delete_many_node)?; + + graph.create_edge( + &read_query_node, + &delete_many_node, + QueryGraphDependency::ProjectedDataDependency( + model_id, + Box::new(|mut delete_many_node, ids| { + if let Node::Query(Query::Write(WriteQuery::DeleteManyRecords(ref mut dmr))) = delete_many_node { + dmr.record_filter = ids.into(); + } + + Ok(delete_many_node) + }), + ), + )?; + } + + graph.add_result_node(&delete_many_node); Ok(()) } diff --git a/query-engine/core/src/query_graph_builder/write/upsert.rs b/query-engine/core/src/query_graph_builder/write/upsert.rs index 69d362b09b70..0a01e43e73c0 100644 --- a/query-engine/core/src/query_graph_builder/write/upsert.rs +++ b/query-engine/core/src/query_graph_builder/write/upsert.rs @@ -156,6 +156,23 @@ pub(crate) fn upsert_record( } graph.create_edge(&if_node, &create_node, QueryGraphDependency::Else)?; + + // Pass-in the read parent record result to the update node RecordFilter to avoid a redundant read. 
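
A self-contained sketch of the "projected data dependency" idea used by the delete_many_records rework above and by the upsert edge introduced in the comment just before this note (the actual create_edge call continues below): the parent node reads a set of ids, and an edge closure rewrites the child node's filter with those ids so the child needs no second read. Every type here is an illustrative stand-in, not the query-graph API:

struct DeleteManyNode {
    id_filter: Vec<u64>,
}

type EdgeClosure = Box<dyn FnOnce(DeleteManyNode, Vec<u64>) -> DeleteManyNode>;

fn projected_data_dependency() -> EdgeClosure {
    Box::new(|mut child, parent_ids| {
        // Narrow the child's filter to the concrete ids the parent produced,
        // instead of letting the child re-run the original broad filter.
        child.id_filter = parent_ids;
        child
    })
}

fn main() {
    let child = DeleteManyNode { id_filter: Vec::new() };
    let child = projected_data_dependency()(child, vec![1, 2, 3]);
    assert_eq!(child.id_filter, vec![1, 2, 3]);
}
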
+ graph.create_edge( + &read_parent_records_node, + &update_node, + QueryGraphDependency::ProjectedDataDependency( + model_id.clone(), + Box::new(move |mut update_node, parent_ids| { + if let Node::Query(Query::Write(WriteQuery::UpdateRecord(ref mut ur))) = update_node { + ur.set_selectors(parent_ids); + } + + Ok(update_node) + }), + ), + )?; + graph.create_edge( &update_node, &read_node_update, diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index d07d625b4fe6..2ad67707f22c 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -211,7 +211,7 @@ fn find_nested_aggregate_output_field<'a, 'b>( fn coerce_non_numeric(value: PrismaValue, output: &OutputType<'_>) -> PrismaValue { match (value, &output.inner) { - (PrismaValue::Int(x), InnerOutputType::Scalar(ScalarType::String)) if x == 0 => PrismaValue::Null, + (PrismaValue::Int(0), InnerOutputType::Scalar(ScalarType::String)) => PrismaValue::Null, (x, _) => x, } } diff --git a/query-engine/dmmf/Cargo.toml b/query-engine/dmmf/Cargo.toml index f4a8bfb6e6f2..cc92c914d4e6 100644 --- a/query-engine/dmmf/Cargo.toml +++ b/query-engine/dmmf/Cargo.toml @@ -10,7 +10,7 @@ serde.workspace = true serde_json.workspace = true schema = { path = "../schema" } indexmap = { version = "1.7", features = ["serde-1"] } -prisma-models = { path = "../prisma-models" } +prisma-models = { path = "../prisma-models", features = ["default_generators"] } [dev-dependencies] expect-test = "1.2.2" diff --git a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs index 1cc66275e8ca..8d078719d4fa 100644 --- a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs +++ b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs @@ -133,17 +133,15 @@ fn model_to_dmmf(model: walkers::ModelWalker<'_>) -> Model { primary_key, unique_fields: model .indexes() - .filter_map(|i| { - (i.is_unique() && !i.is_defined_on_field()).then(|| i.fields().map(|f| f.name().to_owned()).collect()) - }) + .filter(|&i| i.is_unique() && !i.is_defined_on_field()) + .map(|i| i.fields().map(|f| f.name().to_owned()).collect()) .collect(), unique_indexes: model .indexes() - .filter_map(|i| { - (i.is_unique() && !i.is_defined_on_field()).then(|| UniqueIndex { - name: i.name().map(ToOwned::to_owned), - fields: i.fields().map(|f| f.name().to_owned()).collect(), - }) + .filter(|&i| i.is_unique() && !i.is_defined_on_field()) + .map(|i| UniqueIndex { + name: i.name().map(ToOwned::to_owned), + fields: i.fields().map(|f| f.name().to_owned()).collect(), }) .collect(), } diff --git a/query-engine/dmmf/src/tests/tests.rs b/query-engine/dmmf/src/tests/tests.rs index f1d597710b56..53f11a455ee3 100644 --- a/query-engine/dmmf/src/tests/tests.rs +++ b/query-engine/dmmf/src/tests/tests.rs @@ -19,7 +19,10 @@ fn views_ignore() { } fn assert_comment(actual: Option<&String>, expected: &str) { - assert!(actual.is_some_and(|c| c.as_str() == expected)) + match actual { + Some(actual) => assert_eq!(actual.as_str(), expected), + None => panic!("Expected comment: {}", expected), + } } #[test] @@ -87,6 +90,54 @@ fn unsupported_in_composite_type() { dmmf_from_schema(schema); } +// Regression test for https://github.com/prisma/prisma/issues/20986 +#[test] +fn unusupported_in_compound_unique_must_not_panic() { + let schema = r#" + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + } + + generator client { + provider = 
"postgresql" + } + + model A { + id Int @id + field Int + unsupported Unsupported("tstzrange") + + @@unique([field, unsupported]) + } + "#; + + dmmf_from_schema(schema); +} + +#[test] +fn unusupported_in_compound_id_must_not_panic() { + let schema = r#" + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + } + + generator client { + provider = "postgresql" + } + + model A { + field Int @unique + unsupported Unsupported("tstzrange") + + @@id([field, unsupported]) + } + "#; + + dmmf_from_schema(schema); +} + const SNAPSHOTS_PATH: &str = concat!( env!("CARGO_MANIFEST_DIR"), "/src", diff --git a/query-engine/driver-adapters/.gitignore b/query-engine/driver-adapters/.gitignore new file mode 100644 index 000000000000..dab5c8905550 --- /dev/null +++ b/query-engine/driver-adapters/.gitignore @@ -0,0 +1,3 @@ +node_modules +adapter-* +driver-adapter-utils diff --git a/query-engine/driver-adapters/Cargo.toml b/query-engine/driver-adapters/Cargo.toml index 39e6804eca28..4c0b55bb0a92 100644 --- a/query-engine/driver-adapters/Cargo.toml +++ b/query-engine/driver-adapters/Cargo.toml @@ -13,6 +13,7 @@ psl.workspace = true tracing = "0.1" tracing-core = "0.1" metrics = "0.18" +uuid = { version = "1", features = ["v4"] } # Note: these deps are temporarily specified here to avoid importing them from tiberius (the SQL server driver). # They will be imported from quaint-core instead in a future PR. diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore new file mode 100644 index 000000000000..37b61ff565c7 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore @@ -0,0 +1,3 @@ +node_modules +pnpm-debug.log +dist/ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json new file mode 100644 index 000000000000..153b833df1e1 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -0,0 +1,38 @@ +{ + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "name": "connector-test-kit-executor", + "version": "0.0.1", + "description": "", + "main": "dist/index.mjs", + "module": "dist/index.mjs", + "private": true, + "scripts": { + "build": "tsup ./src/index.ts --format esm --dts" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@libsql/client": "0.3.5", + "@neondatabase/serverless": "^0.6.0", + "@planetscale/database": "1.11.0", + "@prisma/adapter-libsql": "workspace:*", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", + "@types/pg": "^8.10.2", + "pg": "^8.11.3", + "undici": "^5.26.5", + "ws": "^8.14.2" + }, + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "typescript": "5.2.2" + } +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml similarity index 75% rename from query-engine/driver-adapters/js/pnpm-lock.yaml rename to query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index 0c4b01a68f67..d4f9fa09277d 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -4,133 +4,76 @@ settings: autoInstallPeers: 
true excludeLinksFromLockfile: false -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - adapter-neon: - dependencies: - '@jkomyno/prisma-driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - - adapter-pg: - dependencies: - '@jkomyno/prisma-driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - - adapter-planetscale: - dependencies: - '@jkomyno/prisma-driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - - driver-adapter-utils: - dependencies: - debug: - specifier: ^4.3.4 - version: 4.3.4 - devDependencies: - '@types/debug': - specifier: ^4.1.8 - version: 4.1.8 - - smoke-test-js: - dependencies: - '@jkomyno/prisma-adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@jkomyno/prisma-adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@jkomyno/prisma-adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@jkomyno/prisma-driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/client': - specifier: 5.3.0-integration-feat-driver-adapters-in-client.1 - version: 5.3.0-integration-feat-driver-adapters-in-client.1(prisma@5.3.0-integration-feat-driver-adapters-in-client.1) - pg: - specifier: ^8.11.3 - version: 8.11.3 - superjson: - specifier: ^1.13.1 - version: 1.13.1 - undici: - specifier: ^5.23.0 - version: 5.23.0 - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - cross-env: - specifier: ^7.0.3 - version: 7.0.3 - prisma: - specifier: 5.3.0-integration-feat-driver-adapters-in-client.1 - version: 5.3.0-integration-feat-driver-adapters-in-client.1 - tsx: - specifier: ^3.12.7 - version: 3.12.7 +dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: ../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.5 + version: 5.26.5 + +devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 packages: - /@esbuild-kit/cjs-loader@2.4.2: - resolution: {integrity: 
sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} + /@esbuild-kit/cjs-loader@2.4.4: + resolution: {integrity: sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true - /@esbuild-kit/core-utils@3.2.2: - resolution: {integrity: sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA==} + /@esbuild-kit/core-utils@3.3.2: + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} dependencies: esbuild: 0.18.20 source-map-support: 0.5.21 dev: true - /@esbuild-kit/esm-loader@2.5.5: - resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + /@esbuild-kit/esm-loader@2.6.5: + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true /@esbuild/android-arm64@0.18.20: @@ -331,13 +274,18 @@ packages: dev: true optional: true + /@fastify/busboy@2.0.0: + resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} + engines: {node: '>=14'} + dev: false + /@jridgewell/gen-mapping@0.3.3: resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} engines: {node: '>=6.0.0'} dependencies: '@jridgewell/set-array': 1.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@jridgewell/trace-mapping': 0.3.19 + '@jridgewell/trace-mapping': 0.3.20 dev: true /@jridgewell/resolve-uri@3.1.1: @@ -354,17 +302,122 @@ packages: resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} dev: true - /@jridgewell/trace-mapping@0.3.19: - resolution: {integrity: sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==} + /@jridgewell/trace-mapping@0.3.20: + resolution: {integrity: sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==} dependencies: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 dev: true + /@libsql/client@0.3.5: + resolution: {integrity: sha512-4fZxGh0qKW5dtp1yuQLRvRAtbt02V4jzjM9sHSmz5k25xZTLg7/GlNudKdqKZrjJXEV5PvDNsczupBtedZZovw==} + dependencies: + '@libsql/hrana-client': 0.5.5 + js-base64: 3.7.5 + libsql: 0.1.34 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/darwin-arm64@0.1.34: + resolution: {integrity: sha512-Wv8jvkj/fUAO8DF3A4HaddCMldUUpKcg/WW1sY95FNsSHOxktyxqU80jAp/tCuZ85GQIJozvgSr51/ARIC0gsw==} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/darwin-x64@0.1.34: + resolution: {integrity: sha512-2NQXD9nUzC08hg7FdcZLq5uTEwGz1KbD7YvUzQb/psO1lO/E/p83wl1es1082+Pp0z5pSPDWQeRTuccD41L+3w==} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/hrana-client@0.5.5: + resolution: {integrity: sha512-i+hDBpiV719poqEiHupUUZYKJ9YSbCRFe5Q2PQ0v3mHIftePH6gayLjp2u6TXbqbO/Dv6y8yyvYlBXf/kFfRZA==} + dependencies: + '@libsql/isomorphic-fetch': 0.1.10 + '@libsql/isomorphic-ws': 0.1.5 + js-base64: 3.7.5 + node-fetch: 3.3.2 + 
transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/isomorphic-fetch@0.1.10: + resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} + dependencies: + '@types/node-fetch': 2.6.7 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + dev: false + + /@libsql/isomorphic-ws@0.1.5: + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + dependencies: + '@types/ws': 8.5.8 + ws: 8.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: false + + /@libsql/linux-arm64-gnu@0.1.34: + resolution: {integrity: sha512-r3dY1FDYZ7eX5HX7HyAoYSqK5FPugj5NSB5Bt/nz+ygBWdXASgSKxkE/RqjJIM59vXwv300iJX9qhR5fXv8sTw==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.1.34: + resolution: {integrity: sha512-9AE/eNb9eQRcNsLxqtpLJxVEoIMmItrdwqJDImPJtOp10rhp4U0x/9RGKerl9Mg3ObVj676pyhAR2KzyudrOfQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-gnu@0.1.34: + resolution: {integrity: sha512-o8toY1Txstjt13fBhZbFe8sNAW6OaS6qVcp1Bd6bHkCLSBLZ6pjJmwzQN8rFv9QFBPAnaKP3lI4vaOXXw7huTA==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-musl@0.1.34: + resolution: {integrity: sha512-EldEmcAxxNPSCjJ73oFxg81PDDIpDbPqK/QOrhmmGYLvYwrnQtVRUIbARf80JQvcy6bCxOO/Q9dh6wGhnyHyYA==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/win32-x64-msvc@0.1.34: + resolution: {integrity: sha512-jnv0qfVMnrVv00r+wUOe6DHrHuao9y1w1lN543cV2J1JdQNJT/eSZzhyZFSlS3T2ZUvXfZfZ5GeL8U18IAID6w==} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@neon-rs/load@0.0.4: + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false + /@neondatabase/serverless@0.6.0: resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} dependencies: '@types/pg': 8.6.6 + dev: false /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -390,60 +443,39 @@ packages: /@planetscale/database@1.11.0: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - - /@prisma/client@5.3.0-integration-feat-driver-adapters-in-client.1(prisma@5.3.0-integration-feat-driver-adapters-in-client.1): - resolution: {integrity: sha512-izGFo8RFgmHibBzQGRx66xfh08LcGaOysNWvMRgqT018kZ8c98qqfI0/E+LFgxb3Ar0hqz2zX8M4Fa56KvI6cw==} - engines: {node: '>=16.13'} - requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - '@prisma/engines-version': 5.3.0-28.3457e5de04da1741c969a80068702ad103e99553 - prisma: 5.3.0-integration-feat-driver-adapters-in-client.1 dev: false - /@prisma/engines-version@5.3.0-28.3457e5de04da1741c969a80068702ad103e99553: - resolution: {integrity: sha512-eb+8hgURyTu1qAWmTxgZCgBjf0UV6REC525fa1XnPpL6hxMZ7cEtFCX0f9GDopa/piCM9pq5H2ttthGOKQyVLA==} - dev: false - - /@prisma/engines@5.3.0-integration-feat-driver-adapters-in-client.1: - resolution: {integrity: sha512-euFOT9Wq0dVVXZjcLP/6/XRPr04dm4t9DtKJXUCk5Kja87bAy+knLdcC6Pkmbbjhi0fTThiKQOOxKxWBfXrr4A==} - requiresBuild: 
true - - /@types/debug@4.1.8: - resolution: {integrity: sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==} + /@types/node-fetch@2.6.7: + resolution: {integrity: sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==} dependencies: - '@types/ms': 0.7.31 - dev: true - - /@types/ms@0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: true + '@types/node': 20.5.1 + form-data: 4.0.0 + dev: false /@types/node@20.5.1: resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} - dev: true - - /@types/node@20.5.9: - resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} /@types/pg@8.10.2: resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 4.0.1 - dev: true + dev: false /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 2.2.0 + dev: false + + /@types/ws@8.5.8: + resolution: {integrity: sha512-flUksGIQCnJd6sZ1l5dqCEG/ksaoAg/eUwiLAGTJQcfgvZJKF++Ta4bJA6A5aPSJmsr+xlseHn4KLgVlNnvPTg==} + dependencies: + '@types/node': 20.5.1 + dev: false /any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} @@ -462,6 +494,10 @@ packages: engines: {node: '>=8'} dev: true + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + /balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} dev: true @@ -492,9 +528,10 @@ packages: /buffer-writer@2.0.0: resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} engines: {node: '>=4'} + dev: false - /bundle-require@4.0.1(esbuild@0.18.20): - resolution: {integrity: sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ==} + /bundle-require@4.0.2(esbuild@0.18.20): + resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: esbuild: '>=0.17' @@ -503,13 +540,6 @@ packages: load-tsconfig: 0.2.5 dev: true - /busboy@1.6.0: - resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} - engines: {node: '>=10.16.0'} - dependencies: - streamsearch: 1.1.0 - dev: false - /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -530,6 +560,13 @@ packages: fsevents: 2.3.3 dev: true + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + /commander@4.1.1: resolution: {integrity: 
sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} engines: {node: '>= 6'} @@ -539,21 +576,6 @@ packages: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} dev: true - /copy-anything@3.0.5: - resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} - engines: {node: '>=12.13'} - dependencies: - is-what: 4.1.15 - dev: false - - /cross-env@7.0.3: - resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} - engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} - hasBin: true - dependencies: - cross-spawn: 7.0.3 - dev: true - /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} @@ -563,6 +585,11 @@ packages: which: 2.0.2 dev: true + /data-uri-to-buffer@4.0.1: + resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + dev: false + /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -573,6 +600,17 @@ packages: optional: true dependencies: ms: 2.1.2 + dev: true + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + + /detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + dev: false /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} @@ -643,6 +681,14 @@ packages: reusify: 1.0.4 dev: true + /fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.2.1 + dev: false + /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} @@ -650,6 +696,22 @@ packages: to-regex-range: 5.0.1 dev: true + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + dependencies: + fetch-blob: 3.2.0 + dev: false + /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} dev: true @@ -667,8 +729,8 @@ packages: engines: {node: '>=10'} dev: true - /get-tsconfig@4.7.0: - resolution: {integrity: sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw==} + /get-tsconfig@4.7.2: + resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==} dependencies: resolve-pkg-maps: 1.0.0 dev: true @@ 
-753,11 +815,6 @@ packages: engines: {node: '>=8'} dev: true - /is-what@4.1.15: - resolution: {integrity: sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==} - engines: {node: '>=12.13'} - dev: false - /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true @@ -767,6 +824,27 @@ packages: engines: {node: '>=10'} dev: true + /js-base64@3.7.5: + resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false + + /libsql@0.1.34: + resolution: {integrity: sha512-LGofp7z7gi1Td6vu2GxaA4WyvSPEkuFn0f/ePSti1TsAlBU0LWxdk+bj9D8nqswzxiqe5wpAyTLhVzTIYSyXEA==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.1.34 + '@libsql/darwin-x64': 0.1.34 + '@libsql/linux-arm64-gnu': 0.1.34 + '@libsql/linux-arm64-musl': 0.1.34 + '@libsql/linux-x64-gnu': 0.1.34 + '@libsql/linux-x64-musl': 0.1.34 + '@libsql/win32-x64-msvc': 0.1.34 + dev: false + /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} engines: {node: '>=10'} @@ -802,6 +880,18 @@ packages: picomatch: 2.3.1 dev: true + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} @@ -815,6 +905,7 @@ packages: /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true /mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -824,6 +915,32 @@ packages: thenify-all: 1.6.0 dev: true + /node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + dev: false + + /node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: false + + /node-fetch@3.3.2: + resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + dev: false + /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} @@ -843,7 +960,7 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - dev: true + dev: false /once@1.4.0: resolution: {integrity: 
sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -860,6 +977,7 @@ packages: /packet-reader@1.0.0: resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} + dev: false /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} @@ -879,19 +997,22 @@ packages: /pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} requiresBuild: true + dev: false optional: true /pg-connection-string@2.6.2: resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + dev: false /pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + dev: false /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - dev: true + dev: false /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -899,9 +1020,11 @@ packages: pg: '>=8.0' dependencies: pg: 8.11.3 + dev: false /pg-protocol@1.6.0: resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + dev: false /pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} @@ -912,6 +1035,7 @@ packages: postgres-bytea: 1.0.0 postgres-date: 1.0.7 postgres-interval: 1.2.0 + dev: false /pg-types@4.0.1: resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} @@ -924,7 +1048,7 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 - dev: true + dev: false /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -944,11 +1068,13 @@ packages: pgpass: 1.0.5 optionalDependencies: pg-cloudflare: 1.1.1 + dev: false /pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} dependencies: split2: 4.2.0 + dev: false /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} @@ -973,60 +1099,56 @@ packages: optional: true dependencies: lilconfig: 2.1.0 - yaml: 2.3.2 + yaml: 2.3.3 dev: true /postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} + dev: false /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} - dev: true + dev: false /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} engines: {node: '>=0.10.0'} + dev: false /postgres-bytea@3.0.0: resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} engines: {node: '>= 6'} dependencies: obuf: 1.1.2 - dev: true + dev: false /postgres-date@1.0.7: 
resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} engines: {node: '>=0.10.0'} + dev: false /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} - dev: true + dev: false /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} engines: {node: '>=0.10.0'} dependencies: xtend: 4.0.2 + dev: false /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} - dev: true + dev: false /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - dev: true - - /prisma@5.3.0-integration-feat-driver-adapters-in-client.1: - resolution: {integrity: sha512-M5EjBFZ3P3mjgYOfRBLqg5wKKeXq/VTv2wF9Ft4YCMMsHlcIJJ9IMV1UkzZLmP1yTdMxougJcLeDA9QGmdpsMA==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - dependencies: - '@prisma/engines': 5.3.0-integration-feat-driver-adapters-in-client.1 + dev: false /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} @@ -1058,8 +1180,8 @@ packages: engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rollup@3.28.1: - resolution: {integrity: sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==} + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true optionalDependencies: @@ -1115,10 +1237,6 @@ packages: /split2@4.2.0: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} - - /streamsearch@1.1.0: - resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} - engines: {node: '>=10.0.0'} dev: false /strip-final-newline@2.0.0: @@ -1140,13 +1258,6 @@ packages: ts-interface-checker: 0.1.13 dev: true - /superjson@1.13.1: - resolution: {integrity: sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==} - engines: {node: '>=10'} - dependencies: - copy-anything: 3.0.5 - dev: false - /thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -1167,6 +1278,10 @@ packages: is-number: 7.0.0 dev: true + /tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false + /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} dependencies: @@ -1198,7 +1313,7 @@ packages: typescript: optional: true dependencies: - bundle-require: 4.0.1(esbuild@0.18.20) + bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 chokidar: 3.5.3 debug: 4.3.4 @@ -1208,7 +1323,7 @@ packages: joycon: 3.1.1 postcss-load-config: 4.0.1 resolve-from: 5.0.0 - rollup: 3.28.1 + rollup: 3.29.4 source-map: 0.8.0-beta.0 sucrase: 3.34.0 tree-kill: 1.2.2 @@ -1222,9 +1337,9 @@ packages: resolution: {integrity: 
sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} hasBin: true dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.2.2 - '@esbuild-kit/esm-loader': 2.5.5 + '@esbuild-kit/cjs-loader': 2.4.4 + '@esbuild-kit/core-utils': 3.3.2 + '@esbuild-kit/esm-loader': 2.6.5 optionalDependencies: fsevents: 2.3.3 dev: true @@ -1235,17 +1350,33 @@ packages: hasBin: true dev: true - /undici@5.23.0: - resolution: {integrity: sha512-1D7w+fvRsqlQ9GscLBwcAJinqcZGHUKjbOmXdlE/v8BvEGXjeWAax+341q44EuTcHXXnfyKNbKRq4Lg7OzhMmg==} + /undici@5.26.5: + resolution: {integrity: sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==} engines: {node: '>=14.0'} dependencies: - busboy: 1.6.0 + '@fastify/busboy': 2.0.0 + dev: false + + /web-streams-polyfill@3.2.1: + resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} + engines: {node: '>= 8'} + dev: false + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} dev: false /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} dev: true + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + dev: false + /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} dependencies: @@ -1266,11 +1397,25 @@ packages: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} dev: true + /ws@8.14.2: + resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + dev: false + /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + dev: false - /yaml@2.3.2: - resolution: {integrity: sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==} + /yaml@2.3.3: + resolution: {integrity: sha512-zw0VAJxgeZ6+++/su5AFoqBbZbrEakwu+X0M5HmcwUiBL7AzcuPKjj5we4xfQLp78LkEMpD0cOnUhmgOVy3KdQ==} engines: {node: '>= 14'} dev: true diff --git a/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh new file mode 100755 index 000000000000..000f3bd1d45c --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +node "$(dirname "${BASH_SOURCE[0]}")/../dist/index.mjs" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts similarity index 100% rename from query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts diff --git 
a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts similarity index 83% rename from query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts index a8f1c28bb64c..b0e0b06abc49 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts @@ -1,4 +1,4 @@ -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { DriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineConfig } from './QueryEngine' export type QueryEngineInstance = { @@ -18,11 +18,7 @@ export type QueryEngineInstance = { } export interface QueryEngineConstructor { - new( - config: QueryEngineConfig, - logger: (log: string) => void, - driverAdapter?: ErrorCapturingDriverAdapter, - ): QueryEngineInstance + new(config: QueryEngineConfig, logger: (log: string) => void, nodejsFnCtx?: DriverAdapter): QueryEngineInstance } export interface LibraryLoader { diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts new file mode 100644 index 000000000000..416da634fc91 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts @@ -0,0 +1,89 @@ +import { JsonBatchQuery, JsonQuery } from './JsonProtocol' +import * as Transaction from './Transaction' + +// Events +export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent + +export type QueryEngineLogEvent = { + level: string + module_path: string + message: string + span?: boolean +} + +export type QueryEngineQueryEvent = { + level: 'info' + module_path: string + query: string + item_type: 'query' + params: string + duration_ms: string + result: string +} + +export type QueryEnginePanicEvent = { + level: 'error' + module_path: string + message: 'PANIC' + reason: string + file: string + line: string + column: string +} + + +export type GraphQLQuery = { + query: string + variables: object +} + +export type EngineProtocol = 'graphql' | 'json' +export type EngineQuery = GraphQLQuery | JsonQuery + +export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] + +export type QueryEngineConfig = { + // TODO rename datamodel here and other places + datamodel: string + configDir: string + logQueries: boolean + ignoreEnvVarErrors: boolean + datasourceOverrides?: Record + env: Record + logLevel?: string + engineProtocol: EngineProtocol +} + +// Errors +export type SyncRustError = { + is_panic: boolean + message: string + meta: { + full_error: string + } + error_code: string +} + +export type RustRequestError = { + is_panic: boolean + message: string + backtrace: string +} + +export type QueryEngineResult = { + data: T + elapsed: number +} + +export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery + +export type QueryEngineBatchGraphQLRequest = { + batch: QueryEngineRequest[] + transaction?: boolean + isolationLevel?: Transaction.IsolationLevel +} + +export type QueryEngineRequest = { + query: string + variables: Object +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts 
b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts similarity index 100% rename from query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts new file mode 100644 index 000000000000..b89348fb3e77 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -0,0 +1,287 @@ +import * as qe from './qe' +import * as engines from './engines/Library' +import * as readline from 'node:readline' +import * as jsonRpc from './jsonRpc' + +// pg dependencies +import pgDriver from 'pg' +import * as prismaPg from '@prisma/adapter-pg' + +// neon dependencies +import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' +import { fetch } from 'undici' +import { WebSocket } from 'ws' +import * as prismaNeon from '@prisma/adapter-neon' + +// libsql dependencies +import { createClient } from '@libsql/client' +import { PrismaLibSQL } from '@prisma/adapter-libsql' + +// planetscale dependencies +import { connect as planetscaleConnect } from '@planetscale/database' +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' + + +import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; + + +const SUPPORTED_ADAPTERS: Record Promise> + = { + "pg": pgAdapter, + "neon:ws" : neonWsAdapter, + "libsql": libsqlAdapter, + "planetscale": planetscaleAdapter, + }; + +// conditional debug logging based on LOG_LEVEL env var +const debug = (() => { + if ((process.env.LOG_LEVEL ?? '').toLowerCase() != 'debug') { + return (...args: any[]) => {} + } + + return (...args: any[]) => { + console.error('[nodejs] DEBUG:', ...args); + }; +})(); + +// error logger +const err = (...args: any[]) => console.error('[nodejs] ERROR:', ...args); + +async function main(): Promise { + const iface = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false, + }); + + iface.on('line', async (line) => { + try { + const request: jsonRpc.Request = JSON.parse(line); // todo: validate + debug(`Got a request: ${line}`) + try { + const response = await handleRequest(request.method, request.params) + respondOk(request.id, response) + } catch (err) { + debug("[nodejs] Error from request handler: ", err) + respondErr(request.id, { + code: 1, + message: err.toString(), + }) + } + } catch (err) { + debug("Received non-json line: ", line); + } + + }); +} + +const state: Record = {} + +async function handleRequest(method: string, params: unknown): Promise { + switch (method) { + case 'initializeSchema': { + interface InitializeSchemaParams { + schema: string + schemaId: string + url: string + } + + const castParams = params as InitializeSchemaParams; + const logs = [] as string[] + const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { + logs.push(log) + }); + await engine.connect("") + + state[castParams.schemaId] = { + engine, + adapter, + logs + } + return null + } + case 'query': { + interface QueryPayload { + query: string + schemaId: number + txId?: string + } + + debug("Got `query`", params) + const castParams = params as QueryPayload; + const engine = state[castParams.schemaId].engine + const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId) + + const parsedResult = 
JSON.parse(result) + if (parsedResult.errors) { + const error = parsedResult.errors[0]?.user_facing_error + if (error.error_code === 'P2036') { + const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id) + if (!jsError) { + err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) + } else { + err("got error response from the engine caused by the driver: ", jsError) + } + } + } + + debug("got response from engine: ", result) + // returning unparsed string: otherwise, some information gots lost during this round-trip. + // In particular, floating point without decimal part turn into integers + return result + } + + case 'startTx': { + interface StartTxPayload { + schemaId: number, + options: unknown + } + + debug("Got `startTx", params) + const {schemaId, options} = params as StartTxPayload + const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), "") + return JSON.parse(result) + } + + case 'commitTx': { + interface CommitTxPayload { + schemaId: number, + txId: string, + } + + debug("Got `commitTx", params) + const {schemaId, txId} = params as CommitTxPayload + const result = await state[schemaId].engine.commitTransaction(txId, '{}') + return JSON.parse(result) + } + + case 'rollbackTx': { + interface RollbackTxPayload { + schemaId: number, + txId: string, + } + + debug("Got `rollbackTx", params) + const {schemaId, txId} = params as RollbackTxPayload + const result = await state[schemaId].engine.rollbackTransaction(txId, '{}') + return JSON.parse(result) + } + case 'teardown': { + interface TeardownPayload { + schemaId: number + } + + debug("Got `teardown", params) + const castParams = params as TeardownPayload; + await state[castParams.schemaId].engine.disconnect("") + delete state[castParams.schemaId] + + return {} + } + case 'getLogs': { + interface GetLogsPayload { + schemaId: number + } + + const castParams = params as GetLogsPayload + return state[castParams.schemaId].logs + } + default: { + throw new Error(`Unknown method: \`${method}\``) + } + } +} + +function respondErr(requestId: number, error: jsonRpc.RpcError) { + const msg: jsonRpc.ErrResponse = { + jsonrpc: '2.0', + id: requestId, + error, + } + console.log(JSON.stringify(msg)) +} + +function respondOk(requestId: number, payload: unknown) { + const msg: jsonRpc.OkResponse = { + jsonrpc: '2.0', + id: requestId, + result: payload + + }; + console.log(JSON.stringify(msg)) +} + +async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[engines.QueryEngineInstance, ErrorCapturingDriverAdapter]> { + const adapter = await adapterFromEnv(url) as DriverAdapter + const errorCapturingAdapter = bindAdapter(adapter) + const engineInstance = qe.initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback, debug) + return [engineInstance, errorCapturingAdapter]; +} + +async function adapterFromEnv(url: string): Promise { + const adapter = process.env.DRIVER_ADAPTER ?? 
'' + + if (adapter == '') { + throw new Error("DRIVER_ADAPTER is not defined or empty.") + } + + if (!(adapter in SUPPORTED_ADAPTERS)) { + throw new Error(`Unsupported driver adapter: ${adapter}`) + } + + return await SUPPORTED_ADAPTERS[adapter](url) +} + +function postgres_options(url: string): any { + let args: any = {connectionString: url} + const schemaName = new URL(url).searchParams.get('schema') + if (schemaName != null) { + args.options = `--search_path="${schemaName}"` + } + return args; +} + +async function pgAdapter(url: string): Promise { + const pool = new pgDriver.Pool(postgres_options(url)) + return new prismaPg.PrismaPg(pool) +} + +async function neonWsAdapter(url: string): Promise { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? '' + if (proxyURL == '') { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for neon adapter."); + } + + neonConfig.wsProxy = () => proxyURL + neonConfig.webSocketConstructor = WebSocket + neonConfig.useSecureWebSocket = false + neonConfig.pipelineConnect = false + + const pool = new NeonPool(postgres_options(url)) + return new prismaNeon.PrismaNeon(pool) +} + +async function libsqlAdapter(url: string): Promise { + const libsql = createClient({ url, intMode: 'bigint' }) + return new PrismaLibSQL(libsql) +} + +async function planetscaleAdapter(url: string): Promise { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? '' + if (proxyURL == '') { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); + } + + const connection = planetscaleConnect({ + url: proxyURL, + fetch, + }) + + return new PrismaPlanetScale(connection) +} + +main().catch(err) diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts new file mode 100644 index 000000000000..ec734e7b543f --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts @@ -0,0 +1,28 @@ +export interface Request { + jsonrpc: '2.0' + method: string + params?: Object, + id: number +} + +export type Response = OkResponse | ErrResponse + +export interface OkResponse { + jsonrpc: '2.0' + result: unknown + error?: never + id: number +} + +export interface ErrResponse { + jsonrpc: '2.0' + error: RpcError + result?: never + id: number +} + +export interface RpcError { + code: number + message: string + data?: unknown +} diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts new file mode 100644 index 000000000000..186d7a9e80d2 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts @@ -0,0 +1,41 @@ +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' +import * as lib from './engines/Library' +import * as os from 'node:os' +import * as path from 'node:path' + +export type QueryLogCallback = (log: string) => void + +export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: string, queryLogCallback: QueryLogCallback, debug: (...args: any[]) => void): lib.QueryEngineInstance { + // I assume nobody will run this on Windows ¯\_(ツ)_/¯ + const libExt = os.platform() === 'darwin' ? 
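As a rough illustration of the line-delimited JSON-RPC protocol implemented by `index.ts` and `jsonRpc.ts` above, a single `initializeSchema` round trip could look like the sketch below; the schema, url, and id values are invented for the example.

```ts
import type { Request, OkResponse } from './jsonRpc'

// One request per line on the executor's stdin...
const request: Request = {
  jsonrpc: '2.0',
  id: 1,
  method: 'initializeSchema',
  params: {
    schemaId: 'example-schema',
    schema: 'datasource db { provider = "postgres"\n  url = env("DATABASE_URL") }',
    url: 'postgres://user:password@localhost:5432/db',
  },
}
const requestLine = JSON.stringify(request) + '\n'

// ...and one response per line on its stdout, produced by respondOk() above.
const expectedReply: OkResponse = { jsonrpc: '2.0', id: 1, result: null }
```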
'dylib' : 'so' + const dirname = path.dirname(new URL(import.meta.url).pathname) + + const libQueryEnginePath = path.join(dirname, `../../../../target/debug/libquery_engine.${libExt}`) + + const libqueryEngine = { exports: {} as unknown as lib.Library } + // @ts-ignore + process.dlopen(libqueryEngine, libQueryEnginePath) + + const QueryEngine = libqueryEngine.exports.QueryEngine + + const queryEngineOptions = { + datamodel, + configDir: '.', + engineProtocol: 'json' as const, + logLevel: process.env["RUST_LOG"] ?? 'info' as any, + logQueries: true, + env: process.env, + ignoreEnvVarErrors: false, + } + + + const logCallback = (event: any) => { + const parsed = JSON.parse(event) + if (parsed.is_query) { + queryLogCallback(parsed.query) + } + debug(parsed) + } + + return new QueryEngine(queryEngineOptions, logCallback, adapter) +} diff --git a/query-engine/driver-adapters/js/tsconfig.json b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json similarity index 99% rename from query-engine/driver-adapters/js/tsconfig.json rename to query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json index b405cea50201..516c114b3e15 100644 --- a/query-engine/driver-adapters/js/tsconfig.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json @@ -20,4 +20,4 @@ "resolveJsonModule": true }, "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] -} +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/.gitignore b/query-engine/driver-adapters/js/.gitignore deleted file mode 100644 index e885963af278..000000000000 --- a/query-engine/driver-adapters/js/.gitignore +++ /dev/null @@ -1,44 +0,0 @@ -node_modules - -yarn-error.log -dist -build -tmp -pnpm-debug.log -sandbox -.DS_Store - -query-engine* -migration-engine* -schema-engine* -libquery_engine* -libquery-engine* -query_engine-windows.dll.node - -*tmp.db -dist/ -declaration/ - -*.tsbuildinfo -.prisma -.pnpm-store - -.vscode -!.vscode/launch.json.default -coverage - -.eslintcache - -.pnpm-debug.log - -.envrc - -esm -reproductions/* -!reproductions/basic-sqlite -!reproductions/tracing -!reproductions/pnpm-workspace.yaml - -dev.db -junit.xml -/output.txt diff --git a/query-engine/driver-adapters/js/.npmrc b/query-engine/driver-adapters/js/.npmrc deleted file mode 100644 index c87ec9b9e3d3..000000000000 --- a/query-engine/driver-adapters/js/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -git-checks=false -access=public diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc deleted file mode 100644 index 8c60e1e54f37..000000000000 --- a/query-engine/driver-adapters/js/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -v20.5.1 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md deleted file mode 100644 index d4198f4c31f3..000000000000 --- a/query-engine/driver-adapters/js/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# Prisma Driver Adapters - -This TypeScript monorepo contains the following packages: -- `@jkomyno/prisma-driver-adapter-utils` (later: `@prisma/driver-adapter-utils`) - - Internal set of utilities and types for Prisma's driver adapters. 
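To make the loader above concrete, a hedged usage sketch of `initQueryEngine` with the pg adapter might look like this; the connection string and schema are placeholders, and error handling is omitted.

```ts
import pgDriver from 'pg'
import { PrismaPg } from '@prisma/adapter-pg'
import { bindAdapter } from '@prisma/driver-adapter-utils'
import { initQueryEngine } from './qe'

// Illustrative wiring only: build a pg pool, wrap it with the error-capturing
// binder, and hand it to the engine loader defined above.
const pool = new pgDriver.Pool({ connectionString: 'postgres://user:password@localhost:5432/db' })
const adapter = bindAdapter(new PrismaPg(pool))

const engine = initQueryEngine(
  adapter,
  'datasource db { provider = "postgres"\n  url = env("DATABASE_URL") }',
  (log) => console.log(log),
  (...args) => console.error('[debug]', ...args),
)

await engine.connect('')
```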
-- `@jkomyno/prisma-adapter-neon` (later: `@prisma/adapter-neon`) - - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@jkomyno/prisma-adapter-planetscale` (later: `@prisma/adapter-planetscale`) - - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - - It uses `provider = "mysql"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@jkomyno/prisma-adapter-pg` (later: `@prisma/adapter-pg`) - - Prisma's Driver Adapter that wraps the `pg` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` - -## Get Started - -We assume Node.js `v18.16.1`+ is installed. If not, run `nvm use` in the current directory. -This is very important to double-check if you have multiple versions installed, as PlanetScale requires either Node.js `v18.16.1`+ or a custom `fetch` function. - -Install `pnpm` via: - -```sh -npm i -g pnpm -``` - -## Development - -- Install Node.js dependencies via `pnpm i` -- Build and link TypeScript packages via `pnpm build` -- Publish packages to `npm` via `pnpm publish -r` diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md deleted file mode 100644 index 74e6fa91dc31..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-neon - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json deleted file mode 100644 index 52ee08f97be2..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@jkomyno/prisma-adapter-neon", - "version": "0.2.1", - "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@neondatabase/serverless": "^0.6.0" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts deleted file mode 100644 index ea91f57eefdf..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' -import { types } from '@neondatabase/serverless' - -const NeonColumnType = types.builtins - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case NeonColumnType['INT2']: - case NeonColumnType['INT4']: - return ColumnTypeEnum.Int32 - case NeonColumnType['INT8']: - return ColumnTypeEnum.Int64 - case NeonColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case NeonColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case NeonColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case NeonColumnType['DATE']: - return ColumnTypeEnum.Date - case NeonColumnType['TIME']: - return ColumnTypeEnum.Time - case NeonColumnType['TIMESTAMP']: - return ColumnTypeEnum.DateTime - case NeonColumnType['NUMERIC']: - return ColumnTypeEnum.Numeric - case NeonColumnType['BPCHAR']: - return ColumnTypeEnum.Char - case NeonColumnType['TEXT']: - case NeonColumnType['VARCHAR']: - return ColumnTypeEnum.Text - case NeonColumnType['JSONB']: - return ColumnTypeEnum.Json - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -// return string instead of JavaScript Date object -types.setTypeParser(NeonColumnType.DATE, date => date) -types.setTypeParser(NeonColumnType.TIME, date => date) -types.setTypeParser(NeonColumnType.TIMESTAMP, date => date) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/index.ts b/query-engine/driver-adapters/js/adapter-neon/src/index.ts deleted file mode 100644 index f160d413ade0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaNeon, PrismaNeonHTTP } from './neon' diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts deleted file mode 100644 index ba17da3036b6..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ /dev/null @@ -1,137 +0,0 @@ -import type neon from '@neondatabase/serverless' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:neon') - -type ARRAY_MODE_DISABLED = false -type FULL_RESULTS_ENABLED = true - -type PerformIOResult = neon.QueryResult | neon.FullQueryResults - -/** - * Base class for http client, ws client and ws transaction - */ -abstract class NeonQueryable implements Queryable { - readonly flavour = 'postgres' - - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const { fields, rows: results } = await this.performIO(query) - - const columns = fields.map(field => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map(field => fieldToColumnType(field.dataTypeID)), - rows: results.map(result => columns.map(column => result[column])), - } - - return { ok: true, value: resultSet } - } - - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - const { rowCount: rowsAffected } = await this.performIO(query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return { ok: true, value: rowsAffected ?? 
0 } - } - - abstract performIO(query: Query): Promise -} - -/** - * Base class for WS-based queryables: top-level client and transaction - */ -class NeonWsQueryable extends NeonQueryable { - constructor(protected client: ClientT) { - super() - } - - override async performIO(query: Query): Promise { - const { sql, args: values } = query - - try { - return await this.client.query(sql, values) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error - } - } -} - -class NeonTransaction extends NeonWsQueryable implements Transaction { - constructor(client: neon.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.client.release() - return Promise.resolve({ ok: true, value: undefined }) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.client.release() - return Promise.resolve({ ok: true, value: undefined }) - } -} - -export class PrismaNeon extends NeonWsQueryable implements DriverAdapter { - private isRunning = true - - constructor(pool: neon.Pool) { - super(pool) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return { ok: true, value: new NeonTransaction(connection, options) } - } - - async close() { - if (this.isRunning) { - await this.client.end() - this.isRunning = false - } - return { ok: true as const, value: undefined } - } -} - -export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction< - ARRAY_MODE_DISABLED, - FULL_RESULTS_ENABLED - >) { - super() - } - - override async performIO(query: Query): Promise { - const { sql, args: values } = query - return await this.client(sql, values) - } - - startTransaction(): Promise> { - return Promise.reject(new Error('Transactions are not supported in HTTP mode')) - } - - async close() { - return { ok: true as const, value: undefined } - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-pg/README.md b/query-engine/driver-adapters/js/adapter-pg/README.md deleted file mode 100644 index b8463742e25c..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-pg - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json deleted file mode 100644 index 3055976cb51b..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@jkomyno/prisma-adapter-pg", - "version": "0.2.1", - "description": "Prisma's driver 
adapter for \"pg\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Tom Houlé ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "pg": "^8.11.3", - "@types/pg": "^8.10.2" - }, - "peerDependencies": { - "pg": "^8.11.3" - } -} diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts deleted file mode 100644 index fc9ad43e9f0e..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' -import { types } from 'pg' - -const PgColumnType = types.builtins - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case PgColumnType['INT2']: - case PgColumnType['INT4']: - return ColumnTypeEnum.Int32 - case PgColumnType['INT8']: - return ColumnTypeEnum.Int64 - case PgColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case PgColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case PgColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case PgColumnType['DATE']: - return ColumnTypeEnum.Date - case PgColumnType['TIME']: - return ColumnTypeEnum.Time - case PgColumnType['TIMESTAMP']: - return ColumnTypeEnum.DateTime - case PgColumnType['NUMERIC']: - return ColumnTypeEnum.Numeric - case PgColumnType['BPCHAR']: - return ColumnTypeEnum.Char - case PgColumnType['TEXT']: - case PgColumnType['VARCHAR']: - return ColumnTypeEnum.Text - case PgColumnType['JSONB']: - return ColumnTypeEnum.Json - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -// return string instead of JavaScript Date object -types.setTypeParser(PgColumnType.DATE, date => date) -types.setTypeParser(PgColumnType.TIME, date => date) -types.setTypeParser(PgColumnType.TIMESTAMP, date => date) diff --git a/query-engine/driver-adapters/js/adapter-pg/src/index.ts b/query-engine/driver-adapters/js/adapter-pg/src/index.ts deleted file mode 100644 index f8e51ac2685b..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPg } from './pg' diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts deleted file mode 100644 index a6a4ba4b58d9..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ /dev/null @@ -1,112 +0,0 @@ -import type pg from 'pg' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, Query, Queryable, Result, ResultSet, Transaction, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:pg') - -type StdClient = pg.Pool -type TransactionClient = pg.PoolClient - -class PgQueryable - implements 
Queryable { - readonly flavour = 'postgres' - - constructor(protected readonly client: ClientT) { - } - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const { fields, rows: results } = await this.performIO(query) - - const columns = fields.map((field) => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), - rows: results.map((result) => columns.map((column) => result[column])), - } - - return { ok: true, value: resultSet } - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - const { rowCount: rowsAffected } = await this.performIO(query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return { ok: true, value: rowsAffected ?? 0 } - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query) { - const { sql, args: values } = query - - try { - const result = await this.client.query(sql, values) - return result - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error - } - } -} - -class PgTransaction extends PgQueryable - implements Transaction { - constructor(client: pg.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.client.release() - return Promise.resolve({ ok: true, value: undefined }) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.client.release() - return Promise.resolve({ ok: true, value: undefined }) - } -} - -export class PrismaPg extends PgQueryable implements DriverAdapter { - constructor(client: pg.Pool) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return { ok: true, value: new PgTransaction(connection, options) } - } - - async close() { - return { ok: true as const, value: undefined } - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md deleted file mode 100644 index 61b8b1717c1a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-planetscale - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json deleted file mode 100644 index bdda6c0a5c99..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@jkomyno/prisma-adapter-planetscale", - "version": "0.2.1", - "description": "Prisma's driver adapter for \"@planetscale/database\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm 
--dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@planetscale/database": "^1.11.0" - }, - "peerDependencies": { - "@planetscale/database": "^1.11.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts deleted file mode 100644 index 2c79afdddd64..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' - -// See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 -export type PlanetScaleColumnType - = 'NULL_TYPE' // unsupported - | 'INT8' - | 'UINT8' - | 'INT16' - | 'UINT16' - | 'INT24' - | 'UINT24' - | 'INT32' - | 'UINT32' - | 'INT64' - | 'UINT64' - | 'FLOAT32' - | 'FLOAT64' - | 'TIMESTAMP' - | 'DATE' - | 'TIME' - | 'DATETIME' - | 'YEAR' - | 'DECIMAL' - | 'TEXT' - | 'BLOB' - | 'VARCHAR' - | 'VARBINARY' - | 'CHAR' - | 'BINARY' - | 'BIT' - | 'ENUM' - | 'SET' // unsupported - | 'TUPLE' // unsupported - | 'GEOMETRY' - | 'JSON' - | 'EXPRESSION' // unsupported - | 'HEXNUM' - | 'HEXVAL' - | 'BITNUM' - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType { - switch (field) { - case 'INT8': - case 'UINT8': - case 'INT16': - case 'UINT16': - case 'INT24': - case 'UINT24': - case 'INT32': - case 'UINT32': - case 'YEAR': - return ColumnTypeEnum.Int32 - case 'INT64': - case 'UINT64': - return ColumnTypeEnum.Int64 - case 'FLOAT32': - return ColumnTypeEnum.Float - case 'FLOAT64': - return ColumnTypeEnum.Double - case 'TIMESTAMP': - case 'DATETIME': - return ColumnTypeEnum.DateTime - case 'DATE': - return ColumnTypeEnum.Date - case 'TIME': - return ColumnTypeEnum.Time - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'CHAR': - return ColumnTypeEnum.Char - case 'TEXT': - case 'VARCHAR': - return ColumnTypeEnum.Text - case 'ENUM': - return ColumnTypeEnum.Enum - case 'JSON': - return ColumnTypeEnum.Json - case 'BLOB': - case 'BINARY': - case 'VARBINARY': - case 'BIT': - case 'BITNUM': - case 'HEXNUM': - case 'HEXVAL': - case 'GEOMETRY': - return ColumnTypeEnum.Bytes - default: - throw new Error(`Unsupported column type: ${field}`) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts deleted file mode 100644 index 013409c8424f..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts +++ /dev/null @@ -1,13 +0,0 @@ -export type Deferred = { - resolve(value: T | PromiseLike): void; - reject(reason: unknown): void; -} - - -export function createDeferred(): [Deferred, Promise] { - const deferred = {} as Deferred - return [deferred, new Promise((resolve, reject) => { - deferred.resolve = resolve - deferred.reject = reject - })] -} \ No newline at end of file diff --git 
a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts deleted file mode 100644 index 5e8add856fbb..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPlanetScale } from './planetscale' diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts deleted file mode 100644 index 8bd2610336b5..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ /dev/null @@ -1,139 +0,0 @@ -import type planetScale from '@planetscale/database' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' -import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' -import { createDeferred, Deferred } from './deferred' - -const debug = Debug('prisma:driver-adapter:planetscale') - -class RollbackError extends Error { - constructor() { - super('ROLLBACK') - this.name = 'RollbackError' - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, RollbackError) - } - } -} - - -class PlanetScaleQueryable implements Queryable { - readonly flavour = 'mysql' - constructor(protected client: ClientT) { - } - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const { fields, insertId: lastInsertId, rows: results } = await this.performIO(query) - - const columns = fields.map(field => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map(field => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: results.map(result => columns.map(column => result[column])), - lastInsertId, - } - - return { ok: true, value: resultSet } - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - const { rowsAffected } = await this.performIO(query) - return { ok: true, value: rowsAffected } - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. 
- */ - private async performIO(query: Query) { - const { sql, args: values } = query - - try { - const result = await this.client.execute(sql, values) - return result - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error - } - } -} - -class PlanetScaleTransaction extends PlanetScaleQueryable implements Transaction { - constructor( - tx: planetScale.Transaction, - readonly options: TransactionOptions, - private txDeferred: Deferred, - private txResultPromise: Promise, - ) { - super(tx) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.txDeferred.resolve() - return Promise.resolve({ ok: true, value: await this.txResultPromise }) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.txDeferred.reject(new RollbackError()) - return Promise.resolve({ ok: true, value: await this.txResultPromise }) - } - -} - -export class PrismaPlanetScale extends PlanetScaleQueryable implements DriverAdapter { - constructor(client: planetScale.Connection) { - super(client) - } - - async startTransaction() { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - return new Promise>((resolve, reject) => { - const txResultPromise = this.client.transaction(async tx => { - const [txDeferred, deferredPromise] = createDeferred() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve({ ok: true, value: txWrapper }) - return deferredPromise - }).catch(error => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) - }) - } - - async close() { - return { ok: true as const, value: undefined } - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/README.md b/query-engine/driver-adapters/js/driver-adapter-utils/README.md deleted file mode 100644 index 78938e802bd3..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/driver-adapters-utils - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json deleted file mode 100644 index 524d59e551fc..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@jkomyno/prisma-driver-adapter-utils", - "version": "0.2.1", - "description": "Internal set of utilities and types for Prisma's driver adapters.", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": 
"dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "debug": "^4.3.4" - }, - "devDependencies": { - "@types/debug": "^4.1.8" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts deleted file mode 100644 index 9d399056f9a1..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ /dev/null @@ -1,63 +0,0 @@ -import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord, Result } from './types' - -class ErrorRegistryInternal implements ErrorRegistry { - private registeredErrors: ErrorRecord[] = [] - - consumeError(id: number): ErrorRecord | undefined { - return this.registeredErrors[id] - } - - registerNewError(error: unknown) { - let i = 0 - while (this.registeredErrors[i] !== undefined) { - i++ - } - this.registeredErrors[i] = { error } - return i - } -} - -// *.bind(adapter) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { - const errorRegistry = new ErrorRegistryInternal() - - return { - errorRegistry, - queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), - executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), - flavour: adapter.flavour, - startTransaction: async (...args) => { - const result = await adapter.startTransaction(...args) - if (result.ok) { - return { ok: true, value: bindTransaction(errorRegistry, result.value) } - } - return result - }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)) - } -} - -// *.bind(transaction) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => { - return ({ - flavour: transaction.flavour, - options: transaction.options, - queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)), - executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), - commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), - rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), - }); -} - -function wrapAsync(registry: ErrorRegistryInternal, fn: (...args: A) => Promise>): (...args: A) => Promise> { - return async (...args) => { - try { - return await fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return { ok: false, error: { kind: 'GenericJsError', id } } - } - } -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts deleted file mode 100644 index f65104d5bf55..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts +++ /dev/null @@ -1,23 +0,0 @@ -// Same order as in rust driver-adapters' `ColumnType`. -// Note: exporting const enums causes lots of problems with bundlers, so we emulate -// them via regular dictionaries. 
-// See: https://hackmd.io/@dzearing/Sk3xV0cLs -export const ColumnTypeEnum = { - 'Int32': 0, - 'Int64': 1, - 'Float': 2, - 'Double': 3, - 'Numeric': 4, - 'Boolean': 5, - 'Char': 6, - 'Text': 7, - 'Date': 8, - 'Time': 9, - 'DateTime': 10, - 'Json': 11, - 'Enum': 12, - 'Bytes': 13, - // 'Set': 14, - // 'Array': 15, - // ... -} as const diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts deleted file mode 100644 index e0a1fe380fa2..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { debug as Debug } from 'debug' - -export { Debug } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts deleted file mode 100644 index ce04822473d9..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export { bindAdapter } from './binder' -export { ColumnTypeEnum } from './const' -export { Debug } from './debug' -export type * from './types' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts deleted file mode 100644 index 826bc67acea7..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { ColumnTypeEnum } from './const' - -export type ColumnType = typeof ColumnTypeEnum[keyof typeof ColumnTypeEnum] - -export interface ResultSet { - /** - * List of column types appearing in a database query, in the same order as `columnNames`. - * They are used within the Query Engine to convert values from JS to Quaint values. - */ - columnTypes: Array - - /** - * List of column names appearing in a database query, in the same order as `columnTypes`. - */ - columnNames: Array - - /** - * List of rows retrieved from a database query. - * Each row is a list of values, whose length matches `columnNames` and `columnTypes`. - */ - rows: Array> - - /** - * The last ID of an `INSERT` statement, if any. - * This is required for `AUTO_INCREMENT` columns in MySQL and SQLite-flavoured databases. - */ - lastInsertId?: string -} - -export type Query = { - sql: string - args: Array -} - -export type Error = { - kind: 'GenericJsError', - id: number -} - -export type Result = { - ok: true, - value: T -} | { - ok: false, - error: Error -} - -export interface Queryable { - readonly flavour: 'mysql' | 'postgres' - - /** - * Execute a query given as SQL, interpolating the given parameters, - * and returning the type-aware result set of the query. - * - * This is the preferred way of executing `SELECT` queries. - */ - queryRaw(params: Query): Promise> - - /** - * Execute a query given as SQL, interpolating the given parameters, - * and returning the number of affected rows. - * - * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries, - * as well as transactional queries. - */ - executeRaw(params: Query): Promise> -} - -export interface DriverAdapter extends Queryable { - /** - * Starts new transation. - */ - startTransaction(): Promise> - - /** - * Closes the connection to the database, if any. - */ - close: () => Promise> -} - -export type TransactionOptions = { - usePhantomQuery: boolean -} - -export interface Transaction extends Queryable { - /** - * Transaction options. 
- */ - readonly options: TransactionOptions - /** - * Commit the transaction. - */ - commit(): Promise> - /** - * Rolls back the transaction. - */ - rollback(): Promise> -} - -export interface ErrorCapturingDriverAdapter extends DriverAdapter { - readonly errorRegistry: ErrorRegistry -} - -export interface ErrorRegistry { - consumeError(id: number): ErrorRecord | undefined -} - -export type ErrorRecord = { error: unknown } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json deleted file mode 100644 index 2c2e266bdb3b..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration", - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml deleted file mode 100644 index 6a17ebd231fd..000000000000 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ /dev/null @@ -1,6 +0,0 @@ -packages: - - './adapter-neon' - - './adapter-planetscale' - - './adapter-pg' - - './driver-adapter-utils' - - './smoke-test-js' diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example deleted file mode 100644 index 4a73859e999e..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example +++ /dev/null @@ -1,5 +0,0 @@ -export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict" -export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10" - -# Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable` -export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438" diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md deleted file mode 100644 index 62ec1d0439ea..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# @prisma/smoke-test-js - -This is a playground for testing the `libquery` client with the experimental Node.js drivers. -It contains a subset of `@prisma/client`, plus some handy executable smoke tests: -- [`./src/libquery`](./src/libquery): it contains smoke tests using a local `libquery`, the Query Engine library. -- [`./src/client`](./src/client): it contains smoke tests using `@prisma/client`. - -## How to setup - -We assume Node.js `v20.5.1`+ is installed. If not, run `nvm use` in the current directory. -It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon requires either Node.js `v18`+ or a custom `fetch` function. 
- -- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template -- Install Node.js dependencies via - ```bash - pnpm i - ``` -- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine - -### PlanetScale - -- Create a new database on [PlanetScale](https://planetscale.com/) -- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`. - -In the current directory: -- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data. -- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database. -- Run `pnpm planetscale:client` to run smoke tests using `@prisma/client` against the PlanetScale database. - -Note: you used to be able to run these Prisma commands without changing the provider name, but [#4074](https://github.com/prisma/prisma-engines/pull/4074) changed that (see https://github.com/prisma/prisma-engines/pull/4074#issuecomment-1649942475). - -### Neon - -- Create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). -- Paste the connection string to `JS_NEON_DATABASE_URL`. - -In the current directory: -- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data. -- Run `pnpm neon` to run smoke tests using `libquery` against the Neon database. -- Run `pnpm neon:client` to run smoke tests using `@prisma/client` against the Neon database. diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json deleted file mode 100644 index b04840a3cae2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@jkomyno/smoke-test-js", - "private": true, - "type": "module", - "version": "0.0.0", - "description": "", - "scripts": { - "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", - "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", - "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", - "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", - "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "neon:ws": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"tsx ./src/libquery/neon.ws.ts\"", - "neon:http": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"tsx ./src/libquery/neon.http.ts\"", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.http.test.ts", - "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"tsx ./src/libquery/pg.ts\"", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/client/pg.test.ts", - "prisma:planetscale": "cross-env-shell 
DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"tsx ./src/libquery/planetscale.ts\"", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/client/planetscale.test.ts" - }, - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": true, - "dependencies": { - "@jkomyno/prisma-adapter-neon": "workspace:*", - "@jkomyno/prisma-adapter-planetscale": "workspace:*", - "@jkomyno/prisma-adapter-pg": "workspace:*", - "@jkomyno/prisma-driver-adapter-utils": "workspace:*", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/client": "5.3.0-integration-feat-driver-adapters-in-client.1", - "pg": "^8.11.3", - "superjson": "^1.13.1", - "undici": "^5.23.0" - }, - "devDependencies": { - "@types/node": "^20.5.1", - "@types/pg": "^8.10.2", - "cross-env": "^7.0.3", - "prisma": "5.3.0-integration-feat-driver-adapters-in-client.1", - "tsx": "^3.12.7" - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql deleted file mode 100644 index 6641eff216b2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql +++ /dev/null @@ -1,51 +0,0 @@ -INSERT INTO type_test ( - tinyint_column, - smallint_column, - mediumint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - bit_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - year_column, - datetime_column, - timestamp_column, - json_column, - enum_column, - binary_column, - varbinary_column, - blob_column, - set_column -) VALUES ( - 127, -- tinyint - 32767, -- smallint - 8388607, -- mediumint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 1, -- bit - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - 2023, -- year - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3', -- enum - 0x4D7953514C, -- binary - 0x48656C6C6F20, -- varbinary - _binary 'binary', -- blob - 'option1,option3' -- set -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma deleted file mode 100644 index 6681f70e6c69..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ /dev/null @@ -1,116 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - tinyint_column Int @db.TinyInt - tinyint_column_null Int? @db.TinyInt - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - mediumint_column Int @db.MediumInt - mediumint_column_null Int? @db.MediumInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Float - float_column_null Float? @db.Float - double_column Float - double_column_null Float? 
- decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - bit_column Boolean @db.Bit(1) - bit_column_null Boolean? @db.Bit(1) - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String @db.Text - text_column_null String? @db.Text - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - year_column Int @db.Year - year_column_null Int? @db.Year - datetime_column DateTime @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? - binary_column Bytes @db.Binary(64) - binary_column_null Bytes? @db.Binary(64) - varbinary_column Bytes @db.VarBinary(128) - varbinary_column_null Bytes? @db.VarBinary(128) - blob_column Bytes @db.Blob - blob_null Bytes? @db.Blob - set_column String - set_column_null String? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? 
-} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql deleted file mode 100644 index 170bafb9d810..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql +++ /dev/null @@ -1,35 +0,0 @@ -INSERT INTO type_test ( - smallint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - datetime_column, - timestamp_column, - json_column, - enum_column -) VALUES ( - 32767, -- smallint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3' -- enum -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma deleted file mode 100644 index c2564af557e4..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ /dev/null @@ -1,100 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgres" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Real - float_column_null Float? @db.Real - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String - text_column_null String? - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - datetime_column DateTime @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? 
- id String @id - - @@unique([p_1, p_2]) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts deleted file mode 100644 index 8367b43a7acf..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { describe, it } from 'node:test' -import assert from 'node:assert' -import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' - -export async function smokeTestClient(driverAdapter: DriverAdapter) { - const provider = driverAdapter.flavour - - const log = [ - { - emit: 'event', - level: 'query', - } as const, - ] - - for (const adapter of [driverAdapter, undefined]) { - const isUsingDriverAdapters = adapter !== undefined - describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { - it('batch queries', async () => { - const prisma = new PrismaClient({ - // @ts-ignore - jsConnector: adapter, - log, - }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.$queryRawUnsafe('SELECT 1'), - prisma.$queryRawUnsafe('SELECT 2'), - prisma.$queryRawUnsafe('SELECT 3'), - ]) - - const defaultExpectedQueries = [ - 'BEGIN', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - const driverAdapterExpectedQueries = [ - '-- Implicit "BEGIN" query via underlying driver', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - '-- Implicit "COMMIT" query via underlying driver', - ] - - const postgresExpectedQueries = [ - 'BEGIN', - 'DEALLOCATE ALL', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, driverAdapterExpectedQueries) - } else { - assert.deepEqual(queries, defaultExpectedQueries) - } - } else if (['postgres'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, defaultExpectedQueries) - } else { - assert.deepEqual(queries, postgresExpectedQueries) - } - } - }) - - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ - // @ts-ignore - jsConnector: adapter, - log, - }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.child.findMany(), - prisma.child.count(), - ], { - isolationLevel: 'ReadCommitted', - }) - - if (['mysql'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), [ - 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED', - 'BEGIN', - ]) - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), [ - 'BEGIN', - 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED', - ]) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) - }) - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts deleted file mode 100644 index e2de75384b33..000000000000 --- 
a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' -import { smokeTestClient } from './client' - -describe('neon with @prisma/client', async () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - - const connection = neon(connectionString, { - arrayMode: false, - fullResults: true, - }) - const adapter = new PrismaNeonHTTP(connection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts deleted file mode 100644 index fddc42eeadeb..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' -import { WebSocket } from 'undici' -import { smokeTestClient } from './client' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon with @prisma/client', async () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts deleted file mode 100644 index a6652d714370..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import pg from 'pg' -import { PrismaPg } from '@jkomyno/prisma-adapter-pg' -import { smokeTestClient } from './client' - -describe('pg with @prisma/client', async () => { - const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts deleted file mode 100644 index 07a9809b8c07..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('planetscale with @prisma/client', async () => { - const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts deleted file mode 100644 index 5bab74493dee..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { JsonBatchQuery, JsonQuery } from './JsonProtocol' -import * as Transaction from './Transaction' - -// Events -export type QueryEngineEvent = QueryEngineLogEvent | 
QueryEngineQueryEvent | QueryEnginePanicEvent - -export type QueryEngineLogEvent = { - level: string - module_path: string - message: string - span?: boolean -} - -export type QueryEngineQueryEvent = { - level: 'info' - module_path: string - query: string - item_type: 'query' - params: string - duration_ms: string - result: string -} - -export type QueryEnginePanicEvent = { - level: 'error' - module_path: string - message: 'PANIC' - reason: string - file: string - line: string - column: string -} - -// Configuration -export type QueryEngineLogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'off' - -export type QueryEngineTelemetry = { - enabled: Boolean - endpoint: string -} - -export type GraphQLQuery = { - query: string - variables: object -} - -export type EngineProtocol = 'graphql' | 'json' -export type EngineQuery = GraphQLQuery | JsonQuery - -export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] - -export type QueryEngineConfig = { - // TODO rename datamodel here and other places - datamodel: string - configDir: string - logQueries: boolean - ignoreEnvVarErrors: boolean - datasourceOverrides?: Record - env: Record - logLevel: QueryEngineLogLevel - telemetry?: QueryEngineTelemetry - engineProtocol: EngineProtocol -} - -// Errors -export type SyncRustError = { - is_panic: boolean - message: string - meta: { - full_error: string - } - error_code: string -} - -export type RustRequestError = { - is_panic: boolean - message: string - backtrace: string -} - -export type QueryEngineResult = { - data: T - elapsed: number -} - -export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery - -export type QueryEngineBatchGraphQLRequest = { - batch: QueryEngineRequest[] - transaction?: boolean - isolationLevel?: Transaction.IsolationLevel -} - -export type QueryEngineRequest = { - query: string - variables: Object -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts deleted file mode 100644 index f9c7925c9be3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ /dev/null @@ -1,454 +0,0 @@ -import { setTimeout } from 'node:timers/promises' -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import type { QueryEngineInstance } from '../engines/types/Library' -import { initQueryEngine } from './util' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export async function smokeTestLibquery(db: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { - const engine = initQueryEngine(db, prismaSchemaRelativePath) - - console.log('[nodejs] connecting...') - await engine.connect('trace') - console.log('[nodejs] connected') - - const test = new SmokeTest(engine, db) - - await test.testJSON() - await test.testTypeTest2() - await test.testFindManyTypeTest() - await test.createAutoIncrement() - await test.testCreateAndDeleteChildParent() - await test.testTransaction() - await test.testRawError() - - // Note: calling `engine.disconnect` won't actually close the database connection. 
- console.log('[nodejs] disconnecting...') - await engine.disconnect('trace') - console.log('[nodejs] disconnected') - - console.log('[nodejs] re-connecting...') - await engine.connect('trace') - console.log('[nodejs] re-connecting') - - await setTimeout(0) - - console.log('[nodejs] re-disconnecting...') - await engine.disconnect('trace') - console.log('[nodejs] re-disconnected') - - // Close the database connection. This is required to prevent the process from hanging. - console.log('[nodejs] closing database connection...') - await db.close() - console.log('[nodejs] closed database connection') -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } - - async testJSON() { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, - }) - - const created = await this.doQuery( - { - "action": "createOne", - "modelName": "Product", - "query": { - "arguments": { - "data": { - "properties": json, - "properties_null": null - } - }, - "selection": { - "properties": true - } - } - }) - - console.log('[nodejs] created', JSON.stringify(created, null, 2)) - - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "Product", - "query": { - "selection": { - "id": true, - "properties": true, - "properties_null": true - } - } - } - ) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - await this.doQuery( - { - "action": "deleteMany", - "modelName": "Product", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - - return resultSet - } - - async testTypeTest2() { - const create = await this.doQuery( - { - "action": "createOne", - "modelName": "type_test_2", - "query": { - "arguments": { - "data": {} - }, - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true - } - } - } - ) - - console.log('[nodejs] create', JSON.stringify(create, null, 2)) - - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test_2", - "query": { - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true - }, - "arguments": { - "where": {} - } - } - } - ) - - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await this.doQuery( - { - "action": "deleteMany", - "modelName": "type_test_2", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - - return resultSet - } - - async testFindManyTypeTest() { - await this.testFindManyTypeTestMySQL() - await this.testFindManyTypeTestPostgres() - } - - private async testFindManyTypeTestMySQL() { - if (this.flavour !== 'mysql') { - return - } - - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "tinyint_column": true, - "smallint_column": true, - "mediumint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true, - "binary_column": true, - "varbinary_column": true, - "blob_column": true - } - } - }) - - console.log('[nodejs] findMany resultSet', 
JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - private async testFindManyTypeTestPostgres() { - if (this.flavour !== 'postgres') { - return - } - - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "smallint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) - - return resultSet - } - - async createAutoIncrement() { - await this.doQuery( - { - "modelName": "Author", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - - const author = await this.doQuery( - { - "modelName": "Author", - "action": "createOne", - "query": { - "arguments": { - "data": { - "firstName": "Firstname from autoincrement", - "lastName": "Lastname from autoincrement", - "age": 99 - } - }, - "selection": { - "id": true, - "firstName": true, - "lastName": true - } - } - } - ) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - } - - async testCreateAndDeleteChildParent() { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery( - { - "modelName": "Child", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? 
OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await this.doQuery( - { - "modelName": "Parent", - "action": "createOne", - "query": { - "arguments": { - "data": { - "p": "p1", - "p_1": "1", - "p_2": "2", - "childOpt": { - "create": { - "c": "c1", - "c_1": "foo", - "c_2": "bar" - } - } - } - }, - "selection": { - "p": true, - "childOpt": { - "selection": { - "c": true - } - } - } - } - } - ) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' - // ] - const resultDeleteMany = await this.doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": { - "p": "p1" - } - }, - "selection": { - "count": true - } - } - } - ) - console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) - } - - async testTransaction() { - const startResponse = await this.engine.startTransaction(JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), 'trace') - - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await this.doQuery( - { - "action": "findMany", - "modelName": "Author", - "query": { - "selection": { "$scalars": true } - } - }, - tx_id - ) - - const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - } - - async testRawError() { - try { - await this.doQuery({ - action: 'queryRaw', - query: { - selection: { $scalars: true }, - arguments: { - query: 'NOT A VALID SQL, THIS WILL FAIL', - parameters: '[]' - } - } - }) - console.log(`[nodejs] expected exception, but query succeeded`) - } catch (error) { - console.log('[nodejs] caught expected error', error) - } - - } - - private async doQuery(query: JsonQuery, tx_id?: string) { - const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error(`Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`) - } - throw jsError.error - } - } - return parsedResult - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts deleted file mode 100644 index 755289dcd42c..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { smokeTestLibquery } from './libquery' - -async function main() { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - - const neonConnection = neon(connectionString, { - arrayMode: false, - fullResults: true, - }) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts deleted file mode 100644 index 888f29d35e26..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -async function main() { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts deleted file mode 100644 index cc657ddbca33..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts +++ /dev/null @@ -1,19 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@jkomyno/prisma-adapter-pg' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { smokeTestLibquery } from './libquery' - -async function main() { - const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts deleted file mode 100644 index 971c3fa0fb85..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' -import { bindAdapter } from 
'@jkomyno/prisma-driver-adapter-utils' -import { smokeTestLibquery } from './libquery' - -async function main() { - const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` - - const planetscale = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(planetscale) - const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts deleted file mode 100644 index 187d8b86c7f4..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ /dev/null @@ -1,40 +0,0 @@ -import path from 'node:path' -import os from 'node:os' -import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' -import { Library, QueryEngineInstance } from '../engines/types/Library' - -export function initQueryEngine(driver: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string): QueryEngineInstance { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' - const dirname = path.dirname(new URL(import.meta.url).pathname) - - const libQueryEnginePath = path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) - const schemaPath = path.join(dirname, prismaSchemaRelativePath) - - console.log('[nodejs] read Prisma schema from', schemaPath) - - const libqueryEngine = { exports: {} as unknown as Library } - // @ts-ignore - process.dlopen(libqueryEngine, libQueryEnginePath) - - const QueryEngine = libqueryEngine.exports.QueryEngine - - const queryEngineOptions = { - datamodel: fs.readFileSync(schemaPath, 'utf-8'), - configDir: '.', - engineProtocol: 'json' as const, - logLevel: 'info' as const, - logQueries: false, - env: process.env, - ignoreEnvVarErrors: false, - } - - const logCallback = (...args) => { - console.log(args) - } - - const engine = new QueryEngine(queryEngineOptions, logCallback, driver) - - return engine -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json b/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/package.json similarity index 60% rename from query-engine/driver-adapters/js/package.json rename to query-engine/driver-adapters/package.json index 6864220623d4..1362da87700d 100644 --- a/query-engine/driver-adapters/js/package.json +++ b/query-engine/driver-adapters/package.json @@ -5,18 +5,21 @@ "description": "", "engines": { "node": ">=16.13", - "pnpm": ">=8.6.7 <9" + "pnpm": ">=8.6.6 <9" }, "license": "Apache-2.0", "scripts": { "build": "pnpm -r run build", - "lint": "pnpm -r run lint" + "lint": "pnpm -r run lint", + "clean": "git clean -nXd -e !query-engine/driver-adapters" }, "keywords": [], "author": "", "devDependencies": { "@types/node": "^20.5.1", "tsup": "^7.2.0", - "typescript": "^5.1.6" + "typescript": "5.2.2", + "esbuild": "0.19.5", + "esbuild-register": "3.5.0" } } diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml new file mode 100644 index 
000000000000..d37910ea5ae6
--- /dev/null
+++ b/query-engine/driver-adapters/pnpm-workspace.yaml
@@ -0,0 +1,8 @@
+packages:
+  - '../../../prisma/packages/adapter-libsql'
+  - '../../../prisma/packages/adapter-neon'
+  - '../../../prisma/packages/adapter-pg'
+  - '../../../prisma/packages/adapter-planetscale'
+  - '../../../prisma/packages/driver-adapter-utils'
+  - '../../../prisma/packages/debug'
+  - './connector-test-kit-executor'
\ No newline at end of file
diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs
index c64954e389cf..f65cc955fb21 100644
--- a/query-engine/driver-adapters/src/conversion.rs
+++ b/query-engine/driver-adapters/src/conversion.rs
@@ -1,5 +1,7 @@
+pub(crate) mod postgres;
+
 use napi::bindgen_prelude::{FromNapiValue, ToNapiValue};
-use quaint::ast::Value as QuaintValue;
+use napi::NapiValue;
 use serde::Serialize;
 use serde_json::value::Value as JsonValue;
@@ -8,6 +10,8 @@
 pub enum JSArg {
     RawString(String),
     Value(serde_json::Value),
+    Buffer(Vec<u8>),
+    Array(Vec<JSArg>),
 }
 impl From<serde_json::Value> for JSArg {
@@ -17,8 +21,8 @@
 }
 // FromNapiValue is the napi equivalent to serde::Deserialize.
-// Note: we can safely leave this unimplemented as we don't need deserialize JSArg back to napi_value
-// (nor we need to). However, removing this altogether would cause a compile error.
+// Note: we can safely leave this unimplemented as we don't need deserialize napi_value back to JSArg.
+// However, removing this altogether would cause a compile error.
 impl FromNapiValue for JSArg {
     unsafe fn from_napi_value(_env: napi::sys::napi_env, _napi_value: napi::sys::napi_value) -> napi::Result<Self> {
         unreachable!()
@@ -31,30 +35,59 @@ impl ToNapiValue for JSArg {
         match value {
             JSArg::RawString(s) => ToNapiValue::to_napi_value(env, s),
             JSArg::Value(v) => ToNapiValue::to_napi_value(env, v),
+            JSArg::Buffer(bytes) => {
+                ToNapiValue::to_napi_value(env, napi::Env::from_raw(env).create_buffer_with_data(bytes)?.into_raw())
+            }
+            // While arrays are encodable as JSON generally, their element might not be, or may be
+            // represented in a different way than we need. We use this custom logic for all arrays
+            // to avoid having separate `JsonArray` and `BytesArray` variants in `JSArg` and
+            // avoid complicating the logic in `conv_params`.
+            JSArg::Array(items) => {
+                let env = napi::Env::from_raw(env);
+                let mut array = env.create_array(items.len().try_into().expect("JS array length must fit into u32"))?;
+
+                for (index, item) in items.into_iter().enumerate() {
+                    let js_value = ToNapiValue::to_napi_value(env.raw(), item)?;
+                    // TODO: NapiRaw could be implemented for sys::napi_value directly, there should
+                    // be no need for re-wrapping; submit a patch to napi-rs and simplify here.
+                    array.set(index as u32, napi::JsUnknown::from_raw_unchecked(env.raw(), js_value))?;
+                }
+
+                ToNapiValue::to_napi_value(env.raw(), array)
+            }
         }
     }
 }
-pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result<Vec<JSArg>> {
-    let mut values = Vec::with_capacity(params.len());
+pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result<Vec<JSArg>> {
+    let mut args = Vec::with_capacity(values.len());
-    for pv in params {
-        let res = match pv {
-            QuaintValue::Json(s) => match s {
+    for qv in values {
+        let res = match &qv.typed {
+            quaint::ValueType::Json(s) => match s {
                 Some(ref s) => {
                     let json_str = serde_json::to_string(s)?;
                     JSArg::RawString(json_str)
                 }
                 None => JsonValue::Null.into(),
             },
-            quaint_value => {
-                let json: JsonValue = quaint_value.clone().into();
-                json.into()
-            }
+            quaint::ValueType::Bytes(bytes) => match bytes {
+                Some(bytes) => JSArg::Buffer(bytes.to_vec()),
+                None => JsonValue::Null.into(),
+            },
+            quaint_value @ quaint::ValueType::Numeric(bd) => match bd {
+                Some(bd) => match bd.to_string().parse::<f64>() {
+                    Ok(double) => JSArg::from(JsonValue::from(double)),
+                    Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())),
+                },
+                None => JsonValue::Null.into(),
+            },
+            quaint::ValueType::Array(Some(items)) => JSArg::Array(values_to_js_args(items)?),
+            quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())),
         };
-        values.push(res);
+        args.push(res);
     }
-    Ok(values)
+    Ok(args)
 }
diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs
new file mode 100644
index 000000000000..21b1ec6b2fb9
--- /dev/null
+++ b/query-engine/driver-adapters/src/conversion/postgres.rs
@@ -0,0 +1,52 @@
+use crate::conversion::JSArg;
+use chrono::format::StrftimeItems;
+use once_cell::sync::Lazy;
+use serde_json::value::Value as JsonValue;
+
+static TIME_FMT: Lazy<StrftimeItems> = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f"));
+
+pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result<Vec<JSArg>> {
+    let mut args = Vec::with_capacity(values.len());
+
+    for qv in values {
+        let res = match (&qv.typed, qv.native_column_type_name()) {
+            (quaint::ValueType::DateTime(value), Some("DATE")) => match value {
+                Some(value) => JSArg::RawString(value.date_naive().to_string()),
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::DateTime(value), Some("TIME")) => match value {
+                Some(value) => JSArg::RawString(value.time().to_string()),
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::DateTime(value), Some("TIMETZ")) => match value {
+                Some(value) => JSArg::RawString(value.time().format_with_items(TIME_FMT.clone()).to_string()),
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::DateTime(value), _) => match value {
+                Some(value) => JSArg::RawString(value.naive_utc().to_string()),
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::Json(s), _) => match s {
+                Some(ref s) => {
+                    let json_str = serde_json::to_string(s)?;
+                    JSArg::RawString(json_str)
+                }
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::Bytes(bytes), _) => match bytes {
+                Some(bytes) => JSArg::Buffer(bytes.to_vec()),
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::Numeric(bd), _) => match bd {
+                Some(bd) => JSArg::RawString(bd.to_string()),
+                None => JsonValue::Null.into(),
+            },
+            (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?),
+            (quaint_value, _) => JSArg::from(JsonValue::from(quaint_value.clone())),
+        };
+
+        args.push(res);
+    }
+
+    Ok(args)
+}
diff --git 
a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index bc17e2963236..da03336bdf53 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -1,17 +1,21 @@ -use core::panic; +use std::borrow::Cow; use std::str::FromStr; use crate::async_js_function::AsyncJsFunction; use crate::conversion::JSArg; use crate::transaction::JsTransaction; use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; +use napi::threadsafe_function::{ErrorStrategy, ThreadsafeFunction}; use napi::{JsObject, JsString}; use napi_derive::napi; use quaint::connector::ResultSet as QuaintResultSet; -use quaint::Value as QuaintValue; +use quaint::{ + error::{Error as QuaintError, ErrorKind}, + Value as QuaintValue, +}; // TODO(jkomyno): import these 3rd-party crates from the `quaint-core` crate. -use bigdecimal::BigDecimal; +use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, Utc}; use chrono::{NaiveDate, NaiveTime}; @@ -44,8 +48,12 @@ pub(crate) struct TransactionProxy { /// commit transaction commit: AsyncJsFunction<(), ()>, - /// rollback transcation + /// rollback transaction rollback: AsyncJsFunction<(), ()>, + + /// dispose transaction, cleanup logic executed at the end of the transaction lifecycle + /// on drop. + dispose: ThreadsafeFunction<(), ErrorStrategy::Fatal>, } /// This result set is more convenient to be manipulated from both Rust and NodeJS. @@ -87,69 +95,129 @@ pub enum ColumnType { /// - INT16 (SMALLINT) -> e.g. `32767` /// - INT24 (MEDIUMINT) -> e.g. `8388607` /// - INT32 (INT) -> e.g. `2147483647` - Int32, + Int32 = 0, /// The following PlanetScale type IDs are mapped into Int64: /// - INT64 (BIGINT) -> e.g. `"9223372036854775807"` (String-encoded) - Int64, + Int64 = 1, /// The following PlanetScale type IDs are mapped into Float: /// - FLOAT32 (FLOAT) -> e.g. `3.402823466` - Float, + Float = 2, /// The following PlanetScale type IDs are mapped into Double: /// - FLOAT64 (DOUBLE) -> e.g. `1.7976931348623157` - Double, + Double = 3, /// The following PlanetScale type IDs are mapped into Numeric: /// - DECIMAL (DECIMAL) -> e.g. `"99999999.99"` (String-encoded) - Numeric, + Numeric = 4, /// The following PlanetScale type IDs are mapped into Boolean: /// - BOOLEAN (BOOLEAN) -> e.g. `1` - Boolean, + Boolean = 5, - /// The following PlanetScale type IDs are mapped into Char: - /// - CHAR (CHAR) -> e.g. `"c"` (String-encoded) - Char, + Character = 6, /// The following PlanetScale type IDs are mapped into Text: /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) /// - VARCHAR (VARCHAR) -> e.g. `"foo"` (String-encoded) - Text, + Text = 7, /// The following PlanetScale type IDs are mapped into Date: /// - DATE (DATE) -> e.g. `"2023-01-01"` (String-encoded, yyyy-MM-dd) - Date, + Date = 8, /// The following PlanetScale type IDs are mapped into Time: /// - TIME (TIME) -> e.g. `"23:59:59"` (String-encoded, HH:mm:ss) - Time, + Time = 9, /// The following PlanetScale type IDs are mapped into DateTime: /// - DATETIME (DATETIME) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) /// - TIMESTAMP (TIMESTAMP) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) - DateTime, + DateTime = 10, /// The following PlanetScale type IDs are mapped into Json: /// - JSON (JSON) -> e.g. `"{\"key\": \"value\"}"` (String-encoded) - Json, + Json = 11, /// The following PlanetScale type IDs are mapped into Enum: /// - ENUM (ENUM) -> e.g. 
`"foo"` (String-encoded) - Enum, + Enum = 12, /// The following PlanetScale type IDs are mapped into Bytes: /// - BLOB (BLOB) -> e.g. `"\u0012"` (String-encoded) /// - VARBINARY (VARBINARY) -> e.g. `"\u0012"` (String-encoded) /// - BINARY (BINARY) -> e.g. `"\u0012"` (String-encoded) /// - GEOMETRY (GEOMETRY) -> e.g. `"\u0012"` (String-encoded) - Bytes, + Bytes = 13, /// The following PlanetScale type IDs are mapped into Set: /// - SET (SET) -> e.g. `"foo,bar"` (String-encoded, comma-separated) /// This is currently unhandled, and will panic if encountered. - Set, + Set = 14, + + /// UUID from postgres-flavored driver adapters is mapped to this type. + Uuid = 15, + + /* + * Scalar arrays + */ + /// Int32 array (INT2_ARRAY and INT4_ARRAY in PostgreSQL) + Int32Array = 64, + + /// Int64 array (INT8_ARRAY in PostgreSQL) + Int64Array = 65, + + /// Float array (FLOAT4_ARRAY in PostgreSQL) + FloatArray = 66, + + /// Double array (FLOAT8_ARRAY in PostgreSQL) + DoubleArray = 67, + + /// Numeric array (NUMERIC_ARRAY, MONEY_ARRAY etc in PostgreSQL) + NumericArray = 68, + + /// Boolean array (BOOL_ARRAY in PostgreSQL) + BooleanArray = 69, + + /// Char array (CHAR_ARRAY in PostgreSQL) + CharacterArray = 70, + + /// Text array (TEXT_ARRAY in PostgreSQL) + TextArray = 71, + + /// Date array (DATE_ARRAY in PostgreSQL) + DateArray = 72, + + /// Time array (TIME_ARRAY in PostgreSQL) + TimeArray = 73, + + /// DateTime array (TIMESTAMP_ARRAY in PostgreSQL) + DateTimeArray = 74, + + /// Json array (JSON_ARRAY in PostgreSQL) + JsonArray = 75, + + /// Enum array + EnumArray = 76, + + /// Bytes array (BYTEA_ARRAY in PostgreSQL) + BytesArray = 77, + + /// Uuid array (UUID_ARRAY in PostgreSQL) + UuidArray = 78, + + /* + * Below there are custom types that don't have a 1:1 translation with a quaint::Value. + * enum variant. + */ + /// UnknownNumber is used when the type of the column is a number but of unknown particular type + /// and precision. + /// + /// It's used by some driver adapters, like libsql to return aggregation values like AVG, or + /// COUNT, and it can be mapped to either Int64, or Double + UnknownNumber = 128, } #[napi(object)] @@ -159,13 +227,27 @@ pub struct Query { pub args: Vec, } +fn conversion_error(args: &std::fmt::Arguments) -> QuaintError { + let msg = match args.as_str() { + Some(s) => Cow::Borrowed(s), + None => Cow::Owned(args.to_string()), + }; + QuaintError::builder(ErrorKind::ConversionError(msg)).build() +} + +macro_rules! conversion_error { + ($($arg:tt)*) => { + conversion_error(&format_args!($($arg)*)) + }; +} + /// Handle data-type conversion from a JSON value to a Quaint value. /// This is used for most data types, except those that require connector-specific handling, e.g., `ColumnType::Boolean`. fn js_value_to_quaint( json_value: serde_json::Value, column_type: ColumnType, column_name: &str, -) -> QuaintValue<'static> { +) -> quaint::Result> { // Note for the future: it may be worth revisiting how much bloat so many panics with different static // strings add to the compiled artefact, and in case we should come up with a restricted set of panic // messages, or even find a way of removing them altogether. 
@@ -173,105 +255,241 @@ fn js_value_to_quaint(
         ColumnType::Int32 => match json_value {
             serde_json::Value::Number(n) => {
                 // n.as_i32() is not implemented, so we need to downcast from i64 instead
-                QuaintValue::int32(n.as_i64().expect("number must be an i32") as i32)
+                n.as_i64()
+                    .ok_or(conversion_error!("number must be an integer in column '{column_name}'"))
+                    .and_then(|n| -> quaint::Result<i32> {
+                        n.try_into()
+                            .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}"))
+                    })
+                    .map(QuaintValue::int32)
             }
-            serde_json::Value::Null => QuaintValue::Int32(None),
-            mismatch => panic!("Expected an i32 number in column {}, found {}", column_name, mismatch),
+            serde_json::Value::String(s) => s.parse::<i32>().map(QuaintValue::int32).map_err(|e| {
+                conversion_error!("string-encoded number must be an i32 in column '{column_name}', got {s}: {e}")
+            }),
+            serde_json::Value::Null => Ok(QuaintValue::null_int32()),
+            mismatch => Err(conversion_error!(
+                "expected an i32 number in column '{column_name}', found {mismatch}"
+            )),
         },
         ColumnType::Int64 => match json_value {
-            serde_json::Value::String(s) => {
-                let n = s.parse::<i64>().expect("string-encoded number must be an i64");
-                QuaintValue::int64(n)
-            }
-            serde_json::Value::Null => QuaintValue::Int64(None),
-            mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch),
+            serde_json::Value::Number(n) => n.as_i64().map(QuaintValue::int64).ok_or(conversion_error!(
+                "number must be an i64 in column '{column_name}', got {n}"
+            )),
+            serde_json::Value::String(s) => s.parse::<i64>().map(QuaintValue::int64).map_err(|e| {
+                conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}")
+            }),
+            serde_json::Value::Null => Ok(QuaintValue::null_int64()),
+            mismatch => Err(conversion_error!(
+                "expected a string or number in column '{column_name}', found {mismatch}"
+            )),
         },
         ColumnType::Float => match json_value {
             // n.as_f32() is not implemented, so we need to downcast from f64 instead.
             // We assume that the JSON value is a valid f32 number, but we check for overflows anyway.
- serde_json::Value::Number(n) => QuaintValue::float(f64_to_f32(n.as_f64().expect("number must be a f64"))), - serde_json::Value::Null => QuaintValue::Float(None), - mismatch => panic!("Expected a f32 number in column {}, found {}", column_name, mismatch), + serde_json::Value::Number(n) => n + .as_f64() + .ok_or(conversion_error!( + "number must be a float in column '{column_name}', got {n}" + )) + .and_then(f64_to_f32) + .map(QuaintValue::float), + serde_json::Value::Null => Ok(QuaintValue::null_float()), + mismatch => Err(conversion_error!( + "expected an f32 number in column '{column_name}', found {mismatch}" + )), }, ColumnType::Double => match json_value { - serde_json::Value::Number(n) => QuaintValue::double(n.as_f64().expect("number must be a f64")), - serde_json::Value::Null => QuaintValue::Double(None), - mismatch => panic!("Expected a f64 number in column {}, found {}", column_name, mismatch), + serde_json::Value::Number(n) => n.as_f64().map(QuaintValue::double).ok_or(conversion_error!( + "number must be a f64 in column '{column_name}', got {n}" + )), + serde_json::Value::Null => Ok(QuaintValue::null_double()), + mismatch => Err(conversion_error!( + "expected an f64 number in column '{column_name}', found {mismatch}" + )), }, ColumnType::Numeric => match json_value { - serde_json::Value::String(s) => { - let decimal = BigDecimal::from_str(&s).expect("invalid numeric value"); - QuaintValue::numeric(decimal) - } - serde_json::Value::Null => QuaintValue::Numeric(None), - mismatch => panic!( - "Expected a string-encoded number in column {}, found {}", - column_name, mismatch - ), + serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { + conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") + }), + serde_json::Value::Number(n) => n + .as_f64() + .and_then(BigDecimal::from_f64) + .ok_or(conversion_error!( + "number must be an f64 in column '{column_name}', got {n}" + )) + .map(QuaintValue::numeric), + serde_json::Value::Null => Ok(QuaintValue::null_numeric()), + mismatch => Err(conversion_error!( + "expected a string-encoded number in column '{column_name}', found {mismatch}", + )), }, ColumnType::Boolean => match json_value { - serde_json::Value::Bool(b) => QuaintValue::boolean(b), - serde_json::Value::Null => QuaintValue::Boolean(None), - mismatch => panic!("Expected a boolean in column {}, found {}", column_name, mismatch), + serde_json::Value::Bool(b) => Ok(QuaintValue::boolean(b)), + serde_json::Value::Null => Ok(QuaintValue::null_boolean()), + serde_json::Value::Number(n) => match n.as_i64() { + Some(0) => Ok(QuaintValue::boolean(false)), + Some(1) => Ok(QuaintValue::boolean(true)), + _ => Err(conversion_error!( + "expected number-encoded boolean to be 0 or 1 in column '{column_name}', got {n}" + )), + }, + serde_json::Value::String(s) => match s.as_str() { + "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), + "true" | "TRUE" | "1" => Ok(QuaintValue::boolean(true)), + _ => Err(conversion_error!( + "expected string-encoded boolean in column '{column_name}', got {s}" + )), + }, + mismatch => Err(conversion_error!( + "expected a boolean in column '{column_name}', found {mismatch}" + )), }, - ColumnType::Char => match json_value { - serde_json::Value::String(s) => QuaintValue::Char(s.chars().next()), - serde_json::Value::Null => QuaintValue::Char(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + ColumnType::Character => match json_value { + 
serde_json::Value::String(s) => match s.chars().next() { + Some(c) => Ok(QuaintValue::character(c)), + None => Ok(QuaintValue::null_character()), + }, + serde_json::Value::Null => Ok(QuaintValue::null_character()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), }, ColumnType::Text => match json_value { - serde_json::Value::String(s) => QuaintValue::text(s), - serde_json::Value::Null => QuaintValue::Text(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => Ok(QuaintValue::text(s)), + serde_json::Value::Null => Ok(QuaintValue::null_text()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), }, ColumnType::Date => match json_value { - serde_json::Value::String(s) => { - let date = NaiveDate::parse_from_str(&s, "%Y-%m-%d").expect("Expected a date string"); - QuaintValue::date(date) - } - serde_json::Value::Null => QuaintValue::Date(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") + .map(QuaintValue::date) + .map_err(|_| conversion_error!("expected a date string in column '{column_name}', got {s}")), + serde_json::Value::Null => Ok(QuaintValue::null_date()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), }, ColumnType::Time => match json_value { - serde_json::Value::String(s) => { - let time = NaiveTime::parse_from_str(&s, "%H:%M:%S").expect("Expected a time string"); - QuaintValue::time(time) - } - serde_json::Value::Null => QuaintValue::Time(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S%.f") + .map(QuaintValue::time) + .map_err(|_| conversion_error!("expected a time string in column '{column_name}', got {s}")), + serde_json::Value::Null => Ok(QuaintValue::null_time()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), }, ColumnType::DateTime => match json_value { - serde_json::Value::String(s) => { - let datetime = chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") - .unwrap_or_else(|_| panic!("Expected a datetime string, found {:?}", &s)); - let datetime: DateTime = DateTime::from_utc(datetime, Utc); - QuaintValue::datetime(datetime) - } - serde_json::Value::Null => QuaintValue::DateTime(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), - }, - ColumnType::Json => match json_value { - serde_json::Value::Null => QuaintValue::Json(None), - json => QuaintValue::json(json), + // TODO: change parsing order to prefer RFC3339 + serde_json::Value::String(s) => chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") + .map(|dt| DateTime::from_utc(dt, Utc)) + .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) + .map(QuaintValue::datetime) + .map_err(|_| conversion_error!("expected a datetime string in column '{column_name}', found {s}")), + serde_json::Value::Null => Ok(QuaintValue::null_datetime()), + mismatch => Err(conversion_error!( + "expected a string in column '{column_name}', found {mismatch}" + )), }, + ColumnType::Json => { + match json_value { + // DbNull + serde_json::Value::Null => Ok(QuaintValue::null_json()), + // JsonNull + 
serde_json::Value::String(s) if s == "$__prisma_null" => Ok(QuaintValue::json(serde_json::Value::Null)),
+                json => Ok(QuaintValue::json(json)),
+            }
+        }
         ColumnType::Enum => match json_value {
-            serde_json::Value::String(s) => QuaintValue::enum_variant(s),
-            serde_json::Value::Null => QuaintValue::Enum(None),
-            mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch),
+            serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)),
+            serde_json::Value::Null => Ok(QuaintValue::null_enum()),
+            mismatch => Err(conversion_error!(
+                "expected a string in column '{column_name}', found {mismatch}"
+            )),
         },
         ColumnType::Bytes => match json_value {
-            serde_json::Value::String(s) => QuaintValue::Bytes(Some(s.into_bytes().into())),
-            serde_json::Value::Null => QuaintValue::Bytes(None),
-            mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch),
+            serde_json::Value::String(s) => Ok(QuaintValue::bytes(s.into_bytes())),
+            serde_json::Value::Array(array) => array
+                .iter()
+                .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok()))
+                .collect::<Option<Vec<u8>>>()
+                .map(QuaintValue::bytes)
+                .ok_or(conversion_error!(
+                    "elements of the array in column '{column_name}' must be u8"
+                )),
+            serde_json::Value::Null => Ok(QuaintValue::null_bytes()),
+            mismatch => Err(conversion_error!(
+                "expected a string or an array in column '{column_name}', found {mismatch}",
+            )),
         },
+        ColumnType::Uuid => match json_value {
+            serde_json::Value::String(s) => uuid::Uuid::parse_str(&s)
+                .map(QuaintValue::uuid)
+                .map_err(|_| conversion_error!("Expected a UUID string in column '{column_name}'")),
+            serde_json::Value::Null => Ok(QuaintValue::null_bytes()),
+            mismatch => Err(conversion_error!(
+                "Expected a UUID string in column '{column_name}', found {mismatch}"
+            )),
+        },
+        ColumnType::UnknownNumber => match json_value {
+            serde_json::Value::Number(n) => n
+                .as_i64()
+                .map(QuaintValue::int64)
+                .or(n.as_f64().map(QuaintValue::double))
+                .ok_or(conversion_error!(
+                    "number must be an i64 or f64 in column '{column_name}', got {n}"
+                )),
+            mismatch => Err(conversion_error!(
+                "expected either an i64 or an f64 in column '{column_name}', found {mismatch}",
+            )),
+        },
+
+        ColumnType::Int32Array => js_array_to_quaint(ColumnType::Int32, json_value, column_name),
+        ColumnType::Int64Array => js_array_to_quaint(ColumnType::Int64, json_value, column_name),
+        ColumnType::FloatArray => js_array_to_quaint(ColumnType::Float, json_value, column_name),
+        ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name),
+        ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name),
+        ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name),
+        ColumnType::CharacterArray => js_array_to_quaint(ColumnType::Character, json_value, column_name),
+        ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name),
+        ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name),
+        ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name),
+        ColumnType::DateTimeArray => js_array_to_quaint(ColumnType::DateTime, json_value, column_name),
+        ColumnType::JsonArray => js_array_to_quaint(ColumnType::Json, json_value, column_name),
+        ColumnType::EnumArray => js_array_to_quaint(ColumnType::Enum, json_value, column_name),
+        ColumnType::BytesArray => js_array_to_quaint(ColumnType::Bytes, json_value, column_name),
ColumnType::UuidArray => js_array_to_quaint(ColumnType::Uuid, json_value, column_name), + unimplemented => { todo!("support column type {:?} in column {}", unimplemented, column_name) } } } -impl From for QuaintResultSet { - fn from(js_result_set: JSResultSet) -> Self { +fn js_array_to_quaint( + base_type: ColumnType, + json_value: serde_json::Value, + column_name: &str, +) -> quaint::Result> { + match json_value { + serde_json::Value::Array(array) => Ok(QuaintValue::array( + array + .into_iter() + .enumerate() + .map(|(index, elem)| js_value_to_quaint(elem, base_type, &format!("{column_name}[{index}]"))) + .collect::>>()?, + )), + serde_json::Value::Null => Ok(QuaintValue::null_array()), + mismatch => Err(conversion_error!( + "expected an array in column '{column_name}', found {mismatch}", + )), + } +} + +impl TryFrom for QuaintResultSet { + type Error = quaint::error::Error; + + fn try_from(js_result_set: JSResultSet) -> Result { let JSResultSet { rows, column_names, @@ -288,7 +506,7 @@ impl From for QuaintResultSet { let column_type = column_types[i]; let column_name = column_names[i].as_str(); - quaint_row.push(js_value_to_quaint(row, column_type, column_name)); + quaint_row.push(js_value_to_quaint(row, column_type, column_name)?); } quaint_rows.push(quaint_row); @@ -304,7 +522,7 @@ impl From for QuaintResultSet { quaint_result_set.set_last_insert_id(last_insert_id); } - quaint_result_set + Ok(quaint_result_set) } } @@ -353,11 +571,13 @@ impl TransactionProxy { pub fn new(js_transaction: &JsObject) -> napi::Result { let commit = js_transaction.get_named_property("commit")?; let rollback = js_transaction.get_named_property("rollback")?; - let options: TransactionOptions = js_transaction.get_named_property("options")?; + let dispose = js_transaction.get_named_property("dispose")?; + let options = js_transaction.get_named_property("options")?; Ok(Self { commit, rollback, + dispose, options, }) } @@ -369,21 +589,31 @@ impl TransactionProxy { pub async fn commit(&self) -> quaint::Result<()> { self.commit.call(()).await } + pub async fn rollback(&self) -> quaint::Result<()> { self.rollback.call(()).await } } +impl Drop for TransactionProxy { + fn drop(&mut self) { + _ = self + .dispose + .call((), napi::threadsafe_function::ThreadsafeFunctionCallMode::NonBlocking); + } +} + /// Coerce a `f64` to a `f32`, asserting that the conversion is lossless. /// Note that, when overflow occurs during conversion, the result is `infinity`. 
-fn f64_to_f32(x: f64) -> f32 { +fn f64_to_f32(x: f64) -> quaint::Result { let y = x as f32; - assert_eq!(x.is_finite(), y.is_finite(), "f32 overflow during conversion"); - - y + if x.is_finite() == y.is_finite() { + Ok(y) + } else { + Err(conversion_error!("f32 overflow during conversion")) + } } - #[cfg(test)] mod proxy_test { use num_bigint::BigInt; @@ -392,10 +622,10 @@ mod proxy_test { use super::*; #[track_caller] - fn test_null(quaint_none: QuaintValue, column_type: ColumnType) { + fn test_null<'a, T: Into>>(quaint_none: T, column_type: ColumnType) { let json_value = serde_json::Value::Null; - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, quaint_none); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, quaint_none.into()); } #[test] @@ -403,25 +633,31 @@ mod proxy_test { let column_type = ColumnType::Int32; // null - test_null(QuaintValue::Int32(None), column_type); + test_null(QuaintValue::null_int32(), column_type); // 0 let n: i32 = 0; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); // max let n: i32 = i32::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); // min let n: i32 = i32::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); + + // string-encoded + let n = i32::MAX; + let json_value = serde_json::Value::String(n.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int32(n)); } #[test] @@ -429,25 +665,31 @@ mod proxy_test { let column_type = ColumnType::Int64; // null - test_null(QuaintValue::Int64(None), column_type); + test_null(QuaintValue::null_int64(), column_type); // 0 let n: i64 = 0; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); // max let n: i64 = i64::MAX; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); // min let n: i64 = i64::MIN; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - 
assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); + + // number-encoded + let n: i64 = (1 << 53) - 1; // max JS safe integer + let json_value = serde_json::Value::Number(serde_json::Number::from(n)); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::int64(n)); } #[test] @@ -455,25 +697,25 @@ mod proxy_test { let column_type = ColumnType::Float; // null - test_null(QuaintValue::Float(None), column_type); + test_null(QuaintValue::null_float(), column_type); // 0 let n: f32 = 0.0; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Float(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::float(n)); // max let n: f32 = f32::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Float(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::float(n)); // min let n: f32 = f32::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Float(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::float(n)); } #[test] @@ -481,25 +723,25 @@ mod proxy_test { let column_type = ColumnType::Double; // null - test_null(QuaintValue::Double(None), column_type); + test_null(QuaintValue::null_double(), column_type); // 0 let n: f64 = 0.0; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Double(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::double(n)); // max let n: f64 = f64::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Double(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::double(n)); // min let n: f64 = f64::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Double(Some(n))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::double(n)); } #[test] @@ -507,21 +749,21 @@ mod proxy_test { let column_type = ColumnType::Numeric; // null - test_null(QuaintValue::Numeric(None), column_type); + test_null(QuaintValue::null_numeric(), column_type); let n_as_string = "1234.99"; let decimal = 
BigDecimal::new(BigInt::parse_bytes(b"123499", 10).unwrap(), 2); let json_value = serde_json::Value::String(n_as_string.into()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Numeric(Some(decimal))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::numeric(decimal)); let n_as_string = "1234.999999"; let decimal = BigDecimal::new(BigInt::parse_bytes(b"1234999999", 10).unwrap(), 6); let json_value = serde_json::Value::String(n_as_string.into()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Numeric(Some(decimal))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::numeric(decimal)); } #[test] @@ -529,32 +771,32 @@ mod proxy_test { let column_type = ColumnType::Boolean; // null - test_null(QuaintValue::Boolean(None), column_type); + test_null(QuaintValue::null_boolean(), column_type); // true - let bool_val = true; - let json_value = serde_json::Value::Bool(bool_val); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Boolean(Some(bool_val))); + for truthy_value in [json!(true), json!(1), json!("true"), json!("TRUE"), json!("1")] { + let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::boolean(true)); + } // false - let bool_val = false; - let json_value = serde_json::Value::Bool(bool_val); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Boolean(Some(bool_val))); + for falsy_value in [json!(false), json!(0), json!("false"), json!("FALSE"), json!("0")] { + let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::boolean(false)); + } } #[test] fn js_value_char_to_quaint() { - let column_type = ColumnType::Char; + let column_type = ColumnType::Character; // null - test_null(QuaintValue::Char(None), column_type); + test_null(QuaintValue::null_character(), column_type); let c = 'c'; let json_value = serde_json::Value::String(c.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Char(Some(c))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::character(c)); } #[test] @@ -562,12 +804,12 @@ mod proxy_test { let column_type = ColumnType::Text; // null - test_null(QuaintValue::Text(None), column_type); + test_null(QuaintValue::null_text(), column_type); let s = "some text"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Text(Some(s.into()))); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::text(s)); } #[test] @@ -575,14 +817,14 @@ mod proxy_test { let column_type = ColumnType::Date; // null - test_null(QuaintValue::Date(None), column_type); + test_null(QuaintValue::null_date(), column_type); let s = "2023-01-01"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, 
"column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); - assert_eq!(quaint_value, QuaintValue::Date(Some(date))); + assert_eq!(quaint_value, QuaintValue::date(date)); } #[test] @@ -590,14 +832,19 @@ mod proxy_test { let column_type = ColumnType::Time; // null - test_null(QuaintValue::Time(None), column_type); + test_null(QuaintValue::null_time(), column_type); let s = "23:59:59"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); - assert_eq!(quaint_value, QuaintValue::Time(Some(time))); + assert_eq!(quaint_value, QuaintValue::time(time)); + + let s = "13:02:20.321"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + let time: NaiveTime = NaiveTime::from_hms_milli_opt(13, 02, 20, 321).unwrap(); + assert_eq!(quaint_value, QuaintValue::time(time)); } #[test] @@ -605,40 +852,40 @@ mod proxy_test { let column_type = ColumnType::DateTime; // null - test_null(QuaintValue::DateTime(None), column_type); + test_null(QuaintValue::null_datetime(), column_type); let s = "2023-01-01 23:59:59.415"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) .unwrap() .and_hms_milli_opt(23, 59, 59, 415) .unwrap(); let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::DateTime(Some(datetime))); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); let s = "2023-01-01 23:59:59.123456"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) .unwrap() .and_hms_micro_opt(23, 59, 59, 123_456) .unwrap(); let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::DateTime(Some(datetime))); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); let s = "2023-01-01 23:59:59"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) .unwrap() .and_hms_milli_opt(23, 59, 59, 0) .unwrap(); let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::DateTime(Some(datetime))); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); } #[test] @@ -646,7 +893,7 @@ mod proxy_test { let column_type = ColumnType::Json; // null - test_null(QuaintValue::Json(None), column_type); + test_null(QuaintValue::null_json(), column_type); let json = json!({ "key": "value", @@ -658,8 +905,8 @@ mod proxy_test { ] }); let json_value = json.clone(); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Json(Some(json.clone()))); + let quaint_value = 
js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::json(json.clone())); } #[test] @@ -667,11 +914,60 @@ mod proxy_test { let column_type = ColumnType::Enum; // null - test_null(QuaintValue::Enum(None), column_type); + test_null(QuaintValue::null_enum(), column_type); let s = "some enum variant"; let json_value = serde_json::Value::String(s.to_string()); + + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + assert_eq!(quaint_value, QuaintValue::enum_variant(s)); + } + + #[test] + fn js_int32_array_to_quaint() { + let column_type = ColumnType::Int32Array; + test_null(QuaintValue::null_array(), column_type); + + let json_value = json!([1, 2, 3]); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + assert_eq!( + quaint_value, + QuaintValue::array(vec![ + QuaintValue::int32(1), + QuaintValue::int32(2), + QuaintValue::int32(3) + ]) + ); + + let json_value = json!([1, 2, {}]); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); - assert_eq!(quaint_value, QuaintValue::Enum(Some(s.into()))); + + assert_eq!( + quaint_value.err().unwrap().to_string(), + "Conversion failed: expected an i32 number in column 'column_name[2]', found {}" + ); + } + + #[test] + fn js_text_array_to_quaint() { + let column_type = ColumnType::TextArray; + test_null(QuaintValue::null_array(), column_type); + + let json_value = json!(["hi", "there"]); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + + assert_eq!( + quaint_value, + QuaintValue::array(vec![QuaintValue::text("hi"), QuaintValue::text("there"),]) + ); + + let json_value = json!([10]); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + + assert_eq!( + quaint_value.err().unwrap().to_string(), + "Conversion failed: expected a string in column 'column_name[0]', found 10" + ); } } diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index 2a1d6755f684..864ba5042083 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -6,14 +6,10 @@ use async_trait::async_trait; use napi::JsObject; use psl::datamodel_connector::Flavour; use quaint::{ - connector::{ - metrics::{self}, - IsolationLevel, Transaction, - }, + connector::{metrics, IsolationLevel, Transaction}, error::{Error, ErrorKind}, prelude::{Query as QuaintQuery, Queryable as QuaintQueryable, ResultSet, TransactionCapable}, visitor::{self, Visitor}, - Value, }; use tracing::{info_span, Instrument}; @@ -37,51 +33,61 @@ pub(crate) struct JsBaseQueryable { impl JsBaseQueryable { pub(crate) fn new(proxy: CommonProxy) -> Self { - let flavour: Flavour = proxy.flavour.to_owned().parse().unwrap(); + let flavour: Flavour = proxy.flavour.parse().unwrap(); Self { proxy, flavour } } - /// visit a query according to the flavour of the JS connector - pub fn visit_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec>)> { + /// visit a quaint query AST according to the flavour of the JS connector + fn visit_quaint_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec>)> { match self.flavour { Flavour::Mysql => visitor::Mysql::build(q), Flavour::Postgres => visitor::Postgres::build(q), + Flavour::Sqlite => visitor::Sqlite::build(q), _ => unimplemented!("Unsupported flavour for JS connector {:?}", self.flavour), } } + + async fn build_query(&self, sql: &str, 
values: &[quaint::Value<'_>]) -> quaint::Result { + let sql: String = sql.to_string(); + let args = match self.flavour { + Flavour::Postgres => conversion::postgres::values_to_js_args(values), + _ => conversion::values_to_js_args(values), + }?; + Ok(Query { sql, args }) + } } #[async_trait] impl QuaintQueryable for JsBaseQueryable { async fn query(&self, q: QuaintQuery<'_>) -> quaint::Result { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.query_raw(&sql, ¶ms).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { metrics::query("js.query_raw", sql, params, move || async move { self.do_query_raw(sql, params).await }) .await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.query_raw(sql, params).await } async fn execute(&self, q: QuaintQuery<'_>) -> quaint::Result { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.execute_raw(&sql, ¶ms).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { metrics::query("js.execute_raw", sql, params, move || async move { self.do_execute_raw(sql, params).await }) .await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.execute_raw(sql, params).await } @@ -111,6 +117,13 @@ impl QuaintQueryable for JsBaseQueryable { return Err(Error::builder(ErrorKind::invalid_isolation_level(&isolation_level)).build()); } + if self.flavour == Flavour::Sqlite { + return match isolation_level { + IsolationLevel::Serializable => Ok(()), + _ => Err(Error::builder(ErrorKind::invalid_isolation_level(&isolation_level)).build()), + }; + } + self.raw_cmd(&format!("SET TRANSACTION ISOLATION LEVEL {isolation_level}")) .await } @@ -129,29 +142,24 @@ impl JsBaseQueryable { format!(r#"-- Implicit "{}" query via underlying driver"#, stmt) } - async fn build_query(sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result { - let sql: String = sql.to_string(); - let args = conversion::conv_params(values)?; - Ok(Query { sql, args }) - } - - async fn do_query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn do_query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let result_set = self.proxy.query_raw(query).instrument(sql_span).await?; let len = result_set.len(); let _deserialization_span = info_span!("js:query:result", user_facing = true, "length" = %len).entered(); - Ok(ResultSet::from(result_set)) + + result_set.try_into() } - async fn do_execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn do_execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = 
info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let affected_rows = self.proxy.execute_raw(query).instrument(sql_span).await?; @@ -196,11 +204,11 @@ impl QuaintQueryable for JsQueryable { self.inner.query(q).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.query_raw(sql, params).await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.query_raw_typed(sql, params).await } @@ -208,11 +216,11 @@ impl QuaintQueryable for JsQueryable { self.inner.execute(q).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.execute_raw(sql, params).await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.execute_raw_typed(sql, params).await } diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index a5965509ef84..53133e037b6f 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,15 +1,45 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::Error as QuaintError; +use quaint::error::{Error as QuaintError, MysqlError, PostgresError, SqliteError}; use serde::Deserialize; -#[derive(Deserialize, Debug)] +#[derive(Deserialize)] +#[serde(remote = "PostgresError")] +pub struct PostgresErrorDef { + code: String, + message: String, + severity: String, + detail: Option, + column: Option, + hint: Option, +} + +#[derive(Deserialize)] +#[serde(remote = "MysqlError")] +pub struct MysqlErrorDef { + pub code: u16, + pub message: String, + pub state: String, +} + +#[derive(Deserialize)] +#[serde(remote = "SqliteError", rename_all = "camelCase")] +pub struct SqliteErrorDef { + pub extended_code: i32, + pub message: Option, +} + +#[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception - GenericJsError { id: i32 }, - // in the future, expected errors that map to known user errors with PXXX codes will also go here + GenericJs { + id: i32, + }, + + Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), + Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), + Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError), } impl FromNapiValue for DriverAdapterError { @@ -23,14 +53,16 @@ impl FromNapiValue for DriverAdapterError { impl From for QuaintError { fn from(value: DriverAdapterError) -> Self { match value { - DriverAdapterError::GenericJsError { id } => QuaintError::external_error(id), + DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), + DriverAdapterError::Postgres(e) => e.into(), + DriverAdapterError::Mysql(e) => e.into(), + DriverAdapterError::Sqlite(e) => e.into(), // in 
future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } } /// Wrapper for JS-side result type -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum JsResult where T: FromNapiValue, diff --git a/query-engine/metrics/src/common.rs b/query-engine/metrics/src/common.rs index 76549a4bb2dc..92c76ffba962 100644 --- a/query-engine/metrics/src/common.rs +++ b/query-engine/metrics/src/common.rs @@ -52,7 +52,27 @@ pub(crate) struct Metric { } impl Metric { - pub fn new(key: Key, description: String, value: MetricValue, global_labels: HashMap) -> Self { + pub(crate) fn renamed( + key: Key, + descriptions: &HashMap, + value: MetricValue, + global_labels: &HashMap, + ) -> Self { + match crate::METRIC_RENAMES.get(key.name()) { + Some((new_key, new_description)) => Self::new( + Key::from_parts(new_key.to_string(), key.labels()), + new_description.to_string(), + value, + global_labels.clone(), + ), + None => { + let description = descriptions.get(key.name()).map(|s| s.to_string()).unwrap_or_default(); + Self::new(key, description, value, global_labels.clone()) + } + } + } + + fn new(key: Key, description: String, value: MetricValue, global_labels: HashMap) -> Self { let (name, labels) = key.into_parts(); let mut labels_map: HashMap = labels @@ -62,13 +82,8 @@ impl Metric { labels_map.extend(global_labels); - let mut key = name.as_str(); - if let Some(rename) = crate::METRIC_RENAMES.get(key) { - key = rename; - } - Self { - key: key.to_string(), + key: name.as_str().to_string(), value, description, labels: labels_map, diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs index 4e9fcc69f5c5..1965b56cb076 100644 --- a/query-engine/metrics/src/lib.rs +++ b/query-engine/metrics/src/lib.rs @@ -34,7 +34,8 @@ use once_cell::sync::Lazy; use recorder::*; pub use registry::MetricRegistry; use serde::Deserialize; -use std::{collections::HashMap, sync::Once}; +use std::collections::HashMap; +use std::sync::Once; pub extern crate metrics; pub use metrics::{ @@ -42,36 +43,31 @@ pub use metrics::{ increment_counter, increment_gauge, }; -// Dependency metrics names emitted by the connector pool implementation (mobc) that will be renamed -// using the `METRIC_RENAMES` map. -pub const MOBC_POOL_CONNECTIONS_OPENED_TOTAL: &str = "mobc_pool_connections_opened_total"; -pub const MOBC_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "mobc_pool_connections_closed_total"; -pub const MOBC_POOL_CONNECTIONS_OPEN: &str = "mobc_pool_connections_open"; -pub const MOBC_POOL_CONNECTIONS_BUSY: &str = "mobc_pool_connections_busy"; -pub const MOBC_POOL_CONNECTIONS_IDLE: &str = "mobc_pool_connections_idle"; -pub const MOBC_POOL_WAIT_COUNT: &str = "mobc_client_queries_wait"; -pub const MOBC_POOL_WAIT_DURATION: &str = "mobc_client_queries_wait_histogram_ms"; - -// External metrics names that we expose. 
-// counters -pub const PRISMA_CLIENT_QUERIES_TOTAL: &str = "prisma_client_queries_total"; -pub const PRISMA_DATASOURCE_QUERIES_TOTAL: &str = "prisma_datasource_queries_total"; -pub const PRISMA_POOL_CONNECTIONS_OPENED_TOTAL: &str = "prisma_pool_connections_opened_total"; -pub const PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "prisma_pool_connections_closed_total"; -// gauges -pub const PRISMA_POOL_CONNECTIONS_OPEN: &str = "prisma_pool_connections_open"; -pub const PRISMA_POOL_CONNECTIONS_BUSY: &str = "prisma_pool_connections_busy"; -pub const PRISMA_POOL_CONNECTIONS_IDLE: &str = "prisma_pool_connections_idle"; -pub const PRISMA_CLIENT_QUERIES_WAIT: &str = "prisma_client_queries_wait"; -pub const PRISMA_CLIENT_QUERIES_ACTIVE: &str = "prisma_client_queries_active"; -// histograms -pub const PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_client_queries_duration_histogram_ms"; -pub const PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS: &str = "prisma_client_queries_wait_histogram_ms"; -pub const PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_datasource_queries_duration_histogram_ms"; - -// We need a list of acceptable metrics, we don't want to accidentally process metrics emitted by a -// third party library -const ACCEPT_LIST: &[&str] = &[ +// Metrics that we emit from the engines, third party metrics emitted by libraries and that we rename are omitted. +pub const PRISMA_CLIENT_QUERIES_TOTAL: &str = "prisma_client_queries_total"; // counter +pub const PRISMA_DATASOURCE_QUERIES_TOTAL: &str = "prisma_datasource_queries_total"; // counter +pub const PRISMA_CLIENT_QUERIES_ACTIVE: &str = "prisma_client_queries_active"; // gauge +pub const PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_client_queries_duration_histogram_ms"; // histogram +pub const PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_datasource_queries_duration_histogram_ms"; // histogram + +// metrics emitted by the connector pool implementation (mobc) that will be renamed using the `METRIC_RENAMES` map. 
+const MOBC_POOL_CONNECTIONS_OPENED_TOTAL: &str = "mobc_pool_connections_opened_total"; // counter +const MOBC_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "mobc_pool_connections_closed_total"; // counter +const MOBC_POOL_CONNECTIONS_OPEN: &str = "mobc_pool_connections_open"; // gauge +const MOBC_POOL_CONNECTIONS_BUSY: &str = "mobc_pool_connections_busy"; // gauge +const MOBC_POOL_CONNECTIONS_IDLE: &str = "mobc_pool_connections_idle"; // gauge +const MOBC_POOL_WAIT_COUNT: &str = "mobc_client_queries_wait"; // gauge +const MOBC_POOL_WAIT_DURATION: &str = "mobc_client_queries_wait_histogram_ms"; // histogram + +/// Accept list: both first-party (emitted by the query engine) and third-party (emitted) metrics +pub const ACCEPT_LIST: &[&str] = &[ + // first-party + PRISMA_CLIENT_QUERIES_TOTAL, + PRISMA_DATASOURCE_QUERIES_TOTAL, + PRISMA_CLIENT_QUERIES_ACTIVE, + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, + // third-party, emitted by mobc MOBC_POOL_CONNECTIONS_OPENED_TOTAL, MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, MOBC_POOL_CONNECTIONS_OPEN, @@ -79,120 +75,97 @@ const ACCEPT_LIST: &[&str] = &[ MOBC_POOL_CONNECTIONS_IDLE, MOBC_POOL_WAIT_COUNT, MOBC_POOL_WAIT_DURATION, - PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, - PRISMA_CLIENT_QUERIES_TOTAL, - PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, - PRISMA_DATASOURCE_QUERIES_TOTAL, - PRISMA_CLIENT_QUERIES_ACTIVE, ]; -// Some of the metrics we receive have their internal names, and we need to expose them under a different -// name, this map translates from the internal names used by mobc to the external names we want to expose -static METRIC_RENAMES: Lazy> = Lazy::new(|| { +/// Map that for any given accepted metric that is emitted by a third-party, in this case only the +/// connection pool library mobc, it points to an internal, accepted metrics name and its description +/// as displayed to users. This is used to rebrand the third-party metrics to accepted, prisma-specific +/// ones. 
+#[rustfmt::skip] +static METRIC_RENAMES: Lazy> = Lazy::new(|| { HashMap::from([ - (MOBC_POOL_CONNECTIONS_OPENED_TOTAL, PRISMA_POOL_CONNECTIONS_OPENED_TOTAL), - (MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL), - (MOBC_POOL_CONNECTIONS_OPEN, PRISMA_POOL_CONNECTIONS_OPEN), - (MOBC_POOL_CONNECTIONS_BUSY, PRISMA_POOL_CONNECTIONS_BUSY), - (MOBC_POOL_CONNECTIONS_IDLE, PRISMA_POOL_CONNECTIONS_IDLE), - (MOBC_POOL_WAIT_COUNT, PRISMA_CLIENT_QUERIES_WAIT), - (MOBC_POOL_WAIT_DURATION, PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS), + (MOBC_POOL_CONNECTIONS_OPENED_TOTAL, ("prisma_pool_connections_opened_total", "The total number of pool connections opened")), + (MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, ("prisma_pool_connections_closed_total", "The total number of pool connections closed")), + (MOBC_POOL_CONNECTIONS_OPEN, ("prisma_pool_connections_open", "The number of pool connections currently open")), + (MOBC_POOL_CONNECTIONS_BUSY, ("prisma_pool_connections_busy", "The number of pool connections currently executing datasource queries")), + (MOBC_POOL_CONNECTIONS_IDLE, ("prisma_pool_connections_idle", "The number of pool connections that are not busy running a query")), + (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for a free connection")), + (MOBC_POOL_WAIT_DURATION, ("prisma_client_queries_wait_histogram_ms", "The distribution of the time all datasource queries spent waiting for a free connection")), ]) }); -// At the moment the histogram is only used for timings. So the bounds are hard coded here -// The buckets are for ms -pub(crate) const HISTOGRAM_BOUNDS: [f64; 10] = [0.0, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, 5000.0, 50000.0]; +pub fn setup() { + set_recorder(); + initialize_metrics(); +} -#[derive(PartialEq, Eq, Debug, Deserialize)] -pub enum MetricFormat { - #[serde(alias = "json")] - Json, - #[serde(alias = "prometheus")] - Prometheus, +static METRIC_RECORDER: Once = Once::new(); + +fn set_recorder() { + METRIC_RECORDER.call_once(|| { + metrics::set_boxed_recorder(Box::new(MetricRecorder)).unwrap(); + }); } -pub fn setup() { - set_recorder(); - describe_metrics(); +/// Initialize metrics descriptions and values +pub fn initialize_metrics() { + initialize_metrics_descriptions(); + initialize_metrics_values(); } -// Describe all metric here so that every time for create -// a new metric registry for a Query Instance the descriptions -// will be in place -pub fn describe_metrics() { - // counters +/// Describe all first-party metrics that we record in prisma-engines. Metrics recorded by third-parties +/// --like mobc-- are described by such third parties, but ignored, and replaced by the descriptions in the +/// METRICS_RENAMES map. 
+fn initialize_metrics_descriptions() { describe_counter!( PRISMA_CLIENT_QUERIES_TOTAL, - "Total number of Prisma Client queries executed" + "The total number of Prisma Client queries executed" ); describe_counter!( PRISMA_DATASOURCE_QUERIES_TOTAL, - "Total number of Datasource Queries executed" - ); - describe_counter!( - PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, - "Total number of Pool Connections opened" - ); - describe_counter!( - PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, - "Total number of Pool Connections closed" - ); - - absolute_counter!(PRISMA_CLIENT_QUERIES_TOTAL, 0); - absolute_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL, 0); - absolute_counter!(PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, 0); - absolute_counter!(PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, 0); - - // gauges - describe_gauge!( - PRISMA_POOL_CONNECTIONS_OPEN, - "Number of currently open Pool Connections (able to execute a datasource query)" - ); - describe_gauge!( - PRISMA_POOL_CONNECTIONS_BUSY, - "Number of currently busy Pool Connections (executing a datasource query)" - ); - describe_gauge!( - PRISMA_POOL_CONNECTIONS_IDLE, - "Number of currently unused Pool Connections (waiting for the next datasource query to run)" - ); - describe_gauge!( - PRISMA_CLIENT_QUERIES_WAIT, - "Number of Prisma Client queries currently waiting for a connection" + "The total number of datasource queries executed" ); describe_gauge!( PRISMA_CLIENT_QUERIES_ACTIVE, - "Number of currently active Prisma Client queries" + "The number of currently active Prisma Client queries" ); - - gauge!(PRISMA_POOL_CONNECTIONS_OPEN, 0.0); - gauge!(PRISMA_POOL_CONNECTIONS_BUSY, 0.0); - gauge!(PRISMA_POOL_CONNECTIONS_IDLE, 0.0); - gauge!(PRISMA_CLIENT_QUERIES_WAIT, 0.0); - gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); - - // histograms describe_histogram!( - PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS, - "Histogram of the wait time of all Prisma Client Queries in ms" + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + "The distribution of the time Prisma Client queries took to run end to end" ); describe_histogram!( PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, - "Histogram of the duration of all executed Datasource Queries in ms" - ); - describe_histogram!( - PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, - "Histogram of the duration of all executed Prisma Client queries in ms" + "The distribution of the time datasource queries took to run" ); } -static METRIC_RECORDER: Once = Once::new(); +/// Initialize all metrics values (first and third-party) +/// +/// FIXME: https://github.com/prisma/prisma/issues/21070 +/// Histograms are excluded, as their initialization will alter the histogram values. +/// (i.e. histograms don't have a neutral value, like counters or gauges) +fn initialize_metrics_values() { + absolute_counter!(PRISMA_CLIENT_QUERIES_TOTAL, 0); + absolute_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL, 0); + gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); + absolute_counter!(MOBC_POOL_CONNECTIONS_OPENED_TOTAL, 0); + absolute_counter!(MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, 0); + gauge!(MOBC_POOL_CONNECTIONS_OPEN, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_BUSY, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_IDLE, 0.0); + gauge!(MOBC_POOL_WAIT_COUNT, 0.0); +} -fn set_recorder() { - METRIC_RECORDER.call_once(|| { - metrics::set_boxed_recorder(Box::new(MetricRecorder)).unwrap(); - }); +// At the moment the histogram is only used for timings. 
So the bounds are hard coded here +// The buckets are for ms +pub(crate) const HISTOGRAM_BOUNDS: [f64; 10] = [0.0, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, 5000.0, 50000.0]; + +#[derive(PartialEq, Eq, Debug, Deserialize)] +pub enum MetricFormat { + #[serde(alias = "json")] + Json, + #[serde(alias = "prometheus")] + Prometheus, } #[cfg(test)] diff --git a/query-engine/metrics/src/registry.rs b/query-engine/metrics/src/registry.rs index 3f4a892b7088..6530edbe8764 100644 --- a/query-engine/metrics/src/registry.rs +++ b/query-engine/metrics/src/registry.rs @@ -160,20 +160,16 @@ impl MetricRegistry { let mut counters: Vec = counter_handles .into_iter() .map(|(key, counter)| { - let key_name = key.name(); let value = counter.get_inner().load(Ordering::Acquire); - let description = descriptions.get(key_name).cloned().unwrap_or_default(); - Metric::new(key, description, MetricValue::Counter(value), global_labels.clone()) + Metric::renamed(key, &descriptions, MetricValue::Counter(value), &global_labels) }) .collect(); let mut gauges: Vec = gauge_handles .into_iter() .map(|(key, gauge)| { - let key_name = key.name(); - let description = descriptions.get(key_name).cloned().unwrap_or_default(); let value = f64::from_bits(gauge.get_inner().load(Ordering::Acquire)); - Metric::new(key, description, MetricValue::Gauge(value), global_labels.clone()) + Metric::renamed(key, &descriptions, MetricValue::Gauge(value), &global_labels) }) .collect(); @@ -185,13 +181,11 @@ impl MetricRegistry { histogram.record_many(s); }); - let key_name = key.name(); - let description = descriptions.get(key_name).cloned().unwrap_or_default(); - Metric::new( + Metric::renamed( key, - description, + &descriptions, MetricValue::Histogram(histogram.into()), - global_labels.clone(), + &global_labels, ) }) .collect(); diff --git a/query-engine/prisma-models/Cargo.toml b/query-engine/prisma-models/Cargo.toml index c7e012afebfb..0becd1fdea70 100644 --- a/query-engine/prisma-models/Cargo.toml +++ b/query-engine/prisma-models/Cargo.toml @@ -10,13 +10,16 @@ prisma-value = { path = "../../libs/prisma-value" } bigdecimal = "0.3" thiserror = "1.0" +getrandom = { version = "0.2" } uuid = { workspace = true, optional = true } -cuid = { version = "1.2", optional = true } +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support", optional = true } nanoid = { version = "0.4.0", optional = true } chrono = { version = "0.4.6", features = ["serde"] } +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.2" +features = ["js"] + [features] -# Support for generating default UUID, CUID, nanoid and datetime values. This -# implies random number generation works, so it won't compile on targets like -# wasm32. +# Support for generating default UUID, CUID, nanoid and datetime values. default_generators = ["uuid/v4", "cuid", "nanoid"] diff --git a/query-engine/prisma-models/src/field/mod.rs b/query-engine/prisma-models/src/field/mod.rs index d05b6d7d6361..45d529c56abf 100644 --- a/query-engine/prisma-models/src/field/mod.rs +++ b/query-engine/prisma-models/src/field/mod.rs @@ -178,6 +178,11 @@ impl TypeIdentifier { TypeIdentifier::Unsupported => "Unsupported".into(), } } + + /// Returns `true` if the type identifier is [`Enum`]. 
+ pub fn is_enum(&self) -> bool { + matches!(self, Self::Enum(..)) + } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] diff --git a/query-engine/prisma-models/src/field/scalar.rs b/query-engine/prisma-models/src/field/scalar.rs index cc3b3533322b..92039da53663 100644 --- a/query-engine/prisma-models/src/field/scalar.rs +++ b/query-engine/prisma-models/src/field/scalar.rs @@ -90,7 +90,9 @@ impl ScalarField { }; match scalar_field_type { - ScalarFieldType::CompositeType(_) => unreachable!(), + ScalarFieldType::CompositeType(_) => { + unreachable!("Cannot convert a composite type to a type identifier. This error is typically caused by mistakenly using a composite type within a composite index.",) + } ScalarFieldType::Enum(x) => TypeIdentifier::Enum(x), ScalarFieldType::BuiltInScalar(scalar) => scalar.into(), ScalarFieldType::Unsupported(_) => TypeIdentifier::Unsupported, diff --git a/query-engine/prisma-models/src/internal_enum.rs b/query-engine/prisma-models/src/internal_enum.rs index 1ae3459f2356..6467adcebf6d 100644 --- a/query-engine/prisma-models/src/internal_enum.rs +++ b/query-engine/prisma-models/src/internal_enum.rs @@ -9,6 +9,14 @@ impl InternalEnum { pub fn name(&self) -> &str { self.dm.walk(self.id).name() } + + pub fn db_name(&self) -> &str { + self.dm.walk(self.id).database_name() + } + + pub fn schema_name(&self) -> Option<&str> { + self.dm.walk(self.id).schema().map(|tuple| tuple.0) + } } impl std::fmt::Debug for InternalEnum { diff --git a/query-engine/prisma-models/tests/datamodel_converter_tests.rs b/query-engine/prisma-models/tests/datamodel_converter_tests.rs index 2f5bf75b103f..0a45c80ed163 100644 --- a/query-engine/prisma-models/tests/datamodel_converter_tests.rs +++ b/query-engine/prisma-models/tests/datamodel_converter_tests.rs @@ -38,6 +38,32 @@ fn converting_enums() { } } +#[test] +fn converting_composite_types() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + model MyModel { + id String @id @default(auto()) @map("_id") @db.ObjectId + attribute Attribute + + @@unique([attribute], name: "composite_index") + } + + type Attribute { + name String + value String + group String + } + "#, + ); + assert!(res.unwrap_err().contains("Indexes can only contain scalar attributes. Please remove \"attribute\" from the argument list of the indexes.")); +} + #[test] fn models_with_only_scalar_fields() { let datamodel = convert( diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 37baeaee2c60..23782af1776a 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -147,7 +147,7 @@ impl QueryEngine { napi_env: Env, options: JsUnknown, callback: JsFunction, - maybe_driver: Option, + maybe_adapter: Option, ) -> napi::Result { let mut log_callback = callback.create_threadsafe_function(0usize, |ctx: ThreadSafeCallContext| { Ok(vec![ctx.env.create_string(&ctx.value)?]) @@ -163,7 +163,17 @@ impl QueryEngine { config_dir, ignore_env_var_errors, engine_protocol, - } = napi_env.from_js_value(options)?; + } = napi_env.from_js_value(options).expect( + r###" + Failed to deserialize constructor options. + + This usually happens when the javascript object passed to the constructor is missing + properties for the ConstructorOptions fields that must have some value. 
+ + If you set some of these in javascript trough environment variables, make sure there are + values for data_model, log_level, and any field that is not Option + "###, + ); let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect(); @@ -181,17 +191,14 @@ impl QueryEngine { ); } else { #[cfg(feature = "driver-adapters")] - if let Some(driver) = maybe_driver { - let js_queryable = driver_adapters::from_napi(driver); - let provider_name = schema.connector.provider_name(); + if let Some(adapter) = maybe_adapter { + let js_queryable = driver_adapters::from_napi(adapter); - match sql_connector::register_driver_adapter(provider_name, Arc::new(js_queryable)) { - Ok(_) => { - connector_mode = ConnectorMode::Js; - tracing::info!("Registered driver adapter for {provider_name}.") - } - Err(err) => tracing::error!("Failed to register driver adapter for {provider_name}. {err}"), - } + sql_connector::activate_driver_adapter(Arc::new(js_queryable)); + connector_mode = ConnectorMode::Js; + + let provider_name = schema.connector.provider_name(); + tracing::info!("Registered driver adapter for {provider_name}."); } } @@ -235,7 +242,7 @@ impl QueryEngine { if enable_metrics { napi_env.execute_tokio_future( async { - query_engine_metrics::describe_metrics(); + query_engine_metrics::initialize_metrics(); Ok(()) } .with_subscriber(logger.dispatcher()), @@ -370,34 +377,33 @@ impl QueryEngine { /// If connected, sends a query to the core and returns the response. #[napi] pub async fn query(&self, body: String, trace: String, tx_id: Option) -> napi::Result { + let dispatcher = self.logger.dispatcher(); + async_panic_to_js_error(async { let inner = self.inner.read().await; let engine = inner.as_engine()?; let query = RequestBody::try_from_str(&body, engine.engine_protocol())?; - let dispatcher = self.logger.dispatcher(); - - async move { - let span = if tx_id.is_none() { - tracing::info_span!("prisma:engine", user_facing = true) - } else { - Span::none() - }; + let span = if tx_id.is_none() { + tracing::info_span!("prisma:engine", user_facing = true) + } else { + Span::none() + }; - let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + async move { let handler = RequestHandler::new(engine.executor(), engine.query_schema(), engine.engine_protocol()); - let response = handler - .handle(query, tx_id.map(TxId::from), trace_id) - .instrument(span) - .await; + let response = handler.handle(query, tx_id.map(TxId::from), trace_id).await; - Ok(serde_json::to_string(&response)?) + let serde_span = tracing::info_span!("prisma:engine:response_json_serialization", user_facing = true); + Ok(serde_span.in_scope(|| serde_json::to_string(&response))?) 
} - .with_subscriber(dispatcher) + .instrument(span) .await }) + .with_subscriber(dispatcher) .await } diff --git a/query-engine/query-engine-node-api/src/logger.rs b/query-engine/query-engine-node-api/src/logger.rs index d327726d6567..da3e725c0218 100644 --- a/query-engine/query-engine-node-api/src/logger.rs +++ b/query-engine/query-engine-node-api/src/logger.rs @@ -58,7 +58,7 @@ impl Logger { None }; - let layer = CallbackLayer::new(log_callback.clone()).with_filter(filters); + let layer = CallbackLayer::new(log_callback).with_filter(filters); let metrics = if enable_metrics { query_engine_metrics::setup(); diff --git a/query-engine/query-engine-wasm/.gitignore b/query-engine/query-engine-wasm/.gitignore new file mode 100644 index 000000000000..a6f0e4dca125 --- /dev/null +++ b/query-engine/query-engine-wasm/.gitignore @@ -0,0 +1,7 @@ +/target +**/*.rs.bk +Cargo.lock +bin/ +pkg/ +wasm-pack.log +node_modules/ \ No newline at end of file diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc new file mode 100644 index 000000000000..6569dfa4f323 --- /dev/null +++ b/query-engine/query-engine-wasm/.nvmrc @@ -0,0 +1 @@ +20.8.1 diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml new file mode 100644 index 000000000000..a8bc393aee3f --- /dev/null +++ b/query-engine/query-engine-wasm/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "query-engine-wasm" +version = "0.1.0" +edition = "2021" + +[lib] +doc = false +crate-type = ["cdylib"] +name = "query_engine" + +[dependencies] +anyhow = "1" +async-trait = "0.1" +user-facing-errors = { path = "../../libs/user-facing-errors" } +psl.workspace = true +prisma-models = { path = "../prisma-models" } + +thiserror = "1" +connection-string.workspace = true +url = "2" +serde_json.workspace = true +serde.workspace = true +tokio = { version = "1.25", features = ["macros", "sync", "io-util", "time"] } +futures = "0.3" +wasm-bindgen = "=0.2.87" +wasm-bindgen-futures = "0.4" +serde-wasm-bindgen = "0.5" +js-sys = "0.3" +log = "0.4.6" +wasm-logger = "0.2.0" + +tracing = "0.1" +tracing-subscriber = { version = "0.3" } +tracing-futures = "0.2" +tsify = "0.4.5" +console_error_panic_hook = "0.1.7" diff --git a/query-engine/query-engine-wasm/README.md b/query-engine/query-engine-wasm/README.md new file mode 100644 index 000000000000..f5adc7eb2894 --- /dev/null +++ b/query-engine/query-engine-wasm/README.md @@ -0,0 +1,40 @@ +# @prisma/query-engine-wasm + +**INTERNAL PACKAGE, DO NOT USE** + +This is a Wasm-compatible version of the Query Engine library (libquery). +Currently, it just contains a skeleton of the public API, as some internal crates are still not Wasm-compatible. + +The published npm package is internal to Prisma. Its API will break without prior warning. + +## Setup + +``` +# Install the latest Rust version with `rustup` +# or update the latest Rust version with `rustup` +rustup update +rustup target add wasm32-unknown-unknown +cargo install wasm-bindgen +cargo install wasm-pack +``` + +## How to Build + +From the current folder: + +- `./build.sh $OUT_NPM_VERSION` + +where e.g. `OUT_NPM_VERSION="0.0.1"` is the version you want to publish this package on npm with. 
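For example, building version `0.0.1` locally looks roughly like the sketch below (the exact `package.json` contents depend on `wasm-pack`; the fields shown are the ones `build.sh` patches in with `jq`):

```
./build.sh "0.0.1"

# The package is emitted into ./pkg, and build.sh rewrites ./pkg/package.json
# so that it contains at least:
# {
#   "name": "@prisma/query-engine-wasm",
#   "version": "0.0.1",
#   "type": "module",
#   "main": "./query_engine.js",
#   "private": false
# }
```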
+ +## How to Publish + +From the current folder: + +- `wasm-pack publish --access public` + +## How to Test + +To try importing the , you can run: + +- `nvm use` +- `node --experimental-wasm-modules ./example.js` diff --git a/query-engine/query-engine-wasm/build.rs b/query-engine/query-engine-wasm/build.rs new file mode 100644 index 000000000000..2e8fe20c0503 --- /dev/null +++ b/query-engine/query-engine-wasm/build.rs @@ -0,0 +1,11 @@ +use std::process::Command; + +fn store_git_commit_hash() { + let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); + let git_hash = String::from_utf8(output.stdout).unwrap(); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); +} + +fn main() { + store_git_commit_hash(); +} diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh new file mode 100755 index 000000000000..12d8328305ff --- /dev/null +++ b/query-engine/query-engine-wasm/build.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# Call this script as `./build.sh ` + +OUT_VERSION="$1" +OUT_FOLDER="pkg" +OUT_JSON="${OUT_FOLDER}/package.json" +OUT_TARGET="bundler" # Note(jkomyno): I wasn't able to make it work with `web` target +OUT_NPM_NAME="@prisma/query-engine-wasm" + +wasm-pack build --release --target $OUT_TARGET + +sleep 1 + +# Mark the package as a ES module, set the entry point to the query_engine.js file, mark the package as public +printf '%s\n' "$(jq '. + {"type": "module"} + {"main": "./query_engine.js"} + {"private": false}' $OUT_JSON)" > $OUT_JSON + +# Add the version +printf '%s\n' "$(jq --arg version "$OUT_VERSION" '. + {"version": $version}' $OUT_JSON)" > $OUT_JSON + +# Add the package name +printf '%s\n' "$(jq --arg name "$OUT_NPM_NAME" '. + {"name": $name}' $OUT_JSON)" > $OUT_JSON + +enable_cf_in_bindings() { + # Enable Cloudflare Workers in the generated JS bindings. + # The generated bindings are compatible with: + # - Node.js + # - Cloudflare Workers / Miniflare + + local FILE="$1" # e.g., `query_engine.js` + local BG_FILE="${FILE%.js}_bg.js" + local OUTPUT_FILE="${OUT_FOLDER}/${FILE}" + + cat < "$OUTPUT_FILE" +import * as imports from "./${BG_FILE}"; + +// switch between both syntax for Node.js and for workers (Cloudflare Workers) +import * as wkmod from "./${BG_FILE%.js}.wasm"; +import * as nodemod from "./${BG_FILE%.js}.wasm"; +if ((typeof process !== 'undefined') && (process.release.name === 'node')) { + imports.__wbg_set_wasm(nodemod); +} else { + const instance = new WebAssembly.Instance(wkmod.default, { "./${BG_FILE}": imports }); + imports.__wbg_set_wasm(instance.exports); +} + +export * from "./${BG_FILE}"; +EOF +} + +enable_cf_in_bindings "query_engine.js" diff --git a/query-engine/query-engine-wasm/example.js b/query-engine/query-engine-wasm/example.js new file mode 100644 index 000000000000..bca6d5ba95d7 --- /dev/null +++ b/query-engine/query-engine-wasm/example.js @@ -0,0 +1,54 @@ +/** + * Run with: `node --experimental-wasm-modules ./example.js` + * on Node.js 18+. + */ + +import { Pool } from '@neondatabase/serverless' +import { PrismaNeon } from '@prisma/adapter-neon' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { init, QueryEngine, getBuildTimeInfo } from './pkg/query_engine.js' + +async function main() { + // Always initialize the Wasm library before using it. + // This sets up the logging and panic hooks. 
+ init() + + const connectionString = undefined + + const pool = new Pool({ connectionString }) + const adapter = new PrismaNeon(pool) + const driverAdapter = bindAdapter(adapter) + + console.log('buildTimeInfo', getBuildTimeInfo()) + + const options = { + datamodel: /* prisma */` + datasource db { + provider = "postgres" + url = env("DATABASE_URL") + } + + generator client { + provider = "prisma-client-js" + } + + model User { + id Int @id @default(autoincrement()) + } + `, + logLevel: 'info', + logQueries: true, + datasourceOverrides: {}, + env: process.env, + configDir: '/tmp', + ignoreEnvVarErrors: true, + } + const callback = () => { console.log('log-callback') } + + const queryEngine = new QueryEngine(options, callback, driverAdapter) + + await queryEngine.connect('trace') + await queryEngine.disconnect('trace') +} + +main() diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json new file mode 100644 index 000000000000..bc854644f6dd --- /dev/null +++ b/query-engine/query-engine-wasm/package-lock.json @@ -0,0 +1,148 @@ +{ + "name": "query-engine-wasm", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@prisma/adapter-neon": "^5.4.1", + "@prisma/driver-adapter-utils": "^5.4.1" + } + }, + "node_modules/@neondatabase/serverless": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@neondatabase/serverless/-/serverless-0.6.0.tgz", + "integrity": "sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==", + "dependencies": { + "@types/pg": "8.6.6" + } + }, + "node_modules/@prisma/adapter-neon": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.4.1.tgz", + "integrity": "sha512-mIwLmwyAwDV9HXar9lSyM2uVm9H+X8noG4reKLnC3NjFsBxBfSUgW9vS8dPGqGW/rJWX3hg4pIffjEjmX4TDqg==", + "dependencies": { + "@prisma/driver-adapter-utils": "5.4.1" + }, + "peerDependencies": { + "@neondatabase/serverless": "^0.6.0" + } + }, + "node_modules/@prisma/driver-adapter-utils": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.4.1.tgz", + "integrity": "sha512-muYjkzf6qdxz4uGBi7nKyPaGRGLnSgiRautqAhZiMwbTOr9hMgyNI+aCJTCaKfYfNWjYCx2r5J6R1mJtPhzFhQ==", + "dependencies": { + "debug": "^4.3.4" + } + }, + "node_modules/@types/node": { + "version": "20.8.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz", + "integrity": "sha512-Vvycsc9FQdwhxE3y3DzeIxuEJbWGDsnrxvMADzTDF/lcdR9/K+AQIeAghTQsHtotg/q0j3WEOYS/jQgSdWue3w==" + }, + "node_modules/@types/pg": { + "version": "8.6.6", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", + "integrity": "sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", + "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json new file mode 100644 index 000000000000..538080ec1b8c --- /dev/null +++ b/query-engine/query-engine-wasm/package.json @@ -0,0 +1,9 @@ +{ + "type": "module", + "main": "./example.js", + "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@prisma/adapter-neon": "^5.4.1", + "@prisma/driver-adapter-utils": "^5.4.1" + } +} diff --git a/query-engine/query-engine-wasm/src/engine.rs b/query-engine/query-engine-wasm/src/engine.rs new file mode 100644 index 000000000000..f9a06fabcf4b --- /dev/null +++ b/query-engine/query-engine-wasm/src/engine.rs @@ -0,0 +1,265 @@ +#![allow(dead_code)] +#![allow(unused_variables)] + +use crate::proxy; +use crate::{ + error::ApiError, + logger::{LogCallback, Logger}, +}; +use js_sys::{Function as JsFunction, Object as JsObject}; +use serde::{Deserialize, Serialize}; +use std::{ + 
    collections::{BTreeMap, HashMap},
+    path::PathBuf,
+    sync::Arc,
+};
+use tokio::sync::RwLock;
+use tracing_subscriber::filter::LevelFilter;
+use tsify::Tsify;
+use wasm_bindgen::prelude::wasm_bindgen;
+
+/// The main query engine used by JS
+#[wasm_bindgen]
+pub struct QueryEngine {
+    inner: RwLock<Inner>,
+    logger: Logger,
+}
+
+/// The state of the engine.
+enum Inner {
+    /// Not connected, holding all data to form a connection.
+    Builder(EngineBuilder),
+    /// A connected engine, holding all data to disconnect and form a new
+    /// connection. Allows querying when in this state.
+    Connected(ConnectedEngine),
+}
+
+/// Everything needed to connect to the database and have the core running.
+struct EngineBuilder {
+    schema: Arc<psl::ValidatedSchema>,
+    config_dir: PathBuf,
+    env: HashMap<String, String>,
+}
+
+/// Internal structure for querying and reconnecting with the engine.
+struct ConnectedEngine {
+    schema: Arc<psl::ValidatedSchema>,
+    config_dir: PathBuf,
+    env: HashMap<String, String>,
+}
+
+/// Returned from the `serverInfo` method in javascript.
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+struct ServerInfo {
+    commit: String,
+    version: String,
+    primary_connector: Option<String>,
+}
+
+/// Parameters defining the construction of an engine.
+#[derive(Debug, Deserialize, Tsify)]
+#[tsify(from_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct ConstructorOptions {
+    datamodel: String,
+    log_level: String,
+    #[serde(default)]
+    log_queries: bool,
+    #[serde(default)]
+    datasource_overrides: BTreeMap<String, String>,
+    #[serde(default)]
+    env: serde_json::Value,
+    config_dir: PathBuf,
+    #[serde(default)]
+    ignore_env_var_errors: bool,
+    #[serde(default)]
+    engine_protocol: Option<String>,
+}
+
+impl Inner {
+    /// Returns a builder if the engine is not connected
+    fn as_builder(&self) -> crate::Result<&EngineBuilder> {
+        match self {
+            Inner::Builder(ref builder) => Ok(builder),
+            Inner::Connected(_) => Err(ApiError::AlreadyConnected),
+        }
+    }
+
+    /// Returns the engine if connected
+    fn as_engine(&self) -> crate::Result<&ConnectedEngine> {
+        match self {
+            Inner::Builder(_) => Err(ApiError::NotConnected),
+            Inner::Connected(ref engine) => Ok(engine),
+        }
+    }
+}
+
+#[wasm_bindgen]
+impl QueryEngine {
+    /// Parse a validated datamodel and configuration to allow connecting later on.
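The `Inner` enum doubles as the engine's lifecycle state machine: `as_builder` only succeeds before a connection exists, `as_engine` only afterwards. Since `connect`/`disconnect` are still stubs in this PR, the following is only a minimal sketch (with a hypothetical helper that would live in the same module) of how the Builder to Connected transition is meant to happen under the `RwLock`:

```rust
// Sketch only: how `Inner` is meant to flip from Builder to Connected.
// Assumes a helper in the same module as `QueryEngine`; nothing here is in the actual PR.
async fn connect_sketch(engine: &QueryEngine) -> crate::Result<()> {
    let mut inner = engine.inner.write().await;
    let builder = inner.as_builder()?; // fails with ApiError::AlreadyConnected on a second call
    let connected = ConnectedEngine {
        schema: builder.schema.clone(),
        config_dir: builder.config_dir.clone(),
        env: builder.env.clone(),
    };
    *inner = Inner::Connected(connected); // from now on, as_engine() succeeds instead
    Ok(())
}
```

Keeping both variants in one enum means a single `RwLock` guards the whole lifecycle, which is why `as_builder`/`as_engine` return `AlreadyConnected`/`NotConnected` errors instead of panicking.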
+ #[wasm_bindgen(constructor)] + pub fn new( + options: ConstructorOptions, + callback: JsFunction, + maybe_adapter: Option, + ) -> Result { + log::info!("Called `QueryEngine::new()`"); + + let log_callback = LogCallback(callback); + log::info!("Parsed `log_callback`"); + + let ConstructorOptions { + datamodel, + log_level, + log_queries, + datasource_overrides, + env, + config_dir, + ignore_env_var_errors, + engine_protocol, + } = options; + + let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env + let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect(); + + let mut schema = psl::validate(datamodel.into()); + let config = &mut schema.configuration; + + if let Some(adapter) = maybe_adapter { + let js_queryable = + proxy::from_wasm(adapter).map_err(|e| ApiError::configuration(e.as_string().unwrap_or_default()))?; + + let provider_name = schema.connector.provider_name(); + log::info!("Received driver adapter for {provider_name}."); + } + + schema + .diagnostics + .to_result() + .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + + config + .resolve_datasource_urls_query_engine( + &overrides, + |key| env.get(key).map(ToString::to_string), + ignore_env_var_errors, + ) + .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + + config + .validate_that_one_datasource_is_provided() + .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + + let builder = EngineBuilder { + schema: Arc::new(schema), + config_dir, + env, + }; + + let log_level = log_level.parse::().unwrap(); + let logger = Logger::new(log_queries, log_level, log_callback); + + Ok(Self { + inner: RwLock::new(Inner::Builder(builder)), + logger, + }) + } + + /// Connect to the database, allow queries to be run. + #[wasm_bindgen] + pub async fn connect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> { + log::info!("Called `QueryEngine::connect()`"); + Ok(()) + } + + /// Disconnect and drop the core. Can be reconnected later with `#connect`. + #[wasm_bindgen] + pub async fn disconnect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> { + log::info!("Called `QueryEngine::disconnect()`"); + Ok(()) + } + + /// If connected, sends a query to the core and returns the response. + #[wasm_bindgen] + pub async fn query( + &self, + body: String, + trace: String, + tx_id: Option, + ) -> Result { + log::info!("Called `QueryEngine::query()`"); + Err(ApiError::configuration("Can't use `query` until `request_handlers` is Wasm-compatible.").into()) + } + + /// If connected, attempts to start a transaction in the core and returns its ID. + #[wasm_bindgen(js_name = startTransaction)] + pub async fn start_transaction(&self, input: String, trace: String) -> Result { + log::info!("Called `QueryEngine::start_transaction()`"); + Err(ApiError::configuration("Can't use `start_transaction` until `query_core` is Wasm-compatible.").into()) + } + + /// If connected, attempts to commit a transaction with id `tx_id` in the core. 
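The `maybe_adapter` object handed to the constructor is expected by `proxy::from_wasm` (defined later in this diff) to expose `queryRaw`, `executeRaw`, `flavour` and `startTransaction`; in `example.js` it comes from `bindAdapter`. A hedged sketch of that minimal shape as a hypothetical wasm-only test helper, not part of this PR:

```rust
// Sketch (hypothetical, wasm32 target only): the minimal driver-adapter-shaped object
// that CommonProxy/DriverProxy can consume. Real adapters come from `bindAdapter` in JS.
use js_sys::{Function as JsFunction, Object as JsObject, Reflect};
use wasm_bindgen::JsValue;

fn fake_adapter() -> JsObject {
    let obj = JsObject::new();
    // Which SQL dialect the adapter speaks.
    Reflect::set(&obj, &"flavour".into(), &JsValue::from_str("postgres")).unwrap();
    // In a real adapter these are async JS functions; no-ops are enough to build the proxies.
    for name in ["queryRaw", "executeRaw", "startTransaction"] {
        Reflect::set(&obj, &name.into(), &JsFunction::new_no_args("return null;")).unwrap();
    }
    obj
}
```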
+ #[wasm_bindgen(js_name = commitTransaction)] + pub async fn commit_transaction(&self, tx_id: String, trace: String) -> Result { + log::info!("Called `QueryEngine::commit_transaction()`"); + Err(ApiError::configuration("Can't use `commit_transaction` until `query_core` is Wasm-compatible.").into()) + } + + #[wasm_bindgen] + pub async fn dmmf(&self, trace: String) -> Result { + log::info!("Called `QueryEngine::dmmf()`"); + Err(ApiError::configuration("Can't use `dmmf` until `request_handlers` is Wasm-compatible.").into()) + } + + /// If connected, attempts to roll back a transaction with id `tx_id` in the core. + #[wasm_bindgen(js_name = rollbackTransaction)] + pub async fn rollback_transaction(&self, tx_id: String, trace: String) -> Result { + log::info!("Called `QueryEngine::rollback_transaction()`"); + Ok("{}".to_owned()) + } + + /// Loads the query schema. Only available when connected. + #[wasm_bindgen(js_name = sdlSchema)] + pub async fn sdl_schema(&self) -> Result { + log::info!("Called `QueryEngine::sdl_schema()`"); + Ok("{}".to_owned()) + } + + #[wasm_bindgen] + pub async fn metrics(&self, json_options: String) -> Result<(), wasm_bindgen::JsError> { + log::info!("Called `QueryEngine::metrics()`"); + Err(ApiError::configuration("Metrics is not enabled in Wasm.").into()) + } +} + +fn stringify_env_values(origin: serde_json::Value) -> crate::Result> { + use serde_json::Value; + + let msg = match origin { + Value::Object(map) => { + let mut result: HashMap = HashMap::new(); + + for (key, val) in map.into_iter() { + match val { + Value::Null => continue, + Value::String(val) => { + result.insert(key, val); + } + val => { + result.insert(key, val.to_string()); + } + } + } + + return Ok(result); + } + Value::Null => return Ok(Default::default()), + Value::Bool(_) => "Expected an object for the env constructor parameter, got a boolean.", + Value::Number(_) => "Expected an object for the env constructor parameter, got a number.", + Value::String(_) => "Expected an object for the env constructor parameter, got a string.", + Value::Array(_) => "Expected an object for the env constructor parameter, got an array.", + }; + + Err(ApiError::JsonDecode(msg.to_string())) +} diff --git a/query-engine/query-engine-wasm/src/error.rs b/query-engine/query-engine-wasm/src/error.rs new file mode 100644 index 000000000000..619e96564f6a --- /dev/null +++ b/query-engine/query-engine-wasm/src/error.rs @@ -0,0 +1,93 @@ +use psl::diagnostics::Diagnostics; +// use query_connector::error::ConnectorError; +// use query_core::CoreError; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum ApiError { + #[error("{:?}", _0)] + Conversion(Diagnostics, String), + + #[error("{}", _0)] + Configuration(String), + + // #[error("{}", _0)] + // Core(CoreError), + + // #[error("{}", _0)] + // Connector(ConnectorError), + #[error("Can't modify an already connected engine.")] + AlreadyConnected, + + #[error("Engine is not yet connected.")] + NotConnected, + + #[error("{}", _0)] + JsonDecode(String), +} + +impl From for user_facing_errors::Error { + fn from(err: ApiError) -> Self { + use std::fmt::Write as _; + + match err { + // ApiError::Connector(ConnectorError { + // user_facing_error: Some(err), + // .. 
+ // }) => err.into(), + ApiError::Conversion(errors, dml_string) => { + let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string); + write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap(); + + user_facing_errors::Error::from(user_facing_errors::KnownError::new( + user_facing_errors::common::SchemaParserError { full_error }, + )) + } + // ApiError::Core(error) => user_facing_errors::Error::from(error), + other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()), + } + } +} + +impl ApiError { + pub fn conversion(diagnostics: Diagnostics, dml: impl ToString) -> Self { + Self::Conversion(diagnostics, dml.to_string()) + } + + pub fn configuration(msg: impl ToString) -> Self { + Self::Configuration(msg.to_string()) + } +} + +// impl From for ApiError { +// fn from(e: CoreError) -> Self { +// match e { +// CoreError::ConfigurationError(message) => Self::Configuration(message), +// core_error => Self::Core(core_error), +// } +// } +// } + +// impl From for ApiError { +// fn from(e: ConnectorError) -> Self { +// Self::Connector(e) +// } +// } + +impl From for ApiError { + fn from(e: url::ParseError) -> Self { + Self::configuration(format!("Error parsing connection string: {e}")) + } +} + +impl From for ApiError { + fn from(e: connection_string::Error) -> Self { + Self::configuration(format!("Error parsing connection string: {e}")) + } +} + +impl From for ApiError { + fn from(e: serde_json::Error) -> Self { + Self::JsonDecode(format!("{e}")) + } +} diff --git a/query-engine/query-engine-wasm/src/functions.rs b/query-engine/query-engine-wasm/src/functions.rs new file mode 100644 index 000000000000..e0f0a93aa5cd --- /dev/null +++ b/query-engine/query-engine-wasm/src/functions.rs @@ -0,0 +1,47 @@ +use crate::error::ApiError; +use serde::Serialize; +use tsify::Tsify; +use wasm_bindgen::prelude::wasm_bindgen; + +#[derive(Serialize, Tsify)] +#[tsify(into_wasm_abi)] +#[serde(rename_all = "camelCase")] +pub struct Version { + pub commit: &'static str, + pub version: &'static str, +} + +#[wasm_bindgen(js_name = "getBuildTimeInfo")] +pub fn version() -> Version { + Version { + commit: env!("GIT_HASH"), + version: env!("CARGO_PKG_VERSION"), + } +} + +#[wasm_bindgen] +pub fn dmmf(datamodel_string: String) -> Result { + let mut schema = psl::validate(datamodel_string.into()); + + schema + .diagnostics + .to_result() + .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + + Ok("{}".to_string()) + + // let query_schema = query_core::schema::build(Arc::new(schema), true); + // let dmmf = dmmf::render_dmmf(&query_schema); + + // Ok(serde_json::to_string(&dmmf)?) +} + +#[wasm_bindgen] +pub fn debug_panic(panic_message: Option) -> Result<(), wasm_bindgen::JsError> { + let user_facing = user_facing_errors::Error::from_panic_payload(Box::new( + panic_message.unwrap_or_else(|| "query-engine-wasm debug panic".to_string()), + )); + let message = serde_json::to_string(&user_facing).unwrap(); + + Err(wasm_bindgen::JsError::new(&message)) +} diff --git a/query-engine/query-engine-wasm/src/lib.rs b/query-engine/query-engine-wasm/src/lib.rs new file mode 100644 index 000000000000..89b519515517 --- /dev/null +++ b/query-engine/query-engine-wasm/src/lib.rs @@ -0,0 +1,19 @@ +pub mod engine; +pub mod error; +pub mod functions; +pub mod logger; +mod proxy; + +pub(crate) type Result = std::result::Result; + +use wasm_bindgen::prelude::wasm_bindgen; + +/// Function that should be called before any other public function in this module. 
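`init()` installs the logger and the panic hook, so it has to run before any other exported function, exactly as `example.js` does. A small `wasm-bindgen-test` sketch of that contract, assuming the library crate is importable as `query_engine_wasm` (the crate name is not shown in this diff):

```rust
// Sketch only. Assumes the crate name `query_engine_wasm`; run with `wasm-pack test --node`.
use wasm_bindgen_test::wasm_bindgen_test;

#[wasm_bindgen_test]
fn build_info_is_available_after_init() {
    // Set up logging and the panic hook before calling anything else.
    query_engine_wasm::init();

    let info = query_engine_wasm::functions::version();
    assert!(!info.version.is_empty());
    assert!(!info.commit.is_empty());
}
```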
+#[wasm_bindgen] +pub fn init() { + // Set up temporary logging for the wasm module. + wasm_logger::init(wasm_logger::Config::default()); + + // Set up temporary panic hook for the wasm module. + std::panic::set_hook(Box::new(console_error_panic_hook::hook)); +} diff --git a/query-engine/query-engine-wasm/src/logger.rs b/query-engine/query-engine-wasm/src/logger.rs new file mode 100644 index 000000000000..561c48271b77 --- /dev/null +++ b/query-engine/query-engine-wasm/src/logger.rs @@ -0,0 +1,132 @@ +#![allow(dead_code)] + +use core::fmt; +use js_sys::Function as JsFunction; +use serde_json::Value; +use std::collections::BTreeMap; +use tracing::{ + field::{Field, Visit}, + level_filters::LevelFilter, + Dispatch, Level, Subscriber, +}; +use tracing_subscriber::{ + filter::{filter_fn, FilterExt}, + layer::SubscriberExt, + Layer, Registry, +}; +use wasm_bindgen::JsValue; + +pub(crate) struct LogCallback(pub JsFunction); + +unsafe impl Send for LogCallback {} +unsafe impl Sync for LogCallback {} + +pub(crate) struct Logger { + dispatcher: Dispatch, +} + +impl Logger { + /// Creates a new logger using a call layer + pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback) -> Self { + let is_sql_query = filter_fn(|meta| { + meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query") + }); + + // is a mongodb query? + let is_mongo_query = filter_fn(|meta| meta.target() == "mongodb_query_connector::query"); + + // We need to filter the messages to send to our callback logging mechanism + let filters = if log_queries { + // Filter trace query events (for query log) or based in the defined log level + is_sql_query.or(is_mongo_query).or(log_level).boxed() + } else { + // Filter based in the defined log level + FilterExt::boxed(log_level) + }; + + let layer = CallbackLayer::new(log_callback).with_filter(filters); + + Self { + dispatcher: Dispatch::new(Registry::default().with(layer)), + } + } + + pub fn dispatcher(&self) -> Dispatch { + self.dispatcher.clone() + } +} + +pub struct JsonVisitor<'a> { + values: BTreeMap<&'a str, Value>, +} + +impl<'a> JsonVisitor<'a> { + pub fn new(level: &Level, target: &str) -> Self { + let mut values = BTreeMap::new(); + values.insert("level", serde_json::Value::from(level.to_string())); + + // NOTE: previous version used module_path, this is not correct and it should be _target_ + values.insert("module_path", serde_json::Value::from(target)); + + JsonVisitor { values } + } +} + +impl<'a> Visit for JsonVisitor<'a> { + fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { + match field.name() { + name if name.starts_with("r#") => { + self.values + .insert(&name[2..], serde_json::Value::from(format!("{value:?}"))); + } + name => { + self.values.insert(name, serde_json::Value::from(format!("{value:?}"))); + } + }; + } + + fn record_i64(&mut self, field: &Field, value: i64) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } + + fn record_u64(&mut self, field: &Field, value: u64) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } + + fn record_bool(&mut self, field: &Field, value: bool) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } + + fn record_str(&mut self, field: &Field, value: &str) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } +} + +impl<'a> ToString for JsonVisitor<'a> { + fn to_string(&self) -> String { + serde_json::to_string(&self.values).unwrap() + } +} + +pub(crate) struct 
CallbackLayer {
+    callback: LogCallback,
+}
+
+impl CallbackLayer {
+    pub fn new(callback: LogCallback) -> Self {
+        CallbackLayer { callback }
+    }
+}
+
+// A tracing layer for sending logs to a js callback; layers are composable, subscribers are not.
+impl<S: Subscriber> Layer<S> for CallbackLayer {
+    fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) {
+        let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target());
+        event.record(&mut visitor);
+
+        let _ = self
+            .callback
+            .0
+            .call1(&JsValue::NULL, &JsValue::from_str(&visitor.to_string()));
+    }
+}
diff --git a/query-engine/query-engine-wasm/src/proxy.rs b/query-engine/query-engine-wasm/src/proxy.rs
new file mode 100644
index 000000000000..ad028e218236
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/proxy.rs
@@ -0,0 +1,107 @@
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+// This code will likely live in a separate crate, but for now it's here.
+
+use async_trait::async_trait;
+use js_sys::{Function as JsFunction, JsString, Object as JsObject, Promise as JsPromise, Reflect as JsReflect};
+use serde::{de::DeserializeOwned, Serialize};
+use wasm_bindgen::{JsCast, JsValue};
+
+type Result<T> = std::result::Result<T, JsValue>;
+
+pub struct CommonProxy {
+    /// Execute a query given as SQL, interpolating the given parameters.
+    query_raw: JsFunction,
+
+    /// Execute a query given as SQL, interpolating the given parameters and
+    /// returning the number of affected rows.
+    execute_raw: JsFunction,
+
+    /// Return the flavour for this driver.
+    pub(crate) flavour: String,
+}
+
+impl CommonProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let query_raw = JsReflect::get(driver, &"queryRaw".into())?.dyn_into::<JsFunction>()?;
+        let execute_raw = JsReflect::get(driver, &"executeRaw".into())?.dyn_into::<JsFunction>()?;
+        let flavour: String = JsReflect::get(driver, &"flavour".into())?
+            .dyn_into::<JsString>()?
+ .into(); + + let common_proxy = Self { + query_raw, + execute_raw, + flavour, + }; + Ok(common_proxy) + } +} + +pub struct DriverProxy { + start_transaction: JsFunction, +} + +impl DriverProxy { + pub(crate) fn new(driver: &JsObject) -> Result { + let start_transaction = JsReflect::get(driver, &"startTransaction".into())?.dyn_into::()?; + + let driver_proxy = Self { start_transaction }; + Ok(driver_proxy) + } +} + +pub struct JsQueryable { + inner: CommonProxy, + driver_proxy: DriverProxy, +} + +impl JsQueryable { + pub fn new(inner: CommonProxy, driver_proxy: DriverProxy) -> Self { + Self { inner, driver_proxy } + } +} + +pub fn from_wasm(driver: JsObject) -> Result { + let common_proxy = CommonProxy::new(&driver)?; + let driver_proxy = DriverProxy::new(&driver)?; + + let js_queryable = JsQueryable::new(common_proxy, driver_proxy); + Ok(js_queryable) +} + +#[async_trait(?Send)] +trait JsAsyncFunc { + async fn call1_async(&self, arg1: T) -> Result + where + T: Serialize, + R: DeserializeOwned; + + fn call0_sync(&self) -> Result + where + R: DeserializeOwned; +} + +#[async_trait(?Send)] +impl JsAsyncFunc for JsFunction { + async fn call1_async(&self, arg1: T) -> Result + where + T: Serialize, + R: DeserializeOwned, + { + let arg1 = serde_wasm_bindgen::to_value(&arg1).map_err(|err| js_sys::Error::new(&err.to_string()))?; + let promise = self.call1(&JsValue::null(), &arg1)?; + let future = wasm_bindgen_futures::JsFuture::from(JsPromise::from(promise)); + let value = future.await?; + serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string())) + } + + fn call0_sync(&self) -> Result + where + R: DeserializeOwned, + { + let value = self.call0(&JsValue::null())?; + serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string())) + } +} diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs index 75543dc7ee58..f3583df310d7 100644 --- a/query-engine/query-engine/src/server/mod.rs +++ b/query-engine/query-engine/src/server/mod.rs @@ -63,7 +63,7 @@ pub(crate) async fn routes(cx: Arc, req: Request) -> Result let mut res = match (req.method(), req.uri().path()) { (&Method::POST, "/") => request_handler(cx, req).await?, (&Method::GET, "/") if cx.enabled_features.contains(Feature::Playground) => playground_handler(), - (&Method::GET, "/status") => build_json_response(StatusCode::OK, r#"{"status":"ok"}"#), + (&Method::GET, "/status") => build_json_response(StatusCode::OK, &json!({"status": "ok"})), (&Method::GET, "/sdl") => { let schema = render_graphql_schema(cx.query_schema()); diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs index 6d4dec482285..652ad3108f0d 100644 --- a/query-engine/request-handlers/src/load_executor.rs +++ b/query-engine/request-handlers/src/load_executor.rs @@ -2,6 +2,7 @@ use psl::{builtin_connectors::*, Datasource, PreviewFeatures}; use query_core::{executor::InterpretingExecutor, Connector, QueryExecutor}; use sql_query_connector::*; use std::collections::HashMap; +use std::env; use tracing::trace; use url::Url; @@ -17,24 +18,38 @@ pub async fn load( features: PreviewFeatures, url: &str, ) -> query_core::Result> { - if connector_mode == ConnectorMode::Js { - #[cfg(feature = "driver-adapters")] - return driver_adapter(source, url, features).await; - } + match connector_mode { + ConnectorMode::Js => { + #[cfg(not(feature = "driver-adapters"))] + panic!("Driver adapters are not enabled, but connector mode is set to 
JS"); + + #[cfg(feature = "driver-adapters")] + driver_adapter(source, url, features).await + } + + ConnectorMode::Rust => { + if let Ok(value) = env::var("PRISMA_DISABLE_QUAINT_EXECUTORS") { + let disable = value.to_uppercase(); + if disable == "TRUE" || disable == "1" { + panic!("Quaint executors are disabled, as per env var PRISMA_DISABLE_QUAINT_EXECUTORS."); + } + } - match source.active_provider { - p if SQLITE.is_provider(p) => sqlite(source, url, features).await, - p if MYSQL.is_provider(p) => mysql(source, url, features).await, - p if POSTGRES.is_provider(p) => postgres(source, url, features).await, - p if MSSQL.is_provider(p) => mssql(source, url, features).await, - p if COCKROACH.is_provider(p) => postgres(source, url, features).await, + match source.active_provider { + p if SQLITE.is_provider(p) => sqlite(source, url, features).await, + p if MYSQL.is_provider(p) => mysql(source, url, features).await, + p if POSTGRES.is_provider(p) => postgres(source, url, features).await, + p if MSSQL.is_provider(p) => mssql(source, url, features).await, + p if COCKROACH.is_provider(p) => postgres(source, url, features).await, - #[cfg(feature = "mongodb")] - p if MONGODB.is_provider(p) => mongodb(source, url, features).await, + #[cfg(feature = "mongodb")] + p if MONGODB.is_provider(p) => mongodb(source, url, features).await, - x => Err(query_core::CoreError::ConfigurationError(format!( - "Unsupported connector type: {x}" - ))), + x => Err(query_core::CoreError::ConfigurationError(format!( + "Unsupported connector type: {x}" + ))), + } + } } } diff --git a/query-engine/request-handlers/src/response.rs b/query-engine/request-handlers/src/response.rs index af99835813e8..a196daade4be 100644 --- a/query-engine/request-handlers/src/response.rs +++ b/query-engine/request-handlers/src/response.rs @@ -9,26 +9,26 @@ use crate::HandlerError; #[derive(Debug, serde::Serialize, Default, PartialEq)] pub struct GQLResponse { #[serde(skip_serializing_if = "IndexMap::is_empty")] - data: Map, + pub data: Map, #[serde(skip_serializing_if = "Vec::is_empty")] - errors: Vec, + pub errors: Vec, #[serde(skip_serializing_if = "IndexMap::is_empty")] - extensions: Map, + pub extensions: Map, } #[derive(Debug, serde::Serialize, Default, PartialEq)] #[serde(rename_all = "camelCase")] pub struct GQLBatchResponse { #[serde(skip_serializing_if = "Vec::is_empty")] - batch_result: Vec, + pub batch_result: Vec, #[serde(skip_serializing_if = "Vec::is_empty")] - errors: Vec, + pub errors: Vec, #[serde(skip_serializing_if = "IndexMap::is_empty")] - extensions: Map, + pub extensions: Map, } #[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq)] diff --git a/query-engine/schema/src/build/input_types/mod.rs b/query-engine/schema/src/build/input_types/mod.rs index f34da22f40af..98c8caa84a38 100644 --- a/query-engine/schema/src/build/input_types/mod.rs +++ b/query-engine/schema/src/build/input_types/mod.rs @@ -2,7 +2,6 @@ pub(crate) mod fields; pub(crate) mod objects; use super::*; -use crate::*; use fields::*; use prisma_models::ScalarFieldRef; diff --git a/query-engine/schema/src/build/input_types/objects/filter_objects.rs b/query-engine/schema/src/build/input_types/objects/filter_objects.rs index b8af982182a1..0ea555f77724 100644 --- a/query-engine/schema/src/build/input_types/objects/filter_objects.rs +++ b/query-engine/schema/src/build/input_types/objects/filter_objects.rs @@ -113,6 +113,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In .indexes() .filter(|idx| idx.is_unique()) 
.filter(|index| index.fields().len() > 1) + .filter(|index| !index.fields().any(|f| f.is_unsupported())) .map(|index| { let fields = index .fields() @@ -130,6 +131,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In .walk(model.id) .primary_key() .filter(|pk| pk.fields().len() > 1) + .filter(|pk| !pk.fields().any(|f| f.is_unsupported())) .map(|pk| { let name = compound_id_field_name(pk); let fields = model.fields().id_fields().unwrap().collect(); diff --git a/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs b/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs index 30e75007f2ba..d342e0e89f06 100644 --- a/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs +++ b/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs @@ -530,7 +530,7 @@ impl<'a> Statistics<'a> { sampler.types.insert(FieldType::Document(type_name.clone()), 1); let key = Name::CompositeType(type_name); - self.models.entry(key).or_insert_with(Default::default); + self.models.entry(key).or_default(); sampler } else { diff --git a/schema-engine/connectors/schema-connector/src/introspection_context.rs b/schema-engine/connectors/schema-connector/src/introspection_context.rs index 6d5e3319cb21..54f197935bd3 100644 --- a/schema-engine/connectors/schema-connector/src/introspection_context.rs +++ b/schema-engine/connectors/schema-connector/src/introspection_context.rs @@ -116,7 +116,7 @@ impl From for CompositeTypeDepth { fn from(size: isize) -> Self { match size { size if size < 0 => Self::Infinite, - size if size == 0 => Self::None, + 0 => Self::None, _ => Self::Level(size as usize), } } diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs b/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs index d8620ff18eeb..4f96aea6fd69 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs @@ -366,7 +366,7 @@ fn render_mysql_modify( .map(|default| render_default(next_column, default)) .filter(|expr| !expr.is_empty()) .map(|expression| format!(" DEFAULT {expression}")) - .unwrap_or_else(String::new); + .unwrap_or_default(); format!( "MODIFY {column_name} {column_type}{nullability}{default}{sequence}", diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs index 059e382ddae8..829531a14291 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs @@ -164,7 +164,7 @@ pub(crate) struct ColumnChanges { impl PartialOrd for ColumnChanges { fn partial_cmp(&self, other: &Self) -> Option { - self.changes.bits().partial_cmp(&other.changes.bits()) + Some(self.cmp(other)) } } diff --git a/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs b/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs index 3acb21c9af25..3f486f34163b 100644 --- a/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs +++ b/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs @@ -1,4 +1,4 @@ -use quaint::{connector::ResultRowRef, prelude::ResultSet, Value}; +use quaint::{connector::ResultRowRef, prelude::ResultSet, Value, ValueType}; pub trait 
ResultSetExt: Sized { fn assert_row_count(self, expected_count: usize) -> Self; @@ -34,8 +34,8 @@ pub struct RowAssertion<'a>(ResultRowRef<'a>); impl<'a> RowAssertion<'a> { pub fn assert_array_value(self, column_name: &str, expected_value: &[Value<'_>]) -> Self { - let actual_value = self.0.get(column_name).and_then(|col: &Value<'_>| match col { - Value::Array(x) => x.as_ref(), + let actual_value = self.0.get(column_name).and_then(|col: &Value<'_>| match &col.typed { + ValueType::Array(x) => x.as_ref(), _ => panic!("as_array"), }); @@ -87,8 +87,9 @@ impl<'a> RowAssertion<'a> { #[track_caller] pub fn assert_text_value(self, column_name: &str, expected_value: &str) -> Self { let value = self.0.get(column_name).expect("Expected a value, found none"); - let value_text: &str = match value { - Value::Text(val) | Value::Enum(val) => val.as_deref(), + let value_text: &str = match &value.typed { + ValueType::Text(val) => val.as_deref(), + ValueType::Enum(val, _) => val.as_deref(), _ => None, } .expect("Expected a string value"); diff --git a/schema-engine/sql-migration-tests/tests/existing_data/mod.rs b/schema-engine/sql-migration-tests/tests/existing_data/mod.rs index 461214dd62ae..bed7b8fc80ca 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/mod.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/mod.rs @@ -334,7 +334,7 @@ fn changing_a_column_from_optional_to_required_is_unexecutable(api: TestApi) { let insert = Insert::multi_into(api.render_table_name("Test"), ["id", "age"]) .values(("a", 12)) .values(("b", 22)) - .values(("c", Value::Int32(None))); + .values(("c", ValueType::Int32(None))); api.query(insert.into()); @@ -756,10 +756,9 @@ fn set_default_current_timestamp_on_existing_column_works(api: TestApi) { api.schema_push_w_datasource(dm1).send().assert_green(); - let insert = Insert::single_into(api.render_table_name("User")).value("id", 5).value( - "created_at", - Value::DateTime(Some("2020-06-15T14:50:00Z".parse().unwrap())), - ); + let insert = Insert::single_into(api.render_table_name("User")) + .value("id", 5) + .value("created_at", Value::datetime("2020-06-15T14:50:00Z".parse().unwrap())); api.query(insert.into()); let dm2 = r#" diff --git a/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs b/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs index 8b3f0cf608c3..718b34a3230b 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs @@ -92,8 +92,8 @@ fn making_an_optional_field_required_with_data_with_a_default_works(api: TestApi .map(|row| row.into_iter().collect::>()) .collect::>(), &[ - &[Value::text("abc"), Value::text("george"), Value::integer(84)], - &[Value::text("def"), Value::text("X Æ A-12"), Value::integer(7)], + &[Value::text("abc"), Value::text("george"), Value::int32(84)], + &[Value::text("def"), Value::text("X Æ A-12"), Value::int32(7)], ] ); } @@ -151,7 +151,7 @@ fn making_an_optional_field_required_with_data_with_a_default_is_unexecutable(ap .map(|row| row.into_iter().collect::>()) .collect::>(), &[ - &[Value::text("abc"), Value::text("george"), Value::Int32(None)], + &[Value::text("abc"), Value::text("george"), Value::null_int32()], &[Value::text("def"), Value::text("X Æ A-12"), Value::int32(7)], ] ); diff --git 
a/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs b/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs index f87d9f931a28..4485ad4de719 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs @@ -1,4 +1,4 @@ -use quaint::{prelude::Insert, Value}; +use quaint::{prelude::Insert, ValueType}; use sql_migration_tests::test_api::*; use sql_schema_describer::DefaultValue; @@ -16,7 +16,7 @@ fn changing_a_column_from_optional_to_required_with_a_default_is_safe(api: TestA let insert = Insert::multi_into(api.render_table_name("Test"), ["id", "age"]) .values(("a", 12)) .values(("b", 22)) - .values(("c", Value::Int32(None))); + .values(("c", ValueType::Int32(None))); api.query(insert.into()); diff --git a/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs b/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs index 56d63f7860b3..2a77f3a29eba 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs @@ -101,7 +101,7 @@ fn changing_a_string_array_column_to_scalar_is_fine(api: TestApi) { .value("id", "film1") .value( "mainProtagonist", - Value::Array(Some(vec!["giant shark".into(), "jason statham".into()])), + Value::array(vec![Value::text("giant shark"), Value::text("jason statham")]), ) .result_raw(); @@ -138,7 +138,7 @@ fn changing_an_int_array_column_to_scalar_is_not_possible(api: TestApi) { api.insert("Film") .value("id", "film1") - .value("mainProtagonist", Value::Array(Some(vec![7.into(), 11.into()]))) + .value("mainProtagonist", Value::array(vec![Value::int32(7), Value::int32(11)])) .result_raw(); let dm2 = r#" diff --git a/schema-engine/sql-migration-tests/tests/migrations/sql.rs b/schema-engine/sql-migration-tests/tests/migrations/sql.rs index 36486e7c18a6..8f87115db4dc 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/sql.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/sql.rs @@ -244,17 +244,17 @@ fn enum_defaults_must_work(api: TestApi) { assert_eq!(row.get("id").unwrap().to_string().unwrap(), "the-id"); assert_eq!( - match row.get("mood").unwrap() { - quaint::Value::Enum(Some(enm)) => enm.as_ref(), - quaint::Value::Text(Some(enm)) => enm.as_ref(), + match &row.get("mood").unwrap().typed { + quaint::ValueType::Enum(Some(enm), _) => enm.as_ref(), + quaint::ValueType::Text(Some(enm)) => enm.as_ref(), _ => panic!("mood is not an enum value"), }, "hongry" ); assert_eq!( - match row.get("previousMood").unwrap() { - quaint::Value::Enum(Some(enm)) => enm.as_ref(), - quaint::Value::Text(Some(enm)) => enm.as_ref(), + match &row.get("previousMood").unwrap().typed { + quaint::ValueType::Enum(Some(enm), _) => enm.as_ref(), + quaint::ValueType::Text(Some(enm)) => enm.as_ref(), _ => panic!("previousMood is not an enum value"), }, "ANGRY" diff --git a/schema-engine/sql-migration-tests/tests/native_types/mssql.rs b/schema-engine/sql-migration-tests/tests/native_types/mssql.rs index 32ac24688601..83d988acb176 100644 --- a/schema-engine/sql-migration-tests/tests/native_types/mssql.rs +++ b/schema-engine/sql-migration-tests/tests/native_types/mssql.rs @@ -43,7 +43,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "TinyInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "SmallInt", "Int", @@ -69,7 +69,7 @@ static 
SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "SmallInt", - Value::integer(i16::MAX), + Value::int32(i16::MAX), &[ "Int", "BigInt", @@ -92,7 +92,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "Int", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "BigInt", "Decimal", @@ -423,12 +423,12 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| { vec![ ( "TinyInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &["Decimal(2,0)", "Char(2)", "NChar(2)", "VarChar(2)", "NVarChar(2)"], ), ( "SmallInt", - Value::integer(i16::MAX), + Value::int32(i16::MAX), &[ "Bit", "TinyInt", @@ -443,7 +443,7 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| { ), ( "Int", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Bit", "TinyInt", @@ -468,7 +468,7 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| { ), ( "BigInt", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Bit", "TinyInt", @@ -1402,7 +1402,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "TinyInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "Date", "Time", @@ -1417,7 +1417,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "SmallInt", - Value::integer(i16::MAX), + Value::int32(i16::MAX), &[ "Date", "Time", @@ -1432,7 +1432,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "Int", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Date", "Time", diff --git a/schema-engine/sql-migration-tests/tests/native_types/mysql.rs b/schema-engine/sql-migration-tests/tests/native_types/mysql.rs index 9144313af8ed..d8cf62f5767c 100644 --- a/schema-engine/sql-migration-tests/tests/native_types/mysql.rs +++ b/schema-engine/sql-migration-tests/tests/native_types/mysql.rs @@ -2,13 +2,13 @@ use sql_migration_tests::test_api::*; use std::{borrow::Cow, fmt::Write}; /// (source native type, test value to insert, target native type) -type Case = (&'static str, quaint::Value<'static>, &'static [&'static str]); +type Case = (&'static str, quaint::ValueType<'static>, &'static [&'static str]); type Cases = &'static [Case]; const SAFE_CASTS: Cases = &[ ( "BigInt", - quaint::Value::Int64(Some(99999999432)), + quaint::ValueType::Int64(Some(99999999432)), &[ "Binary(200)", "Bit(54)", @@ -30,7 +30,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Binary(8)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"08088044"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"08088044"))), &[ "Bit(64)", "Blob", @@ -51,7 +51,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Int", - quaint::Value::Int32(Some(i32::MIN)), + quaint::ValueType::Int32(Some(i32::MIN)), &[ "BigInt", "Char(20)", @@ -64,7 +64,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Bit(32)", - quaint::Value::Bytes(Some(Cow::Borrowed(b""))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b""))), &[ "SmallInt", "UnsignedSmallInt", @@ -86,12 +86,12 @@ const SAFE_CASTS: Cases = &[ ), ( "Blob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["TinyBlob", "MediumBlob", "LongBlob"], ), ( "Char(10)", - quaint::Value::Text(Some(Cow::Borrowed("1234"))), + quaint::ValueType::Text(Some(Cow::Borrowed("1234"))), &[ "Blob", "Char(11)", @@ -107,7 +107,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Date", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-12"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-12"))), &[ "DateTime(0)", "Decimal(10,0)", @@ -131,7 +131,7 @@ const SAFE_CASTS: Cases = &[ ), ( "DateTime(0)", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), &[ "BigInt", 
"UnsignedBigInt", @@ -144,7 +144,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Double", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &[ "Float", "Bit(64)", @@ -169,7 +169,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Float", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &[ "Double", "Bit(32)", @@ -196,7 +196,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Json", - quaint::Value::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), + quaint::ValueType::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), &[ // To string "Binary(10)", @@ -211,22 +211,22 @@ const SAFE_CASTS: Cases = &[ ), ( "LongBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["TinyBlob", "Blob", "MediumBlob"], ), ( "MediumBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["TinyBlob", "Blob", "LongBlob"], ), ( "TinyBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["LongBlob", "Blob", "MediumBlob"], ), ( "Time", - quaint::Value::Int32(Some(20)), + quaint::ValueType::Int32(Some(20)), &[ "VarChar(20)", "BigInt", @@ -238,7 +238,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Year", - quaint::Value::Int32(Some(2000)), + quaint::ValueType::Int32(Some(2000)), &[ // To string "Binary(10)", @@ -272,7 +272,7 @@ const SAFE_CASTS: Cases = &[ const RISKY_CASTS: Cases = &[ ( "BigInt", - quaint::Value::Int64(Some(100)), + quaint::ValueType::Int64(Some(100)), &[ "Int", "MediumInt", @@ -285,30 +285,30 @@ const RISKY_CASTS: Cases = &[ "UnsignedTinyInt", ], ), - ("BigInt", quaint::Value::Int64(Some(2000)), &["Year"]), + ("BigInt", quaint::ValueType::Int64(Some(2000)), &["Year"]), ( "Binary(8)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"08088044"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"08088044"))), &["Bit(32)", "Int", "UnsignedBigInt", "UnsignedInt", "UnsignedMediumInt"], ), ( "Binary(1)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"0"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"0"))), &["Time(0)", "SmallInt", "TinyInt", "UnsignedSmallInt", "UnsignedTinyInt"], ), ( "Binary(4)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"2000"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"2000"))), &["Year"], ), ( "Bit(32)", - quaint::Value::Bytes(Some(Cow::Borrowed(b""))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b""))), &["Decimal(10,2)", "Double", "Float"], ), ( "Blob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -322,22 +322,22 @@ const RISKY_CASTS: Cases = &[ ), ( "Decimal(20,5)", - quaint::Value::Text(Some(Cow::Borrowed("350"))), + quaint::ValueType::Text(Some(Cow::Borrowed("350"))), &["BigInt", "UnsignedBigInt", "Time(0)", "Json"], ), ( "Double", - quaint::Value::Float(Some(0f32)), + quaint::ValueType::Float(Some(0f32)), &["Char(40)", "VarBinary(40)", "VarChar(40)"], ), ( "Float", - quaint::Value::Float(Some(0f32)), + quaint::ValueType::Float(Some(0f32)), &["Char(40)", "VarBinary(40)", "VarChar(40)"], ), ( "LongBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -351,7 +351,7 @@ const RISKY_CASTS: Cases = &[ ), ( "MediumBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -363,10 +363,10 @@ const RISKY_CASTS: 
Cases = &[ "VarChar(20)", ], ), - ("SmallInt", quaint::Value::Int32(Some(1990)), &["Year", "Double"]), + ("SmallInt", quaint::ValueType::Int32(Some(1990)), &["Year", "Double"]), ( "TinyBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -380,12 +380,12 @@ const RISKY_CASTS: Cases = &[ ), ( "Time(0)", - quaint::Value::Int32(Some(5002)), + quaint::ValueType::Int32(Some(5002)), &["Date", "DateTime(0)", "Timestamp(0)"], ), ( "Year", - quaint::Value::Text(Some(Cow::Borrowed("1999"))), + quaint::ValueType::Text(Some(Cow::Borrowed("1999"))), &["Decimal(10,0)", "Json"], ), ]; @@ -393,22 +393,22 @@ const RISKY_CASTS: Cases = &[ const IMPOSSIBLE_CASTS: Cases = &[ ( "BigInt", - quaint::Value::Int64(Some(500)), + quaint::ValueType::Int64(Some(500)), &["Decimal(15,6)", "Date", "DateTime(0)", "Json", "Timestamp(0)"], ), ( "Binary(12)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"8080008"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"8080008"))), &["Date", "DateTime(0)", "Json", "Timestamp(0)"], ), ( "Bit(32)", - quaint::Value::Bytes(Some(Cow::Borrowed(b""))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b""))), &["Date", "DateTime(0)", "Time(0)", "Timestamp(0)", "Json"], ), ( "Blob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -433,7 +433,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "Date", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-12"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-12"))), &[ "TinyInt", "UnsignedTinyInt", @@ -446,7 +446,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "DateTime(0)", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), &[ "TinyInt", "UnsignedTinyInt", @@ -461,17 +461,17 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "Double", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &["Binary(10)", "Date", "Timestamp(0)", "DateTime(0)"], ), ( "Float", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &["Binary(10)", "Date", "Timestamp(0)", "DateTime(0)"], ), ( "Json", - quaint::Value::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), + quaint::ValueType::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), &[ // Integer types "Bit(64)", @@ -490,7 +490,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "LongBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -515,7 +515,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "MediumBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -538,10 +538,10 @@ const IMPOSSIBLE_CASTS: Cases = &[ "Year", ], ), - ("Time(0)", quaint::Value::Int32(Some(0)), &["Json", "Year"]), + ("Time(0)", quaint::ValueType::Int32(Some(0)), &["Json", "Year"]), ( "TinyBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -566,7 +566,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "Year", - quaint::Value::Int32(Some(2001)), + quaint::ValueType::Int32(Some(2001)), &[ "TinyInt", "UnsignedTinyInt", @@ -638,7 +638,7 @@ fn colnames_for_cases(cases: Cases) -> Vec { fn expand_cases<'a, 'b>( from_type: &str, - test_value: &'a quaint::Value, + test_value: &'a quaint::ValueType<'a>, 
(to_types, nullable): (&[&str], bool), dm1: &'b mut String, dm2: &'b mut String, diff --git a/schema-engine/sql-migration-tests/tests/native_types/postgres.rs b/schema-engine/sql-migration-tests/tests/native_types/postgres.rs index 1e114f147e5d..24fa4a559744 100644 --- a/schema-engine/sql-migration-tests/tests/native_types/postgres.rs +++ b/schema-engine/sql-migration-tests/tests/native_types/postgres.rs @@ -7,12 +7,12 @@ use std::{collections::HashMap, fmt::Write as _, str::FromStr}; static SAFE_CASTS: Lazy> = Lazy::new(|| { vec![ - ("Oid", Value::integer(u8::MAX), &["VarChar(100)", "Integer", "BigInt"]), + ("Oid", Value::int32(u8::MAX), &["VarChar(100)", "Integer", "BigInt"]), ("Money", Value::int64(u8::MAX), &["VarChar(100)"]), ("Inet", Value::text("10.1.2.3"), &["VarChar(100)"]), ( "SmallInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "SmallInt", "Integer", @@ -26,7 +26,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "Integer", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Integer", "BigInt", @@ -67,7 +67,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "DoublePrecision", - Value::Double(Some(f64::MIN)), + Value::double(f64::MIN), &["DoublePrecision", "Text", "VarChar", "Char(1000)"], ), ("VarChar", Value::text("fiver"), &["Text"]), @@ -155,13 +155,9 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { static RISKY_CASTS: Lazy> = Lazy::new(|| { vec![ ("Money", Value::int64(u8::MAX), &["Decimal"]), - ( - "SmallInt", - Value::integer(2), - &["Decimal(2,1)", "VarChar(3)", "Char(1)"], - ), - ("Integer", Value::integer(1), &["Decimal(2,1)", "VarChar(4)", "Char(1)"]), - ("BigInt", Value::integer(2), &["Decimal(2,1)", "VarChar(17)", "Char(1)"]), + ("SmallInt", Value::int32(2), &["Decimal(2,1)", "VarChar(3)", "Char(1)"]), + ("Integer", Value::int32(1), &["Decimal(2,1)", "VarChar(4)", "Char(1)"]), + ("BigInt", Value::int32(2), &["Decimal(2,1)", "VarChar(17)", "Char(1)"]), ( "Decimal(10,2)", Value::numeric(BigDecimal::from_str("1").unwrap()), @@ -227,7 +223,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { vec![ ( "SmallInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "ByteA", "Timestamp(3)", @@ -246,7 +242,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "Integer", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "ByteA", "Timestamp(3)", @@ -1076,7 +1072,7 @@ static SAFE_CASTS_NON_LIST_TO_STRING: CastList = Lazy::new(|| { Value::array(vec![Value::numeric(BigDecimal::from_str("128.90").unwrap())]), ), ("Real", Value::array(vec![Value::float(f32::MIN)])), - ("DoublePrecision", Value::array(vec![Value::Double(Some(f64::MIN))])), + ("DoublePrecision", Value::array(vec![Value::double(f64::MIN)])), ("VarChar", Value::array(vec!["test"])), ("Char(1)", Value::array(vec!["a"])), ("Text", Value::array(vec!["text"])), @@ -1115,7 +1111,7 @@ static SAFE_CASTS_NON_LIST_TO_STRING: CastList = Lazy::new(|| { Value::array(vec![Value::numeric(BigDecimal::from_str("128.90").unwrap())]), ), ("Real", Value::array(vec![Value::float(f32::MIN)])), - ("DoublePrecision", Value::array(vec![Value::Double(Some(f64::MIN))])), + ("DoublePrecision", Value::array(vec![Value::double(f64::MIN)])), ("VarChar", Value::array(vec!["test"])), ("Char(1)", Value::array(vec!["a"])), ("Text", Value::array(vec!["text"])), diff --git a/schema-engine/sql-schema-describer/src/postgres.rs b/schema-engine/sql-schema-describer/src/postgres.rs index 10a6bd76cb53..211cf8da489a 100644 --- a/schema-engine/sql-schema-describer/src/postgres.rs +++ b/schema-engine/sql-schema-describer/src/postgres.rs @@ 
-16,7 +16,7 @@ use psl::{ builtin_connectors::{CockroachType, PostgresType}, datamodel_connector::NativeTypeInstance, }; -use quaint::{connector::ResultRow, prelude::Queryable, Value::Array}; +use quaint::{connector::ResultRow, prelude::Queryable, Value}; use regex::Regex; use std::{ any::type_name, @@ -663,13 +663,7 @@ impl<'a> SqlSchemaDescriber<'a> { WHERE n.nspname = ANY ( $1 ) "#; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut procedures = Vec::with_capacity(rows.len()); @@ -691,10 +685,7 @@ impl<'a> SqlSchemaDescriber<'a> { async fn get_namespaces(&self, sql_schema: &mut SqlSchema, namespaces: &[&str]) -> DescriberResult<()> { let sql = include_str!("postgres/namespaces_query.sql"); - let rows = self - .conn - .query_raw(sql, &[Array(Some(namespaces.iter().map(|s| (*s).into()).collect()))]) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let names = rows.into_iter().map(|row| (row.get_expect_string("namespace_name"))); @@ -718,13 +709,7 @@ impl<'a> SqlSchemaDescriber<'a> { let namespaces = &sql_schema.namespaces; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut names = Vec::with_capacity(rows.len()); @@ -826,13 +811,7 @@ impl<'a> SqlSchemaDescriber<'a> { WHERE schemaname = ANY ( $1 ) "#}; - let result_set = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let result_set = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; for row in result_set.into_iter() { let name = row.get_expect_string("view_name"); @@ -886,6 +865,7 @@ impl<'a> SqlSchemaDescriber<'a> { FROM pg_class JOIN pg_namespace on pg_namespace.oid = pg_class.relnamespace AND pg_namespace.nspname = ANY ( $1 ) + WHERE reltype > 0 ) as oid on oid.oid = att.attrelid AND relname = info.table_name AND namespace = info.table_schema @@ -895,13 +875,7 @@ impl<'a> SqlSchemaDescriber<'a> { "# ); - let rows = self - .conn - .query_raw( - sql.as_str(), - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql.as_str(), &[Value::array(namespaces)]).await?; for col in rows { let namespace = col.get_expect_string("namespace"); @@ -1140,13 +1114,7 @@ impl<'a> SqlSchemaDescriber<'a> { // One foreign key with multiple columns will be represented here as several // rows with the same ID. 
- let result_set = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let result_set = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; for row in result_set.into_iter() { trace!("Got description FK row {:?}", row); @@ -1253,13 +1221,7 @@ impl<'a> SqlSchemaDescriber<'a> { let namespaces = &sql_schema.namespaces; let sql = include_str!("postgres/constraints_query.sql"); - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; for row in rows { let namespace = row.get_expect_string("namespace"); @@ -1297,13 +1259,7 @@ impl<'a> SqlSchemaDescriber<'a> { ) -> DescriberResult<()> { let namespaces = &sql_schema.namespaces; let sql = include_str!("postgres/indexes_query.sql"); - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut result_rows = Vec::new(); let mut index_rows = rows.into_iter().peekable(); @@ -1373,13 +1329,7 @@ impl<'a> SqlSchemaDescriber<'a> { "# }; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let sequences = rows.into_iter().map(|seq| Sequence { namespace_id: sql_schema .get_namespace_id(&seq.get_expect_string("namespace")) @@ -1413,13 +1363,7 @@ impl<'a> SqlSchemaDescriber<'a> { WHERE n.nspname = ANY ( $1 ) ORDER BY e.enumsortorder"; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut enum_values: BTreeMap<(NamespaceId, String, Option), Vec> = BTreeMap::new(); for row in rows.into_iter() { @@ -1429,9 +1373,7 @@ impl<'a> SqlSchemaDescriber<'a> { let description = row.get_string("description"); let namespace_id = sql_schema.get_namespace_id(&namespace).unwrap(); - let values = enum_values - .entry((namespace_id, name, description)) - .or_insert_with(Vec::new); + let values = enum_values.entry((namespace_id, name, description)).or_default(); values.push(value); } diff --git a/schema-engine/sql-schema-describer/src/sqlite.rs b/schema-engine/sql-schema-describer/src/sqlite.rs index 3073be2b4daa..1f28958605a2 100644 --- a/schema-engine/sql-schema-describer/src/sqlite.rs +++ b/schema-engine/sql-schema-describer/src/sqlite.rs @@ -8,7 +8,7 @@ use crate::{ use either::Either; use indexmap::IndexMap; use quaint::{ - ast::Value, + ast::{Value, ValueType}, connector::{GetRow, ToColumnNames}, prelude::ResultRow, }; @@ -345,7 +345,10 @@ async fn push_columns( let default = match row.get("dflt_value") { None => None, Some(val) if val.is_null() => None, - Some(Value::Text(Some(cow_string))) => { + Some(Value { + typed: ValueType::Text(Some(cow_string)), + .. + }) => { let default_string = cow_string.to_string(); if default_string.to_lowercase() == "null" {