diff --git a/.github/workflows/build-prisma-schema-wasm.yml b/.github/workflows/build-prisma-schema-wasm.yml index f52db55654e7..dad4877d137d 100644 --- a/.github/workflows/build-prisma-schema-wasm.yml +++ b/.github/workflows/build-prisma-schema-wasm.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: cachix/install-nix-action@v24 - - run: nix build .#prisma-schema-wasm - - run: nix flake check + - uses: ./.github/workflows/include/rust-wasm-setup + + - run: make check-schema-wasm-package PROFILE=release diff --git a/.github/workflows/include/rust-wasm-setup/action.yml b/.github/workflows/include/rust-wasm-setup/action.yml new file mode 100644 index 000000000000..5a22bc1bf3cd --- /dev/null +++ b/.github/workflows/include/rust-wasm-setup/action.yml @@ -0,0 +1,25 @@ +name: Rust + WASM common deps + +runs: + using: "composite" + steps: + - name: Set default toolchain + shell: bash + run: rustup default stable + + - name: Add WASM target + shell: bash + run: rustup target add wasm32-unknown-unknown + + - uses: cargo-bins/cargo-binstall@main + + - name: Install wasm-bindgen, wasm-opt + shell: bash + run: | + cargo binstall -y \ + wasm-bindgen-cli@0.2.89 \ + wasm-opt@0.116.0 + + - name: Install bc + shell: bash + run: sudo apt update && sudo apt-get install -y bc diff --git a/.github/workflows/on-push-to-main.yml b/.github/workflows/on-push-to-main.yml index 909862708448..d1ce5822b553 100644 --- a/.github/workflows/on-push-to-main.yml +++ b/.github/workflows/on-push-to-main.yml @@ -27,8 +27,5 @@ jobs: git config user.email "prismabots@gmail.com" git config user.name "prisma-bot" - - name: Generate cargo docs for the workspace to gh-pages branch - run: nix run .#publish-cargo-docs - - name: Publish engines size to gh-pages branch run: nix run .#publish-engine-size diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index c8923944cb4a..9560ebeef3ba 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -32,10 +32,11 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ github.event.inputs.enginesHash }} - - uses: cachix/install-nix-action@v24 + + - uses: ./.github/workflows/include/rust-wasm-setup - name: Build - run: nix build .#prisma-schema-wasm + run: make build-schema-wasm PROFILE=release SCHEMA_WASM_VERSION=${{ github.event.inputs.enginesWrapperVersion }} - uses: actions/setup-node@v4 with: @@ -45,11 +46,9 @@ jobs: - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - - name: Update version in package.json & Publish @prisma/prisma-schema-wasm - run: | - # Update version in package.json and return directory for later usage - PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) - npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} + - name: Publish @prisma/prisma-schema-wasm + run: npm publish --access public --tag ${{ github.event.inputs.npmDistTag }} + working-directory: target/prisma-schema-wasm # # Failure handlers # diff --git a/.github/workflows/publish-query-engine-wasm.yml b/.github/workflows/publish-query-engine-wasm.yml index 608876d56fd0..41d5d8611b15 100644 --- a/.github/workflows/publish-query-engine-wasm.yml +++ b/.github/workflows/publish-query-engine-wasm.yml @@ -9,14 +9,14 @@ on: inputs: packageVersion: required: true - description: 'New 
@prisma/query-engine-wasm package version' + description: "New @prisma/query-engine-wasm package version" enginesHash: required: true - description: 'prisma-engines commit to build' + description: "prisma-engines commit to build" npmDistTag: required: true - default: 'latest' - description: 'npm dist-tag (e.g. latest or integration)' + default: "latest" + description: "npm dist-tag (e.g. latest or integration)" jobs: build: @@ -30,22 +30,24 @@ jobs: with: ref: ${{ github.event.inputs.enginesHash }} - - uses: cachix/install-nix-action@v24 + - uses: ./.github/workflows/include/rust-wasm-setup - name: Build @prisma/query-engine-wasm - run: nix run .#export-query-engine-wasm "${{ github.event.inputs.packageVersion }}" package + run: make build-qe-wasm + env: + QE_WASM_VERSION: ${{ github.event.inputs.packageVersion }} - name: Install Node.js uses: actions/setup-node@v4 with: - node-version: '20.x' + node-version: "20.x" - name: Set up NPM token for publishing run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Publish @prisma/query-engine-wasm run: npm publish --access public --tag ${{ github.event.inputs.npmDistTag }} - working-directory: package + working-directory: query-engine/query-engine-wasm/pkg # # Failure handlers @@ -57,7 +59,7 @@ jobs: if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 env: - SLACK_TITLE: 'Building and publishing @prisma/query-engine-wasm failed :x:' - SLACK_COLOR: '#FF0000' + SLACK_TITLE: "Building and publishing @prisma/query-engine-wasm failed :x:" + SLACK_COLOR: "#FF0000" SLACK_CHANNEL: feed-prisma-query-engine-wasm-publish-failures SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/.github/workflows/test-query-engine-driver-adapters.yml b/.github/workflows/test-query-engine-driver-adapters.yml index 44761da11f77..6362525c053b 100644 --- a/.github/workflows/test-query-engine-driver-adapters.yml +++ b/.github/workflows/test-query-engine-driver-adapters.yml @@ -97,7 +97,7 @@ jobs: echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" fi - - uses: cachix/install-nix-action@v24 + - uses: ./.github/workflows/include/rust-wasm-setup - uses: taiki-e/install-action@nextest - run: make ${{ matrix.adapter.setup_task }} diff --git a/.github/workflows/wasm-benchmarks.yml b/.github/workflows/wasm-benchmarks.yml index 94241517f30f..d0630ff90108 100644 --- a/.github/workflows/wasm-benchmarks.yml +++ b/.github/workflows/wasm-benchmarks.yml @@ -21,6 +21,10 @@ jobs: steps: - name: Checkout PR branch uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - uses: ./.github/workflows/include/rust-wasm-setup - name: "Setup Node.js" uses: actions/setup-node@v4 @@ -30,9 +34,6 @@ jobs: with: version: 8 - - name: Install bc - run: sudo apt update && sudo apt-get install -y bc - - name: "Login to Docker Hub" uses: docker/login-action@v3 continue-on-error: true @@ -43,8 +44,14 @@ jobs: with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - - uses: cachix/install-nix-action@v24 + + - name: Extract Branch Name + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n "$branch" ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi - name: Setup benchmark run: make setup-pg-bench @@ -122,6 +129,9 @@ jobs: - name: Create or update report uses: peter-evans/create-or-update-comment@v3 + # Only run on our repository + # It avoids an expected 
failure on forks + if: github.repository == 'prisma/prisma-engines' with: comment-id: ${{ steps.findReportComment.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/wasm-size.yml b/.github/workflows/wasm-size.yml index 9ea9d7479adc..bfabe08e84ef 100644 --- a/.github/workflows/wasm-size.yml +++ b/.github/workflows/wasm-size.yml @@ -29,22 +29,13 @@ jobs: - name: Checkout PR branch uses: actions/checkout@v4 - - uses: cachix/install-nix-action@v24 - with: - # we need internet access for the moment - extra_nix_config: | - sandbox = false + - uses: ./.github/workflows/include/rust-wasm-setup - name: Build and measure PR branch id: measure run: | - nix build -L .#query-engine-wasm-gz - - for provider in "postgresql" "mysql" "sqlite"; do - echo "${provider}_size=$(wc --bytes < ./result/query-engine-$provider.wasm)" >> $GITHUB_OUTPUT - echo "${provider}_size_gz=$(wc --bytes < ./result/query-engine-$provider.wasm.gz)" >> $GITHUB_OUTPUT - done - + export ENGINE_SIZE_OUTPUT=$GITHUB_OUTPUT + make measure-qe-wasm base-wasm-size: name: calculate module sizes (base branch) @@ -59,23 +50,16 @@ jobs: steps: - name: Checkout base branch uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.base.sha }} + # with: + # ref: ${{ github.event.pull_request.base.sha }} - - uses: cachix/install-nix-action@v24 - with: - extra_nix_config: | - sandbox = false + - uses: ./.github/workflows/include/rust-wasm-setup - name: Build and measure base branch id: measure run: | - nix build -L .#query-engine-wasm-gz - - for provider in "postgresql" "mysql" "sqlite"; do - echo "${provider}_size=$(wc --bytes < ./result/query-engine-$provider.wasm)" >> $GITHUB_OUTPUT - echo "${provider}_size_gz=$(wc --bytes < ./result/query-engine-$provider.wasm.gz)" >> $GITHUB_OUTPUT - done + export ENGINE_SIZE_OUTPUT=$GITHUB_OUTPUT + make measure-qe-wasm report-diff: name: report module size @@ -122,6 +106,9 @@ jobs: - name: Create or update report uses: peter-evans/create-or-update-comment@v3 + # Only run on our repository + # It avoids an expected failure on forks + if: github.repository == 'prisma/prisma-engines' with: comment-id: ${{ steps.findReportComment.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} diff --git a/Makefile b/Makefile index ace0423a7691..e55627f7a436 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,13 @@ +REPO_ROOT := $(shell git rev-parse --show-toplevel) + CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma DRIVER_ADAPTERS_BRANCH ?= main - -ifndef DISABLE_NIX -NIX := $(shell type nix 2> /dev/null) -endif +ENGINE_SIZE_OUTPUT ?= /dev/stdout +QE_WASM_VERSION ?= 0.0.0 +SCHEMA_WASM_VERSION ?= 0.0.0 LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -19,6 +20,20 @@ PROFILE ?= dev default: build +############### +# clean tasks # +############### + +clean-qe-wasm: + @echo "Cleaning query-engine/query-engine-wasm/pkg" && \ + cd query-engine/query-engine-wasm/pkg && find . ! -name '.' ! -name '..' ! 
-name 'README.md' -exec rm -rf {} + + +clean-cargo: + @echo "Cleaning cargo" && \ + cargo clean + +clean: clean-qe-wasm clean-cargo + ################### # script wrappers # ################### @@ -39,6 +54,29 @@ build: build-qe: cargo build --package query-engine +build-qe-napi: + cargo build --package query-engine-node-api --profile $(PROFILE) + +build-qe-wasm: + cd query-engine/query-engine-wasm && \ + ./build.sh $(QE_WASM_VERSION) query-engine/query-engine-wasm/pkg + +build-qe-wasm-gz: build-qe-wasm + @cd query-engine/query-engine-wasm/pkg && \ + for provider in postgresql mysql sqlite; do \ + tar -zcvf $$provider.gz $$provider; \ + done; + +build-schema-wasm: + @printf '%s\n' "🛠️ Building the Rust crate" + cargo build --profile $(PROFILE) --target=wasm32-unknown-unknown -p prisma-schema-build + + @printf '\n%s\n' "📦 Creating the npm package" + WASM_BUILD_PROFILE=$(PROFILE) \ + NPM_PACKAGE_VERSION=$(SCHEMA_WASM_VERSION) \ + out="$(REPO_ROOT)/target/prisma-schema-wasm" \ + ./prisma-schema-wasm/scripts/install.sh + # Emulate pedantic CI compilation. pedantic: RUSTFLAGS="-D warnings" cargo fmt -- --check && RUSTFLAGS="-D warnings" cargo clippy --all-targets @@ -77,6 +115,11 @@ test-qe-verbose-st: test-qe-black-box: build-qe cargo test --package black-box-tests -- --test-threads 1 +check-schema-wasm-package: build-schema-wasm + PRISMA_SCHEMA_WASM="$(REPO_ROOT)/target/prisma-schema-wasm" \ + out=$(shell mktemp -d) \ + NODE=$(shell which node) \ + ./prisma-schema-wasm/scripts/check.sh ########################### # Database setup commands # @@ -326,21 +369,12 @@ test-driver-adapter-planetscale-wasm: test-planetscale-wasm # Local dev commands # ###################### -build-qe-napi: - cargo build --package query-engine-node-api --profile $(PROFILE) - -build-qe-wasm: -ifdef NIX - @echo "Building wasm engine on nix" - rm -rf query-engine/query-engine-wasm/pkg - nix run .#export-query-engine-wasm 0.0.0 query-engine/query-engine-wasm/pkg -else - cd query-engine/query-engine-wasm && ./build.sh 0.0.0 query-engine/query-engine-wasm/pkg -endif - -measure-qe-wasm: build-qe-wasm +measure-qe-wasm: build-qe-wasm-gz @cd query-engine/query-engine-wasm/pkg; \ - gzip -k -c query_engine_bg.wasm | wc -c | awk '{$$1/=(1024*1024); printf "Current wasm query-engine size compressed: %.3fMB\n", $$1}' + for provider in postgresql mysql sqlite; do \ + echo "$${provider}_size=$$(cat $$provider/* | wc -c | tr -d ' ')" >> $(ENGINE_SIZE_OUTPUT); \ + echo "$${provider}_size_gz=$$(cat $$provider.gz | wc -c | tr -d ' ')" >> $(ENGINE_SIZE_OUTPUT); \ + done; build-driver-adapters-kit: build-driver-adapters cd query-engine/driver-adapters && pnpm i && pnpm build diff --git a/flake.lock b/flake.lock index d0bc10636991..7a2ebc464417 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1703439018, - "narHash": "sha256-VT+06ft/x3eMZ1MJxWzQP3zXFGcrxGo5VR2rB7t88hs=", + "lastModified": 1707685877, + "narHash": "sha256-XoXRS+5whotelr1rHiZle5t5hDg9kpguS5yk8c8qzOc=", "owner": "ipetkov", "repo": "crane", - "rev": "afdcd41180e3dfe4dac46b5ee396e3b12ccc967a", + "rev": "2c653e4478476a52c6aa3ac0495e4dea7449ea0e", "type": "github" }, "original": { @@ -27,11 +27,11 @@ ] }, "locked": { - "lastModified": 1704152458, - "narHash": "sha256-DS+dGw7SKygIWf9w4eNBUZsK+4Ug27NwEWmn2tnbycg=", + "lastModified": 1706830856, + "narHash": "sha256-a0NYyp+h9hlb7ddVz4LUn1vT/PLwqfrWYcHMvFB1xYg=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "88a2cd8166694ba0b6cb374700799cec53aef527", + "rev": 
"b253292d9c0a5ead9bc98c4e9a26c6312e27d69f", "type": "github" }, "original": { @@ -47,11 +47,11 @@ ] }, "locked": { - "lastModified": 1701680307, - "narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=", + "lastModified": 1705309234, + "narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=", "owner": "numtide", "repo": "flake-utils", - "rev": "4022d587cbbfd70fe950c1e2083a02621806a725", + "rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26", "type": "github" }, "original": { @@ -82,11 +82,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1703961334, - "narHash": "sha256-M1mV/Cq+pgjk0rt6VxoyyD+O8cOUiai8t9Q6Yyq4noY=", + "lastModified": 1707689078, + "narHash": "sha256-UUGmRa84ZJHpGZ1WZEBEUOzaPOWG8LZ0yPg1pdDF/yM=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "b0d36bd0a420ecee3bc916c91886caca87c894e9", + "rev": "f9d39fb9aff0efee4a3d5f4a6d7c17701d38a1d8", "type": "github" }, "original": { @@ -116,11 +116,11 @@ ] }, "locked": { - "lastModified": 1704075545, - "narHash": "sha256-L3zgOuVKhPjKsVLc3yTm2YJ6+BATyZBury7wnhyc8QU=", + "lastModified": 1707790272, + "narHash": "sha256-KQXPNl3BLdRbz7xx+mwIq/017fxLRk6JhXHxVWCKsTU=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "a0df72e106322b67e9c6e591fe870380bd0da0d5", + "rev": "8dfbe2dffc28c1a18a29ffa34d5d0b269622b158", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index e62a09803d3d..a76ab2edbea8 100644 --- a/flake.nix +++ b/flake.nix @@ -31,16 +31,9 @@ perSystem = { config, system, pkgs, craneLib, ... }: { config._module.args.flakeInputs = inputs; imports = [ - ./nix/all-engines.nix ./nix/args.nix - ./nix/cargo-doc.nix - ./nix/cli-shell.nix - ./nix/cli-prisma.nix - ./nix/dev-vm.nix - ./nix/memory-profiling.nix - ./nix/prisma-schema-wasm.nix - ./nix/publish-engine-size.nix ./nix/shell.nix + ./nix/publish-engine-size.nix ]; }; }; diff --git a/libs/user-facing-errors/src/query_engine/mod.rs b/libs/user-facing-errors/src/query_engine/mod.rs index 48833cb490a0..e42fbcb03f56 100644 --- a/libs/user-facing-errors/src/query_engine/mod.rs +++ b/libs/user-facing-errors/src/query_engine/mod.rs @@ -335,3 +335,9 @@ pub struct ExternalError { /// id of the error in external system, which would allow to retrieve it later pub id: i32, } + +#[derive(Debug, UserFacingError, Serialize)] +#[user_facing(code = "P2037", message = "Too many database connections opened: {message}")] +pub struct TooManyConnections { + pub message: String, +} diff --git a/nix/README.md b/nix/README.md new file mode 100644 index 000000000000..8f95176511b7 --- /dev/null +++ b/nix/README.md @@ -0,0 +1,5 @@ +This directory contains a nix shell that is convenient to streamline developement, however, +contributors must not require to depend on nix for any specific workflow. + +Instead, automation should be provided in a combination of bash scripts and docker, exposed over +tasks in the [root's Makefile](/Makefile) diff --git a/nix/all-engines.nix b/nix/all-engines.nix deleted file mode 100644 index b509d67229ad..000000000000 --- a/nix/all-engines.nix +++ /dev/null @@ -1,170 +0,0 @@ -{ pkgs, flakeInputs, lib, self', rustToolchain, ... 
}: - -let - stdenv = pkgs.clangStdenv; - srcPath = ../.; - srcFilter = flakeInputs.gitignore.lib.gitignoreFilterWith { - basePath = srcPath; - extraRules = '' - /nix - /flake.* - ''; - }; - src = lib.cleanSourceWith { - filter = srcFilter; - src = srcPath; - name = "prisma-engines-source"; - }; - craneLib = (flakeInputs.crane.mkLib pkgs).overrideToolchain rustToolchain; - deps = craneLib.vendorCargoDeps { inherit src; }; - libSuffix = stdenv.hostPlatform.extensions.sharedLibrary; -in -{ - packages.prisma-engines = stdenv.mkDerivation { - name = "prisma-engines"; - inherit src; - - buildInputs = [ pkgs.openssl.out ]; - nativeBuildInputs = with pkgs; [ - rustToolchain - git # for our build scripts that bake in the git hash - protobuf # for tonic - openssl.dev - pkg-config - ] ++ lib.optionals stdenv.isDarwin [ - perl # required to build openssl - darwin.apple_sdk.frameworks.Security - iconv - ]; - - configurePhase = '' - mkdir .cargo - ln -s ${deps}/config.toml .cargo/config.toml - ''; - - buildPhase = '' - cargo build --release --bins - cargo build --release -p query-engine-node-api - ''; - - installPhase = '' - mkdir -p $out/bin $out/lib - cp target/release/query-engine $out/bin/ - cp target/release/schema-engine $out/bin/ - cp target/release/prisma-fmt $out/bin/ - cp target/release/libquery_engine${libSuffix} $out/lib/libquery_engine.node - ''; - - dontStrip = true; - }; - - packages.test-cli = lib.makeOverridable - ({ profile }: stdenv.mkDerivation { - name = "test-cli"; - inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; - - buildPhase = "cargo build --profile=${profile} --bin=test-cli"; - - installPhase = '' - set -eu - mkdir -p $out/bin - QE_PATH=$(find target -name 'test-cli') - cp $QE_PATH $out/bin - ''; - }) - { profile = "release"; }; - - packages.query-engine-bin = lib.makeOverridable - ({ profile }: stdenv.mkDerivation { - name = "query-engine-bin"; - inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; - - buildPhase = "cargo build --profile=${profile} --bin=query-engine"; - - installPhase = '' - set -eu - mkdir -p $out/bin - QE_PATH=$(find target -name 'query-engine') - cp $QE_PATH $out/bin - ''; - }) - { profile = "release"; }; - - # TODO: try to make caching and sharing the build artifacts work with crane. There should be - # separate `query-engine-lib` and `query-engine-bin` derivations instead, but we use this for now - # to make the CI job that uses it faster. 
- packages.query-engine-bin-and-lib = lib.makeOverridable - ({ profile }: stdenv.mkDerivation { - name = "query-engine-bin-and-lib"; - inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; - - buildPhase = '' - cargo build --profile=${profile} --bin=query-engine - cargo build --profile=${profile} -p query-engine-node-api - ''; - - installPhase = '' - set -eu - mkdir -p $out/bin $out/lib - cp target/${profile}/query-engine $out/bin/query-engine - cp target/${profile}/libquery_engine${libSuffix} $out/lib/libquery_engine.node - ''; - }) - { profile = "release"; }; - - packages.build-engine-wasm = pkgs.writeShellApplication { - name = "build-engine-wasm"; - runtimeInputs = with pkgs; [ git rustup wasm-bindgen-cli binaryen jq iconv ]; - text = '' - cd query-engine/query-engine-wasm - WASM_BUILD_PROFILE=release ./build.sh "$1" "$2" - ''; - }; - - packages.query-engine-wasm-gz = lib.makeOverridable - ({ profile }: stdenv.mkDerivation { - name = "query-engine-wasm-gz"; - inherit src; - buildInputs = with pkgs; [ iconv ]; - - buildPhase = '' - export HOME=$(mktemp -dt wasm-engine-home-XXXX) - - OUT_FOLDER=$(mktemp -dt wasm-engine-out-XXXX) - ${self'.packages.build-engine-wasm}/bin/build-engine-wasm "0.0.0" "$OUT_FOLDER" - - for provider in "postgresql" "mysql" "sqlite"; do - gzip -ckn "$OUT_FOLDER/$provider/query_engine_bg.wasm" > "query-engine-$provider.wasm.gz" - done - ''; - - installPhase = '' - set +x - mkdir -p $out - for provider in "postgresql" "mysql" "sqlite"; do - cp "$OUT_FOLDER/$provider/query_engine_bg.wasm" "$out/query-engine-$provider.wasm" - cp "query-engine-$provider.wasm.gz" "$out/" - done - ''; - }) - { profile = "release"; }; - - packages.export-query-engine-wasm = - pkgs.writeShellApplication { - name = "export-query-engine-wasm"; - runtimeInputs = with pkgs; [ jq ]; - text = '' - OUT_VERSION="$1" - OUT_FOLDER="$2" - - mkdir -p "$OUT_FOLDER" - ${self'.packages.build-engine-wasm}/bin/build-engine-wasm "$OUT_VERSION" "$OUT_FOLDER" - chmod -R +rw "$OUT_FOLDER" - mv "$OUT_FOLDER/package.json" "$OUT_FOLDER/package.json.bak" - jq --arg new_version "$OUT_VERSION" '.version = $new_version' "$OUT_FOLDER/package.json.bak" > "$OUT_FOLDER/package.json" - ''; - }; -} diff --git a/nix/cargo-doc.nix b/nix/cargo-doc.nix deleted file mode 100644 index 5e37148ef030..000000000000 --- a/nix/cargo-doc.nix +++ /dev/null @@ -1,46 +0,0 @@ -{ pkgs, self', ... }: - -{ - packages.cargo-docs = pkgs.clangStdenv.mkDerivation { - name = "prisma-engines-cargo-docs"; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs src configurePhase; - - buildPhase = "cargo doc --workspace"; - - installPhase = '' - mkdir -p $out/share - mv target/doc/ $out/share/docs - ''; - }; - - packages.publish-cargo-docs = pkgs.writeShellApplication { - name = "publish-cargo-docs"; - text = '' - set -euxo pipefail - - if ! git diff --exit-code 1> /dev/null; then - : "The workspace is not clean. Please commit or reset, then try again". 
- exit 1 - fi - - CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) - CURRENT_COMMIT=$(git rev-parse --short HEAD) - REPO_ROOT=$(git rev-parse --show-toplevel) - - pushd "$REPO_ROOT" - git fetch --depth=1 origin gh-pages - git checkout origin/gh-pages - rm -rf ./doc - cp \ - --recursive \ - --no-preserve=mode,ownership \ - ${self'.packages.cargo-docs}/share/docs \ - ./doc - git add doc - git commit --quiet -m "cargo docs for $CURRENT_COMMIT" - git push origin '+HEAD:gh-pages' - git checkout "$CURRENT_BRANCH" - popd - ''; - }; -} diff --git a/nix/cli-prisma.nix b/nix/cli-prisma.nix deleted file mode 100644 index fcbc6756ee95..000000000000 --- a/nix/cli-prisma.nix +++ /dev/null @@ -1,44 +0,0 @@ -{ config, pkgs, self', ... }: - -# This is an impure build for prisma/prisma. We need this because of the way we -# package `prisma-schema-wasm` and the fact that there's no `pnpm2nix`. -# See https://zimbatm.com/notes/nix-packaging-the-heretic-way for more details -# on impure builds. -let - schema-wasm = self'.packages.prisma-schema-wasm; - version = "4.11.0"; -in -{ - packages.cli-prisma = pkgs.runCommand "prisma-cli-${version}" - { - # Disable the Nix build sandbox for this specific build. - # This means the build can freely talk to the Internet. - __noChroot = true; - - buildInputs = [ - pkgs.nodejs - ]; - } - '' - # NIX sets this to something that doesn't exist for purity reasons. - export HOME=$(mktemp -d) - - # Install prisma locally, and impurely. - npm install prisma@${version} - - # Fix shebang scripts recursively. - patchShebangs . - - # Remove prisma-fmt and copy it over from our local build. - rm node_modules/prisma/build/prisma_schema_build_bg.wasm - cp ${schema-wasm}/src/prisma_schema_build_bg.wasm node_modules/prisma/build/prisma_schema_build_bg.wasm - - # Copy node_modules and everything else. - mkdir -p $out/share - cp -r . $out/share/$name - - # Add a symlink to the binary. - mkdir $out/bin - ln -s $out/share/$name/node_modules/.bin/prisma $out/bin/prisma - ''; -} diff --git a/nix/cli-shell.nix b/nix/cli-shell.nix deleted file mode 100644 index 4c98c8078ec5..000000000000 --- a/nix/cli-shell.nix +++ /dev/null @@ -1,22 +0,0 @@ -{ config, pkgs, self', ... }: - -# Run it with -# > nix develop --no-sandbox .#cli-shell -let - engines = self'.packages.prisma-engines; - prisma = self'.packages.cli-prisma; -in -{ - devShells.cli-shell = pkgs.mkShell { - packages = [ pkgs.cowsay pkgs.nodejs engines prisma ]; - shellHook = '' - cowsay -f turtle "Run prisma by just typing 'prisma ', e.g. 'prisma --version'" - - export PRISMA_SCHEMA_ENGINE_BINARY=${engines}/bin/schema-engine - export PRISMA_QUERY_ENGINE_BINARY=${engines}/bin/query-engine - export PRISMA_QUERY_ENGINE_LIBRARY=${engines}/lib/libquery_engine.node - # Does this even do anything anymore? - export PRISMA_FMT_BINARY=${engines}/bin/prisma-fmt - ''; - }; -} diff --git a/nix/dev-vm.nix b/nix/dev-vm.nix deleted file mode 100644 index f228488a24ff..000000000000 --- a/nix/dev-vm.nix +++ /dev/null @@ -1,98 +0,0 @@ -{ pkgs, flakeInputs, system, self', ... }: - -# Run qemu with a disk image containing prisma-engines repo, docker and all the -# packages to build and test engines. -# -# This is useful for testing engines with e.g. artificial memory or cpu limits. -# -# You can run it using: -# -# ``` -# $ nix run .#dev-vm -# ``` -# -# This will boot the VM and create a nixos.qcow2 VM image file, or reuse it if -# it is already there. -# -# You can pass extra arguments to the qemu command line, they will be forwarded -# (see --help for example). 
That lets you easily constrain the VM's resources -# (CPU, RAM, network, disk IO), among other things. -# -# The recommended way to interact with the vm is through SSH. It listens on -# 2222 on the host's localhost: -# -# ``` -# $ ssh prisma@localhost -p 2222 -# ``` -# -# Both the username and password are "prisma". -# -# Links: -# - https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/virtualisation/qemu-vm.nix -let - evalConfig = import "${flakeInputs.nixpkgs}/nixos/lib/eval-config.nix"; - prisma-engines = self'.packages.prisma-engines; - prisma-engines-inputs = prisma-engines.buildInputs ++ prisma-engines.nativeBuildInputs; - vmConfig = (evalConfig { - modules = [ - { - system.stateVersion = "23.05"; - virtualisation.docker.enable = true; - - virtualisation.vmVariant.virtualisation = { - diskSize = 1024 * 8; # 8GB - forwardPorts = [ - { - from = "host"; - host.port = 2222; - guest.port = 22; - } - ]; - writableStore = true; - writableStoreUseTmpfs = false; - sharedDirectories.prisma-engines = { - source = "${prisma-engines.src}"; - target = "/home/prisma/prisma-engines"; - }; - }; - - # Enable flakes in the host vm - nix = { - # package = pkgs.nixUnstable; - extraOptions = "experimental-features = nix-command flakes"; - }; - - environment.systemPackages = with pkgs; [ - tmux - git - coreutils - gnumake - ] ++ prisma-engines-inputs; - - services.openssh = { - listenAddresses = [{ - addr = "0.0.0.0"; - port = 22; - }]; - enable = true; - settings.PasswordAuthentication = true; - }; - - users.users.prisma = { - isNormalUser = true; - extraGroups = [ - "docker" - "wheel" # Enable ‘sudo’ for the user. - ]; - password = "prisma"; - }; - - } - ]; - system = "x86_64-linux"; - } - ).config; -in -{ - packages.dev-vm = vmConfig.system.build.vm; -} diff --git a/nix/memory-profiling.nix b/nix/memory-profiling.nix deleted file mode 100644 index 693fb108d2e1..000000000000 --- a/nix/memory-profiling.nix +++ /dev/null @@ -1,50 +0,0 @@ -{ pkgs, self', ... }: - -let - # A convenience package to open the DHAT Visualizer - # (https://valgrind.org/docs/manual/dh-manual.html) in a browser. - dhat-viewer = pkgs.writeShellScriptBin "dhat-viewer" '' - xdg-open ${valgrind}/libexec/valgrind/dh_view.html - ''; - - # DHAT (https://valgrind.org/docs/manual/dh-manual.html) and Massif - # (https://valgrind.org/docs/manual/ms-manual.html#ms-manual.overview) - # profiles for schema-builder::build() with the odoo.prisma example schema. - # This is just the data, please read the docs of both tools to understand how - # to use that data. - # - # Usage example: - # - # $ nix build .#schema-builder-odoo-memory-profile - # $ nix run .#dhat-viewer - # $ : At this point your browser will open on the DHAT UI and you can - # $ : open the dhat-profile.json file in ./result. - schema-builder-odoo-memory-profile = stdenv.mkDerivation { - name = "schema-builder-odoo-memory-profile"; - inherit (self'.packages.prisma-engines) nativeBuildInputs configurePhase src; - buildInputs = self'.packages.prisma-engines.buildInputs ++ [ valgrind ]; - - buildPhase = '' - cargo build --profile=release --example schema_builder_build_odoo - valgrind --tool=dhat --dhat-out-file=dhat-profile.json \ - ./target/release/examples/schema_builder_build_odoo - valgrind --tool=massif --massif-out-file=massif-profile \ - ./target/release/examples/schema_builder_build_odoo - ''; - - installPhase = '' - mkdir $out - mv dhat-profile.json massif-profile $out/ - ''; - }; - - # Valgrind is not available on all platforms. 
We substitute the memory - # profiling derivations with an error in these scenarios. - wrongSystem = runCommand "wrongSystem" { } "echo 'Not available on this system'; exit 1"; - - inherit (pkgs) stdenv runCommand valgrind; -in -{ - packages.dhat-viewer = if stdenv.isLinux then dhat-viewer else wrongSystem; - packages.schema-builder-odoo-memory-profile = if stdenv.isLinux then schema-builder-odoo-memory-profile else wrongSystem; -} diff --git a/nix/prisma-schema-wasm.nix b/nix/prisma-schema-wasm.nix deleted file mode 100644 index 602e59b48ea5..000000000000 --- a/nix/prisma-schema-wasm.nix +++ /dev/null @@ -1,55 +0,0 @@ -{ pkgs, lib, self', ... }: - -let - toolchain = pkgs.rust-bin.fromRustupToolchainFile ../prisma-schema-wasm/rust-toolchain.toml; - scriptsDir = ../prisma-schema-wasm/scripts; - inherit (pkgs) jq nodejs coreutils wasm-bindgen-cli stdenv; - inherit (builtins) readFile replaceStrings; -in -{ - packages.prisma-schema-wasm = lib.makeOverridable - ({ profile }: stdenv.mkDerivation { - name = "prisma-schema-wasm"; - nativeBuildInputs = with pkgs; [ git wasm-bindgen-cli toolchain ]; - inherit (self'.packages.prisma-engines) configurePhase src; - - buildPhase = "cargo build --profile=${profile} --target=wasm32-unknown-unknown -p prisma-schema-build"; - installPhase = readFile "${scriptsDir}/install.sh"; - - WASM_BUILD_PROFILE = profile; - - passthru = { - dev = self'.packages.prisma-schema-wasm.override { profile = "dev"; }; - }; - }) - { profile = "release"; }; - - # Takes a package version as its single argument, and produces - # prisma-schema-wasm with the right package.json in a temporary directory, - # then prints the directory's path. This is used by the publish pipeline in CI. - packages.renderPrismaSchemaWasmPackage = - pkgs.writeShellApplication { - name = "renderPrismaSchemaWasmPackage"; - runtimeInputs = [ jq ]; - text = '' - set -euxo pipefail - - PACKAGE_DIR=$(mktemp -d) - cp -r --no-target-directory ${self'.packages.prisma-schema-wasm} "$PACKAGE_DIR" - rm -f "$PACKAGE_DIR/package.json" - jq ".version = \"$1\"" ${self'.packages.prisma-schema-wasm}/package.json > "$PACKAGE_DIR/package.json" - echo "$PACKAGE_DIR" - ''; - }; - - packages.syncWasmBindgenVersions = let template = readFile "${scriptsDir}/syncWasmBindgenVersions.sh"; in - pkgs.writeShellApplication { - name = "syncWasmBindgenVersions"; - runtimeInputs = [ coreutils toolchain ]; - text = replaceStrings [ "$WASM_BINDGEN_VERSION" ] [ wasm-bindgen-cli.version ] template; - }; - - checks.prismaSchemaWasmE2E = pkgs.runCommand "prismaSchemaWasmE2E" - { PRISMA_SCHEMA_WASM = self'.packages.prisma-schema-wasm; NODE = "${nodejs}/bin/node"; } - (readFile "${scriptsDir}/check.sh"); -} diff --git a/nix/publish-engine-size.nix b/nix/publish-engine-size.nix index b49c093d9a31..11a63d7de7e8 100644 --- a/nix/publish-engine-size.nix +++ b/nix/publish-engine-size.nix @@ -1,7 +1,178 @@ -{ pkgs, self', ... }: +/* +* Deprecated: This file is deprecated and will be removed soon. +* See https://github.com/prisma/team-orm/issues/943 +*/ +{ pkgs, flakeInputs, lib, self', rustToolchain, ... 
}: +let + stdenv = pkgs.clangStdenv; + srcPath = ../.; + srcFilter = flakeInputs.gitignore.lib.gitignoreFilterWith { + basePath = srcPath; + extraRules = '' + /nix + /flake.* + ''; + }; + src = lib.cleanSourceWith { + filter = srcFilter; + src = srcPath; + name = "prisma-engines-source"; + }; + craneLib = (flakeInputs.crane.mkLib pkgs).overrideToolchain rustToolchain; + deps = craneLib.vendorCargoDeps { inherit src; }; + libSuffix = stdenv.hostPlatform.extensions.sharedLibrary; +in { - /* Publish the size of the Query Engine binary and library to the CSV file + packages.prisma-engines = stdenv.mkDerivation { + name = "prisma-engines"; + inherit src; + + buildInputs = [ pkgs.openssl.out ]; + nativeBuildInputs = with pkgs; [ + rustToolchain + git # for our build scripts that bake in the git hash + protobuf # for tonic + openssl.dev + pkg-config + ] ++ lib.optionals stdenv.isDarwin [ + perl # required to build openssl + darwin.apple_sdk.frameworks.Security + iconv + ]; + + configurePhase = '' + mkdir .cargo + ln -s ${deps}/config.toml .cargo/config.toml + ''; + + buildPhase = '' + cargo build --release --bins + cargo build --release -p query-engine-node-api + ''; + + installPhase = '' + mkdir -p $out/bin $out/lib + cp target/release/query-engine $out/bin/ + cp target/release/schema-engine $out/bin/ + cp target/release/prisma-fmt $out/bin/ + cp target/release/libquery_engine${libSuffix} $out/lib/libquery_engine.node + ''; + + dontStrip = true; + }; + + packages.test-cli = lib.makeOverridable + ({ profile }: stdenv.mkDerivation { + name = "test-cli"; + inherit src; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; + + buildPhase = "cargo build --profile=${profile} --bin=test-cli"; + + installPhase = '' + set -eu + mkdir -p $out/bin + QE_PATH=$(find target -name 'test-cli') + cp $QE_PATH $out/bin + ''; + }) + { profile = "release"; }; + + packages.query-engine-bin = lib.makeOverridable + ({ profile }: stdenv.mkDerivation { + name = "query-engine-bin"; + inherit src; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; + + buildPhase = "cargo build --profile=${profile} --bin=query-engine"; + + installPhase = '' + set -eu + mkdir -p $out/bin + QE_PATH=$(find target -name 'query-engine') + cp $QE_PATH $out/bin + ''; + }) + { profile = "release"; }; + + # TODO: try to make caching and sharing the build artifacts work with crane. There should be + # separate `query-engine-lib` and `query-engine-bin` derivations instead, but we use this for now + # to make the CI job that uses it faster. 
+ packages.query-engine-bin-and-lib = lib.makeOverridable + ({ profile }: stdenv.mkDerivation { + name = "query-engine-bin-and-lib"; + inherit src; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; + + buildPhase = '' + cargo build --profile=${profile} --bin=query-engine + cargo build --profile=${profile} -p query-engine-node-api + ''; + + installPhase = '' + set -eu + mkdir -p $out/bin $out/lib + cp target/${profile}/query-engine $out/bin/query-engine + cp target/${profile}/libquery_engine${libSuffix} $out/lib/libquery_engine.node + ''; + }) + { profile = "release"; }; + + packages.build-engine-wasm = pkgs.writeShellApplication { + name = "build-engine-wasm"; + runtimeInputs = with pkgs; [ git rustup wasm-bindgen-cli binaryen jq iconv ]; + text = '' + cd query-engine/query-engine-wasm + WASM_BUILD_PROFILE=release ./build.sh "$1" "$2" + ''; + }; + + packages.query-engine-wasm-gz = lib.makeOverridable + ({ profile }: stdenv.mkDerivation { + name = "query-engine-wasm-gz"; + inherit src; + buildInputs = with pkgs; [ iconv ]; + + buildPhase = '' + export HOME=$(mktemp -dt wasm-engine-home-XXXX) + + OUT_FOLDER=$(mktemp -dt wasm-engine-out-XXXX) + ${self'.packages.build-engine-wasm}/bin/build-engine-wasm "0.0.0" "$OUT_FOLDER" + + for provider in "postgresql" "mysql" "sqlite"; do + gzip -ckn "$OUT_FOLDER/$provider/query_engine_bg.wasm" > "query-engine-$provider.wasm.gz" + done + ''; + + installPhase = '' + set +x + mkdir -p $out + for provider in "postgresql" "mysql" "sqlite"; do + cp "$OUT_FOLDER/$provider/query_engine_bg.wasm" "$out/query-engine-$provider.wasm" + cp "query-engine-$provider.wasm.gz" "$out/" + done + ''; + }) + { profile = "release"; }; + + packages.export-query-engine-wasm = + pkgs.writeShellApplication { + name = "export-query-engine-wasm"; + runtimeInputs = with pkgs; [ jq ]; + text = '' + OUT_VERSION="$1" + OUT_FOLDER="$2" + + mkdir -p "$OUT_FOLDER" + ${self'.packages.build-engine-wasm}/bin/build-engine-wasm "$OUT_VERSION" "$OUT_FOLDER" + chmod -R +rw "$OUT_FOLDER" + mv "$OUT_FOLDER/package.json" "$OUT_FOLDER/package.json.bak" + jq --arg new_version "$OUT_VERSION" '.version = $new_version' "$OUT_FOLDER/package.json.bak" > "$OUT_FOLDER/package.json" + ''; + }; + + /* Publish the size of the Query Engine binary and library to the CSV file in the `gh-pages` branch of the repository. Data: https://github.com/prisma/prisma-engines/blob/gh-pages/engines-size/data.csv diff --git a/prisma-schema-wasm/scripts/install.sh b/prisma-schema-wasm/scripts/install.sh index 992dbd1ac380..aafc335956d7 100755 --- a/prisma-schema-wasm/scripts/install.sh +++ b/prisma-schema-wasm/scripts/install.sh @@ -6,6 +6,10 @@ if [[ -z "${WASM_BUILD_PROFILE:-}" ]]; then WASM_BUILD_PROFILE="release" fi +if [[ -z "${NPM_PACKAGE_VERSION:-}" ]]; then + NPM_PACKAGE_VERSION="0.0.0" +fi + if [[ $WASM_BUILD_PROFILE == "dev" ]]; then TARGET_DIR="debug" else @@ -18,8 +22,8 @@ printf '%s\n' " -> Creating out dir..." 
# shellcheck disable=SC2154 mkdir -p "$out"/src -printf '%s\n' " -> Copying package.json" -cp ./prisma-schema-wasm/package.json "$out"/ +printf '%s\n' " -> Copying package.json and updating version to $NPM_PACKAGE_VERSION" +jq ".version = \"$NPM_PACKAGE_VERSION\"" ./prisma-schema-wasm/package.json > "$out/package.json" printf '%s\n' " -> Copying README.md" cp ./prisma-schema-wasm/README.md "$out"/ diff --git a/quaint/src/connector/mssql/native/error.rs b/quaint/src/connector/mssql/native/error.rs index 9c16bf9f2952..bbccffca8435 100644 --- a/quaint/src/connector/mssql/native/error.rs +++ b/quaint/src/connector/mssql/native/error.rs @@ -234,6 +234,13 @@ impl From for Error { builder.build() } + tiberius::error::Error::Server(e) if e.code() == 5828 => { + let mut builder = Error::builder(ErrorKind::TooManyConnections(e.clone().into())); + builder.set_original_code(format!("{}", e.code())); + builder.set_original_message(e.message().to_string()); + + builder.build() + } tiberius::error::Error::Server(e) => { let kind = ErrorKind::QueryError(e.clone().into()); diff --git a/quaint/src/connector/mysql/error.rs b/quaint/src/connector/mysql/error.rs index 7b4813bf0223..bb5edf957801 100644 --- a/quaint/src/connector/mysql/error.rs +++ b/quaint/src/connector/mysql/error.rs @@ -231,6 +231,12 @@ impl From for Error { builder.set_original_message(error.message); builder.build() } + 1040 | 1203 => { + let mut builder = Error::builder(ErrorKind::TooManyConnections(error.clone().into())); + builder.set_original_code(format!("{code}")); + builder.set_original_message(error.message); + builder.build() + } _ => { let kind = ErrorKind::QueryError( MysqlAsyncError::Server(MysqlError { diff --git a/quaint/src/connector/postgres/error.rs b/quaint/src/connector/postgres/error.rs index ab6ec7b07847..3dcc481eccba 100644 --- a/quaint/src/connector/postgres/error.rs +++ b/quaint/src/connector/postgres/error.rs @@ -218,6 +218,16 @@ impl From for Error { builder.build() } + "53300" => { + let code = value.code.to_owned(); + let message = value.to_string(); + let kind = ErrorKind::TooManyConnections(value.into()); + let mut builder = Error::builder(kind); + builder.set_original_code(code); + builder.set_original_message(message); + builder.build() + } + _ => { let code = value.code.to_owned(); let message = value.to_string(); diff --git a/quaint/src/error/mod.rs b/quaint/src/error/mod.rs index c28e97970ebc..661eb4d344ff 100644 --- a/quaint/src/error/mod.rs +++ b/quaint/src/error/mod.rs @@ -148,6 +148,9 @@ pub enum ErrorKind { #[error("Error querying the database: {}", _0)] QueryError(Box), + #[error("Too many DB connections opened")] + TooManyConnections(Box), + #[error("Invalid input provided to query: {}", _0)] QueryInvalidInput(String), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs index deaaa7e84313..4b4aa97479d6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs @@ -30,3 +30,4 @@ mod prisma_7072; mod prisma_7434; mod prisma_8265; mod prisma_engines_4286; +mod team_orm_927; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/team_orm_927.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/team_orm_927.rs new file mode 100644 index 000000000000..45d7eba7aad9 --- /dev/null +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/team_orm_927.rs @@ -0,0 +1,90 @@ +//! Regression test for https://github.com/prisma/team-orm/issues/927 + +use query_engine_tests::*; + +#[test_suite(schema(schema))] +mod count_before_relation { + fn schema() -> String { + indoc! { + r#" + model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parentId Int + parent Parent @relation(fields: [parentId], references: [id]) + } + "# + } + .to_owned() + } + + #[connector_test] + async fn find_unique(runner: Runner) -> TestResult<()> { + seed(&runner).await?; + + insta::assert_snapshot!( + run_query!( + runner, + r#" + query { + findUniqueParent( + where: { id: 1 } + ) { + _count { children } + children { id } + } + } + "# + ), + @r###"{"data":{"findUniqueParent":{"_count":{"children":1},"children":[{"id":1}]}}}"### + ); + + Ok(()) + } + + #[connector_test] + async fn find_many(runner: Runner) -> TestResult<()> { + seed(&runner).await?; + + insta::assert_snapshot!( + run_query!( + runner, + r#" + query { + findManyParent { + _count { children } + children { id } + } + } + "# + ), + @r###"{"data":{"findManyParent":[{"_count":{"children":1},"children":[{"id":1}]}]}}"### + ); + + Ok(()) + } + + async fn seed(runner: &Runner) -> TestResult<()> { + run_query!( + runner, + r#" + mutation { + createOneParent( + data: { + id: 1, + children: { + create: { id: 1 } + } + } + ) { id } + } + "# + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs index 54dbccefe7cc..312463f19b15 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs @@ -84,9 +84,9 @@ mod many_count_rel { Ok(()) } - // "Counting with some records and filters" should "not affect the count" + // Counting with cursor should not affect the count #[connector_test] - async fn count_with_filters(runner: Runner) -> TestResult<()> { + async fn count_with_cursor(runner: Runner) -> TestResult<()> { // 4 comment / 4 categories create_row( &runner, @@ -113,6 +113,128 @@ mod many_count_rel { Ok(()) } + // Counting with take should not affect the count + #[connector_test] + async fn count_with_take(runner: Runner) -> TestResult<()> { + // 4 comment / 4 categories + create_row( + &runner, + r#"{ + id: 1, + title: "a", + comments: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + categories: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findManyPost(where: { id: 1 }) { + comments(take: 1) { id } + categories(take: 1) { id } + _count { comments categories } + } + }"#), + @r###"{"data":{"findManyPost":[{"comments":[{"id":1}],"categories":[{"id":1}],"_count":{"comments":4,"categories":4}}]}}"### + ); + + Ok(()) + } + + // Counting with skip should not affect the count + #[connector_test] + async fn count_with_skip(runner: Runner) -> TestResult<()> { + // 4 comment / 4 categories + create_row( + &runner, + r#"{ + id: 1, + title: "a", + comments: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + categories: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findManyPost(where: { 
id: 1 }) { + comments(skip: 3) { id } + categories(skip: 3) { id } + _count { comments categories } + } + }"#), + @r###"{"data":{"findManyPost":[{"comments":[{"id":4}],"categories":[{"id":4}],"_count":{"comments":4,"categories":4}}]}}"### + ); + + Ok(()) + } + + // Counting with filters should not affect the count + #[connector_test] + async fn count_with_filters(runner: Runner) -> TestResult<()> { + // 4 comment / 4 categories + create_row( + &runner, + r#"{ + id: 1, + title: "a", + comments: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + categories: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findManyPost(where: { id: 1 }) { + comments(where: { id: 2 }) { id } + categories(where: { id: 2 }) { id } + _count { comments categories } + } + }"#), + @r###"{"data":{"findManyPost":[{"comments":[{"id":2}],"categories":[{"id":2}],"_count":{"comments":4,"categories":4}}]}}"### + ); + + Ok(()) + } + + // Counting with distinct should not affect the count + #[connector_test] + async fn count_with_distinct(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + title: "a", + categories: { create: { id: 1 } } + }"#, + ) + .await?; + create_row( + &runner, + r#"{ + id: 2, + title: "a", + categories: { connect: { id: 1 } } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findManyCategory { + posts(distinct: title) { id } + _count { posts } + } + }"#), + @r###"{"data":{"findManyCategory":[{"posts":[{"id":1}],"_count":{"posts":2}}]}}"### + ); + + Ok(()) + } + fn schema_nested() -> String { let schema = indoc! { r#"model User { @@ -214,6 +336,124 @@ mod many_count_rel { Ok(()) } + #[connector_test(schema(schema_nested))] + async fn nested_count_same_field_on_many_levels(runner: Runner) -> TestResult<()> { + run_query!( + runner, + r#" + mutation { + createOneUser( + data: { + id: 1, + name: "Author", + posts: { + create: [ + { + id: 1, + title: "good post", + comments: { + create: [ + { id: 1, body: "insightful!" 
}, + { id: 2, body: "deep lore uncovered" } + ] + } + }, + { + id: 2, + title: "boring post" + } + ] + } + } + ) { + id + } + } + "# + ); + + insta::assert_snapshot!( + run_query!( + runner, + r#" + query { + findManyPost { + comments { + post { + _count { comments } + } + } + _count { comments } + } + } + "# + ), + @r###"{"data":{"findManyPost":[{"comments":[{"post":{"_count":{"comments":2}}},{"post":{"_count":{"comments":2}}}],"_count":{"comments":2}},{"comments":[],"_count":{"comments":0}}]}}"### + ); + + insta::assert_snapshot!( + run_query!( + runner, + r#" + query { + findManyPost { + comments { + post { + comments { id } + _count { comments } + } + } + _count { comments } + } + } + "# + ), + @r###"{"data":{"findManyPost":[{"comments":[{"post":{"comments":[{"id":1},{"id":2}],"_count":{"comments":2}}},{"post":{"comments":[{"id":1},{"id":2}],"_count":{"comments":2}}}],"_count":{"comments":2}},{"comments":[],"_count":{"comments":0}}]}}"### + ); + + insta::assert_snapshot!( + run_query!( + runner, + r#" + query { + findManyPost { + comments { + post { + comments(where: { id: 1 }) { id } + _count { comments } + } + } + _count { comments } + } + } + "# + ), + @r###"{"data":{"findManyPost":[{"comments":[{"post":{"comments":[{"id":1}],"_count":{"comments":2}}},{"post":{"comments":[{"id":1}],"_count":{"comments":2}}}],"_count":{"comments":2}},{"comments":[],"_count":{"comments":0}}]}}"### + ); + + insta::assert_snapshot!( + run_query!( + runner, + r#" + query { + findManyPost { + comments(where: { id: 1}) { + post { + comments { id } + _count { comments } + } + } + _count { comments } + } + } + "# + ), + @r###"{"data":{"findManyPost":[{"comments":[{"post":{"comments":[{"id":1},{"id":2}],"_count":{"comments":2}}}],"_count":{"comments":2}},{"comments":[],"_count":{"comments":0}}]}}"### + ); + + Ok(()) + } + fn m_n_self_rel() -> String { let schema = indoc! 
{ r#"model User { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs index 4d21189bf125..45c49150e474 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs @@ -84,9 +84,9 @@ mod uniq_count_rel { Ok(()) } - // "Counting with some records and filters" should "not affect the count" + // Counting with cursor should not affect the count #[connector_test] - async fn count_with_filters(runner: Runner) -> TestResult<()> { + async fn count_with_cursor(runner: Runner) -> TestResult<()> { // 4 comment / 4 categories create_row( &runner, @@ -113,6 +113,128 @@ mod uniq_count_rel { Ok(()) } + // Counting with take should not affect the count + #[connector_test] + async fn count_with_take(runner: Runner) -> TestResult<()> { + // 4 comment / 4 categories + create_row( + &runner, + r#"{ + id: 1, + title: "a", + comments: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + categories: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + comments(take: 1) { id } + categories(take: 1) { id } + _count { comments categories } + } + }"#), + @r###"{"data":{"findUniquePost":{"comments":[{"id":1}],"categories":[{"id":1}],"_count":{"comments":4,"categories":4}}}}"### + ); + + Ok(()) + } + + // Counting with skip should not affect the count + #[connector_test] + async fn count_with_skip(runner: Runner) -> TestResult<()> { + // 4 comment / 4 categories + create_row( + &runner, + r#"{ + id: 1, + title: "a", + comments: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + categories: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + comments(skip: 2) { id } + categories(skip: 2) { id } + _count { comments categories } + } + }"#), + @r###"{"data":{"findUniquePost":{"comments":[{"id":3},{"id":4}],"categories":[{"id":3},{"id":4}],"_count":{"comments":4,"categories":4}}}}"### + ); + + Ok(()) + } + + // Counting with filters should not affect the count + #[connector_test] + async fn count_with_filters(runner: Runner) -> TestResult<()> { + // 4 comment / 4 categories + create_row( + &runner, + r#"{ + id: 1, + title: "a", + comments: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + categories: { create: [{id: 1}, {id: 2}, {id: 3}, {id: 4}] }, + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + comments(where: { id: 2}) { id } + categories(where: { id: 2}) { id } + _count { comments categories } + } + }"#), + @r###"{"data":{"findUniquePost":{"comments":[{"id":2}],"categories":[{"id":2}],"_count":{"comments":4,"categories":4}}}}"### + ); + + Ok(()) + } + + // Counting with distinct should not affect the count + #[connector_test] + async fn count_with_distinct(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + title: "a", + categories: { create: { id: 1 } } + }"#, + ) + .await?; + create_row( + &runner, + r#"{ + id: 2, + title: "a", + categories: { connect: { id: 1 } } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniqueCategory(where: { 
id: 1 }) { + posts(distinct: title) { id } + _count { posts } + } + }"#), + @r###"{"data":{"findUniqueCategory":{"posts":[{"id":1}],"_count":{"posts":2}}}}"### + ); + + Ok(()) + } + fn schema_nested() -> String { let schema = indoc! { r#"model User { diff --git a/query-engine/connectors/query-connector/src/error.rs b/query-engine/connectors/query-connector/src/error.rs index 24e64a6c587f..1d9937ee55aa 100644 --- a/query-engine/connectors/query-connector/src/error.rs +++ b/query-engine/connectors/query-connector/src/error.rs @@ -119,6 +119,12 @@ impl ConnectorError { ErrorKind::RecordDoesNotExist { cause } => Some(KnownError::new( user_facing_errors::query_engine::RecordRequiredButNotFound { cause: cause.clone() }, )), + + ErrorKind::TooManyConnections(e) => Some(user_facing_errors::KnownError::new( + user_facing_errors::query_engine::TooManyConnections { + message: format!("{}", e), + }, + )), _ => None, }; @@ -278,6 +284,9 @@ pub enum ErrorKind { #[error("Invalid driver adapter: {0}")] InvalidDriverAdapter(String), + + #[error("Too many DB connections opened: {}", _0)] + TooManyConnections(Box), } impl From for ConnectorError { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index f9041c6dcd78..13206f560776 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -33,6 +33,7 @@ pub(crate) async fn get_single_record_joins( selected_fields: &FieldSelection, ctx: &Context<'_>, ) -> crate::Result> { + let selected_fields = selected_fields.to_virtuals_last(); let field_names: Vec<_> = selected_fields.db_names_grouping_virtuals().collect(); let idents = selected_fields.type_identifiers_with_arities_grouping_virtuals(); @@ -44,7 +45,7 @@ pub(crate) async fn get_single_record_joins( let query = query_builder::select::SelectBuilder::build( QueryArguments::from((model.clone(), filter.clone())), - selected_fields, + &selected_fields, ctx, ); @@ -130,6 +131,7 @@ pub(crate) async fn get_many_records_joins( selected_fields: &FieldSelection, ctx: &Context<'_>, ) -> crate::Result { + let selected_fields = selected_fields.to_virtuals_last(); let field_names: Vec<_> = selected_fields.db_names_grouping_virtuals().collect(); let idents = selected_fields.type_identifiers_with_arities_grouping_virtuals(); let meta = column_metadata::create(field_names.as_slice(), idents.as_slice()); @@ -155,7 +157,7 @@ pub(crate) async fn get_many_records_joins( _ => (), }; - let query = query_builder::select::SelectBuilder::build(query_arguments.clone(), selected_fields, ctx); + let query = query_builder::select::SelectBuilder::build(query_arguments.clone(), &selected_fields, ctx); for item in conn.filter(query.into(), meta.as_slice(), ctx).await?.into_iter() { let mut record = Record::from(item); diff --git a/query-engine/connectors/sql-query-connector/src/error.rs b/query-engine/connectors/sql-query-connector/src/error.rs index feb47fcbbb44..1156ed13a59a 100644 --- a/query-engine/connectors/sql-query-connector/src/error.rs +++ b/query-engine/connectors/sql-query-connector/src/error.rs @@ -200,6 +200,9 @@ pub enum SqlError { #[error("External connector error")] ExternalError(i32), + + #[error("Too many DB connections opened")] + TooManyConnections(Box), } impl SqlError { @@ -282,6 +285,7 @@ impl SqlError { SqlError::MissingFullTextSearchIndex => 
ConnectorError::from_kind(ErrorKind::MissingFullTextSearchIndex), SqlError::InvalidIsolationLevel(msg) => ConnectorError::from_kind(ErrorKind::InternalConversionError(msg)), SqlError::ExternalError(error_id) => ConnectorError::from_kind(ErrorKind::ExternalError(error_id)), + SqlError::TooManyConnections(e) => ConnectorError::from_kind(ErrorKind::TooManyConnections(e)), } } } @@ -336,6 +340,7 @@ impl From for SqlError { QuaintKind::TransactionWriteConflict => Self::TransactionWriteConflict, QuaintKind::RollbackWithoutBegin => Self::RollbackWithoutBegin, QuaintKind::ExternalError(error_id) => Self::ExternalError(error_id), + QuaintKind::TooManyConnections(e) => Self::TooManyConnections(e), e @ QuaintKind::UnsupportedColumnType { .. } => SqlError::ConversionError(e.into()), e @ QuaintKind::TransactionAlreadyClosed(_) => SqlError::TransactionAlreadyClosed(format!("{e}")), e @ QuaintKind::IncorrectNumberOfParameters { .. } => SqlError::QueryError(e.into()), diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs b/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs index 5b86bfaa581b..af3b73271aa9 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs @@ -6,13 +6,31 @@ use crate::{ model_extensions::AsColumn, }; +use std::collections::HashMap; + use quaint::ast::*; use query_structure::*; +/// Represents a projection of a virtual field that is cheap to clone and compare but still has +/// enough information to determine whether it refers to the same field. +#[derive(PartialEq, Eq, Hash, Debug)] +enum VirtualSelectionKey { + RelationCount(RelationField), +} + +impl From<&VirtualSelection> for VirtualSelectionKey { + fn from(vs: &VirtualSelection) -> Self { + match vs { + VirtualSelection::RelationCount(rf, _) => Self::RelationCount(rf.clone()), + } + } +} + /// Select builder for joined queries. Relations are resolved using LATERAL JOINs. #[derive(Debug, Default)] pub(crate) struct LateralJoinSelectBuilder { alias: Alias, + visited_virtuals: HashMap, } impl JoinSelectBuilder for LateralJoinSelectBuilder { @@ -29,10 +47,18 @@ impl JoinSelectBuilder for LateralJoinSelectBuilder { /// ``` fn build(&mut self, args: QueryArguments, selected_fields: &FieldSelection, ctx: &Context<'_>) -> Select<'static> { let (select, parent_alias) = self.build_default_select(&args, ctx); - let select = self.with_selection(select, selected_fields, parent_alias, ctx); - let select = self.with_relations(select, selected_fields.relations(), parent_alias, ctx); + let select = self.with_relations( + select, + selected_fields.relations(), + selected_fields.virtuals(), + parent_alias, + ctx, + ); + let select = self.with_virtual_selections(select, selected_fields.virtuals(), parent_alias, ctx); - self.with_virtual_selections(select, selected_fields.virtuals(), parent_alias, ctx) + // Build selection as the last step utilizing the information we collected in + // `with_relations` and `with_virtual_selections`. 
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs b/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs
index 5b86bfaa581b..af3b73271aa9 100644
--- a/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_builder/select/lateral.rs
@@ -6,13 +6,31 @@ use crate::{
     model_extensions::AsColumn,
 };
 
+use std::collections::HashMap;
+
 use quaint::ast::*;
 use query_structure::*;
 
+/// Represents a projection of a virtual field that is cheap to clone and compare but still has
+/// enough information to determine whether it refers to the same field.
+#[derive(PartialEq, Eq, Hash, Debug)]
+enum VirtualSelectionKey {
+    RelationCount(RelationField),
+}
+
+impl From<&VirtualSelection> for VirtualSelectionKey {
+    fn from(vs: &VirtualSelection) -> Self {
+        match vs {
+            VirtualSelection::RelationCount(rf, _) => Self::RelationCount(rf.clone()),
+        }
+    }
+}
+
 /// Select builder for joined queries. Relations are resolved using LATERAL JOINs.
 #[derive(Debug, Default)]
 pub(crate) struct LateralJoinSelectBuilder {
     alias: Alias,
+    visited_virtuals: HashMap<VirtualSelectionKey, String>,
 }
 
 impl JoinSelectBuilder for LateralJoinSelectBuilder {
@@ -29,10 +47,18 @@ impl JoinSelectBuilder for LateralJoinSelectBuilder {
     /// ```
     fn build(&mut self, args: QueryArguments, selected_fields: &FieldSelection, ctx: &Context<'_>) -> Select<'static> {
         let (select, parent_alias) = self.build_default_select(&args, ctx);
-        let select = self.with_selection(select, selected_fields, parent_alias, ctx);
-        let select = self.with_relations(select, selected_fields.relations(), parent_alias, ctx);
+        let select = self.with_relations(
+            select,
+            selected_fields.relations(),
+            selected_fields.virtuals(),
+            parent_alias,
+            ctx,
+        );
+        let select = self.with_virtual_selections(select, selected_fields.virtuals(), parent_alias, ctx);
 
-        self.with_virtual_selections(select, selected_fields.virtuals(), parent_alias, ctx)
+        // Build selection as the last step utilizing the information we collected in
+        // `with_relations` and `with_virtual_selections`.
+        self.with_selection(select, selected_fields, parent_alias, ctx)
     }
 
     fn build_selection<'a>(
@@ -67,38 +93,52 @@ impl JoinSelectBuilder for LateralJoinSelectBuilder {
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
-        let (subselect, child_alias) =
-            self.build_to_one_select(rs, parent_alias, |expr: Expression<'_>| expr.alias(JSON_AGG_IDENT), ctx);
-        let subselect = self.with_relations(subselect, rs.relations(), child_alias, ctx);
+        let (subselect, child_alias) = self.build_to_one_select(rs, parent_alias, ctx);
+
+        let subselect = self.with_relations(subselect, rs.relations(), rs.virtuals(), child_alias, ctx);
         let subselect = self.with_virtual_selections(subselect, rs.virtuals(), child_alias, ctx);
 
+        // Build the JSON object using the information we collected before in `with_relations` and
+        // `with_virtual_selections`.
+        let subselect = subselect.value(self.build_json_obj_fn(rs, child_alias, ctx).alias(JSON_AGG_IDENT));
+
         let join_table = Table::from(subselect).alias(join_alias_name(&rs.field));
+
         // LEFT JOIN LATERAL ( <join_table> ) AS <alias> ON TRUE
         select.left_join(join_table.on(ConditionTree::single(true.raw())).lateral())
     }
 
-    fn add_to_many_relation<'a>(
+    fn add_to_many_relation<'a, 'b>(
         &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
         let join_table_alias = join_alias_name(&rs.field);
-        let join_table = Table::from(self.build_to_many_select(rs, parent_alias, ctx)).alias(join_table_alias);
+        let mut to_many_select = self.build_to_many_select(rs, parent_alias, ctx);
+
+        if let Some(vs) = self.find_compatible_virtual_for_relation(rs, parent_virtuals) {
+            self.visited_virtuals.insert(vs.into(), join_table_alias.clone());
+            to_many_select = to_many_select.value(build_inline_virtual_selection(vs));
+        }
+
+        let join_table = Table::from(to_many_select).alias(join_table_alias);
 
         // LEFT JOIN LATERAL ( <join_table> ) AS <alias> ON TRUE
         select.left_join(join_table.on(ConditionTree::single(true.raw())).lateral())
     }
 
-    fn add_many_to_many_relation<'a>(
+    fn add_many_to_many_relation<'a, 'b>(
         &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
-        let m2m_join = self.build_m2m_join(rs, parent_alias, ctx);
+        let m2m_join = self.build_m2m_join(rs, parent_virtuals, parent_alias, ctx);
 
         select.left_join(m2m_join)
     }
 
@@ -110,8 +150,11 @@ impl JoinSelectBuilder for LateralJoinSelectBuilder {
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
+        let alias = self.next_alias();
         let relation_count_select = self.build_virtual_select(vs, parent_alias, ctx);
-        let table = Table::from(relation_count_select).alias(relation_count_alias_name(vs.relation_field()));
+        let table = Table::from(relation_count_select).alias(alias.to_table_string());
+
+        self.visited_virtuals.insert(vs.into(), alias.to_table_string());
 
         select.left_join_lateral(table.on(ConditionTree::single(true.raw())))
     }
@@ -155,10 +198,13 @@ impl JoinSelectBuilder for LateralJoinSelectBuilder {
         _parent_alias: Alias,
         _ctx: &Context<'_>,
     ) -> Expression<'static> {
-        let rf = vs.relation_field();
+        let virtual_selection_alias = self
+            .visited_virtuals
+            .remove(&vs.into())
+            .expect("All virtual fields must be visited before calling build_virtual_expr");
 
         coalesce([
-            Expression::from(Column::from((relation_count_alias_name(rf), vs.db_alias()))),
+            Expression::from(Column::from((virtual_selection_alias, vs.db_alias()))),
             Expression::from(0.raw()),
         ])
         .into()
@@ -168,14 +214,25 @@ impl JoinSelectBuilder for LateralJoinSelectBuilder {
         self.alias = self.alias.inc(AliasMode::Table);
         self.alias
     }
+
+    fn was_virtual_processed_in_relation(&self, vs: &VirtualSelection) -> bool {
+        self.visited_virtuals.contains_key(&vs.into())
+    }
 }
 
 impl LateralJoinSelectBuilder {
-    fn build_m2m_join<'a>(&mut self, rs: &RelationSelection, parent_alias: Alias, ctx: &Context<'_>) -> JoinData<'a> {
+    fn build_m2m_join<'a, 'b>(
+        &mut self,
+        rs: &RelationSelection,
+        parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
+        parent_alias: Alias,
+        ctx: &Context<'_>,
+    ) -> JoinData<'a> {
         let rf = rs.field.clone();
         let m2m_table_alias = self.next_alias();
         let m2m_join_alias = self.next_alias();
         let outer_alias = self.next_alias();
+        let json_data_alias = m2m_join_alias_name(&rf);
 
         let m2m_join_data = Table::from(self.build_to_many_select(rs, m2m_table_alias, ctx))
             .alias(m2m_join_alias.to_table_string())
@@ -194,13 +251,24 @@ impl LateralJoinSelectBuilder {
             .with_pagination(rs.args.take_abs(), rs.args.skip)
             .comment("inner"); // adds pagination
 
-        let outer = Select::from_table(Table::from(inner).alias(outer_alias.to_table_string()))
+        let mut outer = Select::from_table(Table::from(inner).alias(outer_alias.to_table_string()))
             .value(json_agg())
             .comment("outer");
 
+        if let Some(vs) = self.find_compatible_virtual_for_relation(rs, parent_virtuals) {
+            self.visited_virtuals.insert(vs.into(), json_data_alias.clone());
+            outer = outer.value(build_inline_virtual_selection(vs));
+        }
+
         Table::from(outer)
-            .alias(m2m_join_alias_name(&rf))
+            .alias(json_data_alias)
             .on(ConditionTree::single(true.raw()))
             .lateral()
     }
 }
+
+fn build_inline_virtual_selection<'a>(vs: &VirtualSelection) -> Expression<'a> {
+    match vs {
+        VirtualSelection::RelationCount(..) => count(Column::from(JSON_AGG_IDENT)).alias(vs.db_alias()).into(),
+    }
+}
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs
index d878ad63ec18..766c102b5b69 100644
--- a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs
@@ -45,18 +45,20 @@ pub(crate) trait JoinSelectBuilder {
         ctx: &Context<'_>,
     ) -> Select<'a>;
     /// Adds to `select` the SQL statements to fetch a 1-m relation.
-    fn add_to_many_relation<'a>(
+    fn add_to_many_relation<'a, 'b>(
         &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a>;
     /// Adds to `select` the SQL statements to fetch a m-n relation.
-    fn add_many_to_many_relation<'a>(
+    fn add_many_to_many_relation<'a, 'b>(
        &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a>;
@@ -89,6 +91,9 @@ pub(crate) trait JoinSelectBuilder {
     ) -> Expression<'static>;
     /// Get the next alias for a table.
     fn next_alias(&mut self) -> Alias;
+    /// Checks if a virtual selection has already been added to the query at an earlier stage
+    /// as a part of a relation query for a matching relation field.
+    fn was_virtual_processed_in_relation(&self, vs: &VirtualSelection) -> bool;
 
     fn with_selection<'a>(
         &mut self,
@@ -107,11 +112,12 @@ pub(crate) trait JoinSelectBuilder {
     }
 
     /// Builds the core select for a 1-1 relation.
+    /// Note: it does not add the JSON object selection because there are additional steps to
+    /// perform before that depending on the `JoinSelectBuilder` implementation.
     fn build_to_one_select(
         &mut self,
         rs: &RelationSelection,
         parent_alias: Alias,
-        selection_modifier: impl FnOnce(Expression<'static>) -> Expression<'static>,
         ctx: &Context<'_>,
     ) -> (Select<'static>, Alias) {
         let rf = &rs.field;
@@ -121,12 +127,10 @@ pub(crate) trait JoinSelectBuilder {
             .related_field()
             .as_table(ctx)
             .alias(child_table_alias.to_table_string());
-        let json_expr = self.build_json_obj_fn(rs, child_table_alias, ctx);
 
         let select = Select::from_table(table)
             .with_join_conditions(rf, parent_alias, child_table_alias, ctx)
             .with_filters(rs.args.filter.clone(), Some(child_table_alias), ctx)
-            .value(selection_modifier(json_expr))
             .limit(1);
 
         (select, child_table_alias)
@@ -155,11 +159,14 @@ pub(crate) trait JoinSelectBuilder {
             .comment("root select");
 
         // SELECT JSON_BUILD_OBJECT() FROM ( <root select> )
-        let inner = Select::from_table(Table::from(root).alias(root_alias.to_table_string()))
-            .value(self.build_json_obj_fn(rs, root_alias, ctx).alias(JSON_AGG_IDENT));
-        let inner = self.with_relations(inner, rs.relations(), root_alias, ctx);
+        let inner = Select::from_table(Table::from(root).alias(root_alias.to_table_string()));
+        let inner = self.with_relations(inner, rs.relations(), rs.virtuals(), root_alias, ctx);
         let inner = self.with_virtual_selections(inner, rs.virtuals(), root_alias, ctx);
 
+        // Build the JSON object utilizing the information we collected in `with_relations` and
+        // `with_virtual_selections`.
+        let inner = inner.value(self.build_json_obj_fn(rs, root_alias, ctx).alias(JSON_AGG_IDENT));
+
         let linking_fields = rs.field.related_field().linking_fields();
 
         if rs.field.relation().is_many_to_many() {
@@ -212,16 +219,17 @@ pub(crate) trait JoinSelectBuilder {
         }
     }
 
-    fn with_relation<'a>(
+    fn with_relation<'a, 'b>(
         &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
         match (rs.field.is_list(), rs.field.relation().is_many_to_many()) {
-            (true, true) => self.add_many_to_many_relation(select, rs, parent_alias, ctx),
-            (true, false) => self.add_to_many_relation(select, rs, parent_alias, ctx),
+            (true, true) => self.add_many_to_many_relation(select, rs, parent_virtuals, parent_alias, ctx),
+            (true, false) => self.add_to_many_relation(select, rs, parent_virtuals, parent_alias, ctx),
             (false, _) => self.add_to_one_relation(select, rs, parent_alias, ctx),
         }
     }
 
@@ -230,10 +238,15 @@ pub(crate) trait JoinSelectBuilder {
         &mut self,
         input: Select<'a>,
         relation_selections: impl Iterator<Item = &'b RelationSelection>,
+        virtual_selections: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
-        relation_selections.fold(input, |acc, rs| self.with_relation(acc, rs, parent_alias, ctx))
+        let virtual_selections = virtual_selections.collect::<Vec<_>>();
+
+        relation_selections.fold(input, |acc, rs| {
+            self.with_relation(acc, rs, virtual_selections.iter().copied(), parent_alias, ctx)
+        })
     }
 
     fn build_default_select(&mut self, args: &QueryArguments, ctx: &Context<'_>) -> (Select<'static>, Alias) {
@@ -258,7 +271,13 @@ pub(crate) trait JoinSelectBuilder {
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
-        selections.fold(select, |acc, vs| self.add_virtual_selection(acc, vs, parent_alias, ctx))
+        selections.fold(select, |acc, vs| {
+            if self.was_virtual_processed_in_relation(vs) {
+                acc
+            } else {
+                self.add_virtual_selection(acc, vs, parent_alias, ctx)
+            }
+        })
     }
 
     fn build_virtual_select(
@@ -372,6 +391,18 @@ pub(crate) trait JoinSelectBuilder {
 
         select
     }
+
+    fn find_compatible_virtual_for_relation<'a>(
+        &self,
+        rs: &RelationSelection,
+        mut parent_virtuals: impl Iterator<Item = &'a VirtualSelection>,
+    ) -> Option<&'a VirtualSelection> {
+        if rs.args.take.is_some() || rs.args.skip.is_some() || rs.args.cursor.is_some() || rs.args.distinct.is_some() {
+            return None;
+        }
+
+        parent_virtuals.find(|vs| *vs.relation_field() == rs.field && vs.filter() == rs.args.filter.as_ref())
+    }
 }
 
 pub(crate) trait SelectBuilderExt<'a> {
@@ -613,7 +644,3 @@ fn supports_lateral_join(args: &QueryArguments) -> bool {
         .connector
         .has_capability(ConnectorCapability::LateralJoin)
 }
-
-fn relation_count_alias_name(rf: &RelationField) -> String {
-    format!("aggr_count_{}_{}", rf.model().name(), rf.name())
-}
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs b/query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs
index 202d42780e8b..437ee9f075a8 100644
--- a/query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_builder/select/subquery.rs
@@ -46,7 +46,7 @@ impl JoinSelectBuilder for SubqueriesSelectBuilder {
                     .table(parent_alias.to_table_string())
                     .set_is_selected(true),
             ),
-            SelectedField::Relation(rs) => self.with_relation(select, rs, parent_alias, ctx),
+            SelectedField::Relation(rs) => self.with_relation(select, rs, Vec::new().iter(), parent_alias, ctx),
             _ => select,
         }
     }
@@ -58,15 +58,17 @@ impl JoinSelectBuilder for SubqueriesSelectBuilder {
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
-        let (subselect, _) = self.build_to_one_select(rs, parent_alias, |x| x, ctx);
+        let (subselect, child_alias) = self.build_to_one_select(rs, parent_alias, ctx);
+        let subselect = subselect.value(self.build_json_obj_fn(rs, child_alias, ctx));
 
         select.value(Expression::from(subselect).alias(rs.field.name().to_owned()))
     }
 
-    fn add_to_many_relation<'a>(
+    fn add_to_many_relation<'a, 'b>(
         &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        _parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
@@ -75,10 +77,11 @@
 
         select.value(Expression::from(subselect).alias(rs.field.name().to_owned()))
     }
 
-    fn add_many_to_many_relation<'a>(
+    fn add_many_to_many_relation<'a, 'b>(
         &mut self,
         select: Select<'a>,
         rs: &RelationSelection,
+        _parent_virtuals: impl Iterator<Item = &'b VirtualSelection>,
         parent_alias: Alias,
         ctx: &Context<'_>,
     ) -> Select<'a> {
@@ -117,7 +120,7 @@ impl JoinSelectBuilder for SubqueriesSelectBuilder {
             )),
             SelectedField::Relation(rs) => Some((
                 Cow::from(rs.field.name().to_owned()),
-                Expression::from(self.with_relation(Select::default(), rs, parent_alias, ctx)),
+                Expression::from(self.with_relation(Select::default(), rs, Vec::new().iter(), parent_alias, ctx)),
             )),
             _ => None,
         })
@@ -144,6 +147,10 @@ impl JoinSelectBuilder for SubqueriesSelectBuilder {
         self.alias = self.alias.inc(AliasMode::Table);
         self.alias
     }
+
+    fn was_virtual_processed_in_relation(&self, _vs: &VirtualSelection) -> bool {
+        false
+    }
 }
 
 impl SubqueriesSelectBuilder {
@@ -188,3 +195,7 @@ impl SubqueriesSelectBuilder {
             .comment("outer")
     }
 }
+
+fn relation_count_alias_name(rf: &RelationField) -> String {
+    format!("aggr_count_{}_{}", rf.model().name(), rf.name())
+}
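The net effect of the select-builder changes above: when a `_count` aggregate targets a relation that is itself selected, and the relation subquery is compatible (same filter, no `take`/`skip`/`cursor`/`distinct`), the count is computed inline over the rows of the existing LATERAL join instead of getting a dedicated join under `relation_count_alias_name`. Below is a self-contained sketch of that bookkeeping; `Relation`/`Virtual` are hypothetical simplified stand-ins for `RelationSelection`/`VirtualSelection`, not the real types.

```rust
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct RelationFieldId(String);

struct Relation {
    field: RelationFieldId,
    filter: Option<String>,
    paginated: bool,
}

struct Virtual {
    field: RelationFieldId,
    filter: Option<String>,
}

#[derive(Default)]
struct Builder {
    // Maps a deduplicated `_count` to the alias of the join that already computes it,
    // like `visited_virtuals: HashMap<VirtualSelectionKey, String>` in the diff.
    visited: HashMap<RelationFieldId, String>,
}

impl Builder {
    fn visit_relation(&mut self, rs: &Relation, join_alias: String, parent_virtuals: &[Virtual]) {
        // Only piggyback the count if the join returns the complete, identically
        // filtered row set; pagination would make COUNT over its rows undercount.
        if rs.paginated {
            return;
        }
        if let Some(vs) = parent_virtuals
            .iter()
            .find(|vs| vs.field == rs.field && vs.filter == rs.filter)
        {
            self.visited.insert(vs.field.clone(), join_alias);
        }
    }

    fn needs_own_join(&self, vs: &Virtual) -> bool {
        !self.visited.contains_key(&vs.field)
    }
}

fn main() {
    let posts = RelationFieldId("User.posts".into());
    let rs = Relation { field: posts.clone(), filter: None, paginated: false };
    let vs = Virtual { field: posts, filter: None };

    let mut builder = Builder::default();
    builder.visit_relation(&rs, "t1".into(), std::slice::from_ref(&vs));

    // `_count.posts` is read from join `t1` instead of getting its own LATERAL join.
    assert!(!builder.needs_own_join(&vs));
}
```

The pagination guard mirrors `find_compatible_virtual_for_relation`: a paginated or deduplicated join no longer returns the full row set, so counting its rows would give the wrong answer.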
"description": "movies.findMany() (all - 25000)", + "description": "movies.findMany() (all - ~50K)", "query": { "action": "findMany", "modelName": "Movie", @@ -59,7 +59,12 @@ "cast": true }, "selection": { - "$scalars": true + "$scalars": true, + "cast": { + "selection": { + "$scalars": true + } + } } } } @@ -82,7 +87,12 @@ "cast": true }, "selection": { - "$scalars": true + "$scalars": true, + "cast": { + "selection": { + "$scalars": true + } + } } } } @@ -104,7 +114,16 @@ } }, "selection": { - "$scalars": true + "$scalars": true, + "cast": { + "selection": { + "person": { + "selection": { + "$scalars": true + } + } + } + } } } } @@ -131,7 +150,16 @@ } }, "selection": { - "$scalars": true + "$scalars": true, + "cast": { + "selection": { + "person": { + "selection": { + "$scalars": true + } + } + } + } } } } diff --git a/query-engine/driver-adapters/executor/bench/schema.prisma b/query-engine/driver-adapters/executor/bench/schema.prisma index a45c1e62b4cc..6346afed158d 100644 --- a/query-engine/driver-adapters/executor/bench/schema.prisma +++ b/query-engine/driver-adapters/executor/bench/schema.prisma @@ -5,7 +5,7 @@ datasource db { generator foo { provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] + previewFeatures = ["driverAdapters", "relationJoins"] } model Movie { diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index 8e1fd6e2a398..3733b94193e8 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -22,9 +22,6 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "@libsql/client": "0.3.6", - "@neondatabase/serverless": "0.8.1", - "@planetscale/database": "1.16.0", "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", "@prisma/adapter-libsql": "workspace:*", @@ -32,9 +29,8 @@ "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", "@prisma/driver-adapter-utils": "workspace:*", - "@types/pg": "8.10.9", + "@prisma/bundled-js-drivers": "workspace:*", "mitata": "^0.1.6", - "pg": "8.11.3", "undici": "6.0.1", "ws": "8.14.2" }, diff --git a/query-engine/driver-adapters/executor/src/bench.ts b/query-engine/driver-adapters/executor/src/bench.ts index 1f4ffa6ad436..e168e95a9cab 100644 --- a/query-engine/driver-adapters/executor/src/bench.ts +++ b/query-engine/driver-adapters/executor/src/bench.ts @@ -9,7 +9,7 @@ import { fileURLToPath } from "node:url"; import * as qe from "./qe"; -import pgDriver from "pg"; +import { pg } from "@prisma/bundled-js-drivers"; import * as prismaPg from "@prisma/adapter-pg"; import { bindAdapter, DriverAdapter } from "@prisma/driver-adapter-utils"; @@ -45,11 +45,18 @@ async function main(): Promise { const withErrorCapturing = bindAdapter(pg); // We build two decorators for recording and replaying db queries. 
-  const { recorder, replayer } = recording(withErrorCapturing);
+  const { recorder, replayer, recordings } = recording(withErrorCapturing);
 
   // We exercise the queries recording them
   await recordQueries(recorder, datamodel, prismaQueries);
 
+  // Dump recordings if requested
+  if (process.env.BENCH_RECORDINGS_FILE != null) {
+    const recordingsJson = JSON.stringify(recordings.data(), null, 2);
+    await fs.writeFile(process.env.BENCH_RECORDINGS_FILE, recordingsJson);
+    debug(`Recordings written to ${process.env.BENCH_RECORDINGS_FILE}`);
+  }
+
   // Then we benchmark the execution of the queries but instead of hitting the DB
   // we fetch results from the recordings, thus isolating the performance
   // of the engine + driver adapter code from that of the DB IO.
@@ -61,23 +68,37 @@ async function recordQueries(
   datamodel: string,
   prismaQueries: any
 ): Promise<void> {
-  const qe = await initQeWasmBaseLine(adapter, datamodel);
-  await qe.connect("");
+  // Different engines might have made different SQL queries to complete the same Prisma Query,
+  // so we record the results of all engines for the benchmarking phase.
+  const napi = await initQeNapiCurrent(adapter, datamodel);
+  await napi.connect("");
+  const wasmCurrent = await initQeWasmCurrent(adapter, datamodel);
+  await wasmCurrent.connect("");
+  const wasmBaseline = await initQeWasmBaseLine(adapter, datamodel);
+  await wasmBaseline.connect("");
+  const wasmLatest = await initQeWasmLatest(adapter, datamodel);
+  await wasmLatest.connect("");
 
   try {
-    for (const prismaQuery of prismaQueries) {
-      const { description, query } = prismaQuery;
-      const res = await qe.query(JSON.stringify(query), "", undefined);
-
-      const errors = JSON.parse(res).errors;
-      if (errors != null && errors.length > 0) {
-        throw new Error(
-          `Query failed for ${description}: ${JSON.stringify(res)}`
-        );
+    for (const qe of [napi, wasmCurrent, wasmBaseline, wasmLatest]) {
+      for (const prismaQuery of prismaQueries) {
+        const { description, query } = prismaQuery;
+        const res = await qe.query(JSON.stringify(query), "", undefined);
+
+        const errors = JSON.parse(res).errors;
+        if (errors != null) {
+          throw new Error(
+            `Query failed for ${description}: ${JSON.stringify(res)}`
+          );
+        }
       }
     }
   } finally {
-    await qe.disconnect("");
+    await napi.disconnect("");
+    await wasmCurrent.disconnect("");
+    await wasmBaseline.disconnect("");
+    await wasmLatest.disconnect("");
   }
 }
 
@@ -176,7 +197,7 @@ async function pgAdapter(url: string): Promise<DriverAdapter> {
   if (schemaName != null) {
     args.options = `--search_path="${schemaName}"`;
   }
-  const pool = new pgDriver.Pool(args);
+  const pool = new pg.Pool(args);
 
   return new prismaPg.PrismaPg(pool, {
     schema: schemaName,
diff --git a/query-engine/driver-adapters/executor/src/recording.ts b/query-engine/driver-adapters/executor/src/recording.ts
index a4152488e985..0602cb69dc4e 100644
--- a/query-engine/driver-adapters/executor/src/recording.ts
+++ b/query-engine/driver-adapters/executor/src/recording.ts
@@ -13,6 +13,7 @@ export function recording(adapter: DriverAdapter) {
   return {
     recorder: recorder(adapter, recordings),
     replayer: replayer(adapter, recordings),
+    recordings: recordings,
   };
 }
 
@@ -31,9 +32,7 @@ function recorder(adapter: DriverAdapter, recordings: Recordings) {
       return result;
     },
     executeRaw: async (params) => {
-      const result = await adapter.executeRaw(params);
-      recordings.addCommandResults(params, result);
-      return result;
+      throw new Error("Not implemented");
     },
   };
 }
 
@@ -61,18 +60,25 @@ function createInMemoryRecordings() {
   const queryResults: Map<string, Result<ResultSet>> = new Map();
   const commandResults: Map<string, Result<number>> = new Map();
 
-  // Recording is currently only used in benchmarks. Before we used to serialize the whole query
-  // (sql + args) but since bigints are not serialized by JSON.stringify, and we didn't really need
-  // args for benchmarks, we just serialize the sql part.
-  //
-  // If this ever changes (we reuse query recording in tests) we need to make sure to serialize the
-  // args as well.
   const queryToKey = (params: Query) => {
-    return JSON.stringify(params.sql);
+    var sql = params.sql;
+    params.args.forEach((arg: any, i) => {
+      sql = sql.replace("$" + (i + 1), arg.toString());
+    });
+    return sql;
   };
 
   return {
+    data: (): Map<string, ResultSet> => {
+      const map = new Map();
+      for (const [key, value] of queryResults.entries()) {
+        value.map((resultSet) => {
+          map[key] = resultSet;
+        });
+      }
+      return map;
+    },
+
     addQueryResults: (params: Query, result: Result<ResultSet>) => {
       const key = queryToKey(params);
       queryResults.set(key, result);
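The `queryToKey` rewrite above stops keying recordings on `JSON.stringify(params.sql)` alone and instead inlines the arguments into the SQL text, so the same statement executed with different parameters no longer collapses into one recording. The sketch below restates that keying idea in Rust purely for illustration (the executor itself is TypeScript); note that it substitutes higher placeholder indices first, since a plain ascending replace can let `$1` match the prefix of `$10`.

```rust
use std::collections::HashMap;

// Build a recording key by inlining positional arguments ($1, $2, ...) into the SQL text.
fn query_to_key(sql: &str, args: &[String]) -> String {
    let mut key = sql.to_string();
    // Iterate in reverse so "$10" is substituted before "$1".
    for (i, arg) in args.iter().enumerate().rev() {
        key = key.replacen(&format!("${}", i + 1), arg, 1);
    }
    key
}

fn main() {
    let mut recordings: HashMap<String, &str> = HashMap::new();
    recordings.insert(
        query_to_key("SELECT * FROM users WHERE id = $1", &["1".into()]),
        "row for user 1",
    );

    // The same SQL with a different argument now gets its own entry instead of a collision.
    assert!(recordings
        .get(&query_to_key("SELECT * FROM users WHERE id = $1", &["2".into()]))
        .is_none());
}
```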
diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts
index ae40ee229490..4345887fe659 100644
--- a/query-engine/driver-adapters/executor/src/testd.ts
+++ b/query-engine/driver-adapters/executor/src/testd.ts
@@ -3,21 +3,18 @@ import * as readline from 'node:readline'
 import * as jsonRpc from './jsonRpc'
 
 // pg dependencies
-import pgDriver from 'pg'
 import * as prismaPg from '@prisma/adapter-pg'
 
 // neon dependencies
-import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless'
 import { fetch } from 'undici'
 import { WebSocket } from 'ws'
+import { pg, neon, planetScale, libSql } from '@prisma/bundled-js-drivers'
 import * as prismaNeon from '@prisma/adapter-neon'
 
 // libsql dependencies
-import { createClient } from '@libsql/client'
 import { PrismaLibSQL } from '@prisma/adapter-libsql'
 
 // planetscale dependencies
-import { Client as PlanetscaleClient } from '@planetscale/database'
 import { PrismaPlanetScale } from '@prisma/adapter-planetscale'
 
@@ -256,7 +253,7 @@ function postgresSchemaName(url: string) {
 
 async function pgAdapter(url: string): Promise<DriverAdapter> {
   const schemaName = postgresSchemaName(url)
-  const pool = new pgDriver.Pool(postgres_options(url))
+  const pool = new pg.Pool(postgres_options(url))
   return new prismaPg.PrismaPg(pool, {
     schema: schemaName
   })
@@ -264,6 +261,7 @@ async function pgAdapter(url: string): Promise<DriverAdapter> {
 }
 
 async function neonWsAdapter(url: string): Promise<DriverAdapter> {
+  const { neonConfig, Pool: NeonPool } = neon
   const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxy_url ?? ''
   if (proxyURL == '') {
     throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but it's required for neon adapter.");
@@ -281,7 +279,7 @@ async function neonWsAdapter(url: string): Promise<DriverAdapter> {
 }
 
 async function libsqlAdapter(url: string): Promise<DriverAdapter> {
-  const libsql = createClient({ url, intMode: 'bigint' })
+  const libsql = libSql.createClient({ url, intMode: 'bigint' })
   return new PrismaLibSQL(libsql)
 }
 
@@ -291,7 +289,7 @@ async function planetscaleAdapter(url: string): Promise<DriverAdapter> {
     throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but it's required for planetscale adapter.");
   }
 
-  const client = new PlanetscaleClient({
+  const client = new planetScale.Client({
     // preserving path name so proxy url would look like real DB url
     url: copyPathName(url, proxyUrl),
     fetch,
diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml
index a616622479a5..7d2cb5c6d311 100644
--- a/query-engine/driver-adapters/pnpm-workspace.yaml
+++ b/query-engine/driver-adapters/pnpm-workspace.yaml
@@ -5,4 +5,5 @@ packages:
   - '../../../prisma/packages/adapter-planetscale'
   - '../../../prisma/packages/driver-adapter-utils'
   - '../../../prisma/packages/debug'
+  - '../../../prisma/packages/bundled-js-drivers'
   - './executor'
diff --git a/query-engine/driver-adapters/src/napi/conversion.rs b/query-engine/driver-adapters/src/napi/conversion.rs
index ac2dda60a279..6cfe445925e3 100644
--- a/query-engine/driver-adapters/src/napi/conversion.rs
+++ b/query-engine/driver-adapters/src/napi/conversion.rs
@@ -18,7 +18,12 @@ impl ToNapiValue for JSArg {
         match value {
             JSArg::Value(v) => ToNapiValue::to_napi_value(env, v),
             JSArg::Buffer(bytes) => {
-                ToNapiValue::to_napi_value(env, napi::Env::from_raw(env).create_buffer_with_data(bytes)?.into_raw())
+                let env = napi::Env::from_raw(env);
+                let length = bytes.len();
+                let buffer = env.create_arraybuffer_with_data(bytes)?.into_raw();
+                let byte_array = buffer.into_typedarray(napi::TypedArrayType::Uint8, length, 0)?;
+
+                ToNapiValue::to_napi_value(env.raw(), byte_array)
             }
             // While arrays are encodable as JSON generally, their elements might not be, or may be
            // represented in a different way than we need. We use this custom logic for all arrays
diff --git a/query-engine/driver-adapters/src/wasm/conversion.rs b/query-engine/driver-adapters/src/wasm/conversion.rs
index c41ff8a23107..73e6a7c30331 100644
--- a/query-engine/driver-adapters/src/wasm/conversion.rs
+++ b/query-engine/driver-adapters/src/wasm/conversion.rs
@@ -3,16 +3,7 @@ use crate::conversion::JSArg;
 use super::to_js::{serde_serialize, ToJsValue};
 use crate::types::Query;
 use js_sys::{Array, JsString, Object, Reflect, Uint8Array};
-use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
-
-#[wasm_bindgen]
-extern "C" {
-    #[wasm_bindgen(extends = Object)]
-    pub type Buffer;
-
-    #[wasm_bindgen(static_method_of = Buffer)]
-    pub fn from(array: &Uint8Array) -> Buffer;
-}
+use wasm_bindgen::JsValue;
 
 impl ToJsValue for Query {
     fn to_js_value(&self) -> Result<JsValue, JsValue> {
@@ -36,7 +27,7 @@ impl ToJsValue for JSArg {
             JSArg::Value(value) => serde_serialize(value),
             JSArg::Buffer(buf) => {
                 let array = Uint8Array::from(buf.as_slice());
-                Ok(Buffer::from(&array).into())
+                Ok(array.into())
             }
             JSArg::Array(value) => {
                 let array = Array::new();
diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh
index c3f129bb276b..0db1aad5bf08 100755
--- a/query-engine/query-engine-wasm/build.sh
+++ b/query-engine/query-engine-wasm/build.sh
@@ -34,7 +34,6 @@ echo "ℹ️ target version: $OUT_VERSION"
 echo "ℹ️ out folder: $OUT_FOLDER"
 
 if [[ -z "${WASM_BUILD_PROFILE:-}" ]]; then
-  # use `wasm-pack build --release` by default on CI only
   if [[ -z "${BUILDKITE:-}" ]] && [[ -z "${GITHUB_ACTIONS:-}" ]]; then
     WASM_BUILD_PROFILE="dev"
   else
@@ -45,22 +44,23 @@ fi
 if [ "$WASM_BUILD_PROFILE" = "dev" ]; then
   WASM_TARGET_SUBDIR="debug"
 else
-  WASM_TARGET_SUBDIR="release"
+  WASM_TARGET_SUBDIR="$WASM_BUILD_PROFILE"
 fi
 
-echo "Using build profile: \"${WASM_BUILD_PROFILE}\""
-echo "ℹ️ Configuring rust toolchain to use nightly and rust-src component"
-rustup default nightly-2024-01-25
-rustup target add wasm32-unknown-unknown
-rustup component add rust-src --target wasm32-unknown-unknown
-export RUSTFLAGS="-Zlocation-detail=none"
-CARGO_TARGET_DIR=$(cargo metadata --format-version 1 | jq -r .target_directory)
 build() {
+  echo "ℹ️ Configuring rust toolchain to use nightly and rust-src component"
+  rustup default nightly-2024-01-25
+  rustup target add wasm32-unknown-unknown
+  rustup component add rust-std --target wasm32-unknown-unknown
+  rustup component add rust-src --target wasm32-unknown-unknown
+
   local CONNECTOR="$1"
-  echo "🔨 Building $CONNECTOR"
-  CARGO_PROFILE_RELEASE_OPT_LEVEL="z" cargo build \
+
+  local CARGO_TARGET_DIR
+  CARGO_TARGET_DIR=$(cargo metadata --format-version 1 | jq -r .target_directory)
+
+  echo "🔨 Building $CONNECTOR"
+  RUSTFLAGS="-Zlocation-detail=none" CARGO_PROFILE_RELEASE_OPT_LEVEL="z" cargo build \
     -p query-engine-wasm \
     --profile "$WASM_BUILD_PROFILE" \
     --features "$CONNECTOR" \
diff --git a/query-engine/query-structure/src/field_selection.rs b/query-engine/query-structure/src/field_selection.rs
index 4558eb77f335..b6d9bcb883e9 100644
--- a/query-engine/query-structure/src/field_selection.rs
+++ b/query-engine/query-structure/src/field_selection.rs
@@ -68,6 +68,10 @@ impl FieldSelection {
         FieldSelection::new(non_virtuals.into_iter().chain(virtuals).collect())
     }
 
+    pub fn to_virtuals_last(&self) -> Self {
+        self.clone().into_virtuals_last()
+    }
+
     /// Returns the selections, grouping the virtual fields that are wrapped into objects in the
     /// query (like `_count`) and returning only the first virtual field in each of those groups.
     /// This is useful when we want to treat the group as a whole but we don't need the information
@@ -332,11 +336,21 @@ impl VirtualSelection {
             VirtualSelection::RelationCount(rf, _) => rf,
         }
     }
+
+    pub fn filter(&self) -> Option<&Filter> {
+        match self {
+            VirtualSelection::RelationCount(_, filter) => filter.as_ref(),
+        }
+    }
 }
 
 impl Display for VirtualSelection {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.db_alias())
+        let model = self.relation_field().model();
+        let model_name = model.name();
+        let (obj, field) = self.serialized_name();
+
+        write!(f, "{model_name}.{obj}.{field}")
     }
 }
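`to_virtuals_last`, called from `get_single_record_joins`/`get_many_records_joins` earlier in this patch, exists because the join-based read path decodes result columns positionally: the lateral builder now emits virtual columns such as `_count` after scalar and relation columns, so the `FieldSelection` driving the decoder must list them in the same order. A minimal, self-contained sketch of that invariant, with a simplified `Field` enum standing in for prisma's `SelectedField` (the real `into_virtuals_last` operates on the actual selection types):

```rust
// Reorder a field list so virtual selections come last, matching the column order
// that the SQL builder now produces.
#[derive(Debug, PartialEq)]
enum Field {
    Scalar(&'static str),
    Relation(&'static str),
    Virtual(&'static str),
}

fn to_virtuals_last(fields: Vec<Field>) -> Vec<Field> {
    let (virtuals, non_virtuals): (Vec<_>, Vec<_>) =
        fields.into_iter().partition(|f| matches!(f, Field::Virtual(_)));
    // `partition` preserves relative order within each group, so only the
    // virtual/non-virtual boundary moves.
    non_virtuals.into_iter().chain(virtuals).collect()
}

fn main() {
    let reordered = to_virtuals_last(vec![
        Field::Scalar("id"),
        Field::Virtual("_count.posts"),
        Field::Relation("posts"),
    ]);
    assert_eq!(
        reordered,
        vec![
            Field::Scalar("id"),
            Field::Relation("posts"),
            Field::Virtual("_count.posts"),
        ]
    );
}
```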