From 76cf002c0dda5c1db33fb47ebe5d85ed52af1ecb Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 9 Oct 2023 17:19:41 +0200 Subject: [PATCH 01/67] feat(query-engine): add skeleton for Wasm Query Engine (#4333) Co-authored-by: Sergey Tatarintsev --- Cargo.lock | 99 +++++++ Cargo.toml | 1 + query-engine/query-engine-wasm/.gitignore | 7 + query-engine/query-engine-wasm/.nvmrc | 1 + query-engine/query-engine-wasm/Cargo.toml | 36 +++ query-engine/query-engine-wasm/README.md | 40 +++ query-engine/query-engine-wasm/build.rs | 11 + query-engine/query-engine-wasm/build.sh | 51 ++++ query-engine/query-engine-wasm/example.js | 54 ++++ .../query-engine-wasm/package-lock.json | 148 ++++++++++ query-engine/query-engine-wasm/package.json | 9 + query-engine/query-engine-wasm/src/engine.rs | 265 ++++++++++++++++++ query-engine/query-engine-wasm/src/error.rs | 93 ++++++ .../query-engine-wasm/src/functions.rs | 47 ++++ query-engine/query-engine-wasm/src/lib.rs | 19 ++ query-engine/query-engine-wasm/src/logger.rs | 132 +++++++++ query-engine/query-engine-wasm/src/proxy.rs | 107 +++++++ 17 files changed, 1120 insertions(+) create mode 100644 query-engine/query-engine-wasm/.gitignore create mode 100644 query-engine/query-engine-wasm/.nvmrc create mode 100644 query-engine/query-engine-wasm/Cargo.toml create mode 100644 query-engine/query-engine-wasm/README.md create mode 100644 query-engine/query-engine-wasm/build.rs create mode 100755 query-engine/query-engine-wasm/build.sh create mode 100644 query-engine/query-engine-wasm/example.js create mode 100644 query-engine/query-engine-wasm/package-lock.json create mode 100644 query-engine/query-engine-wasm/package.json create mode 100644 query-engine/query-engine-wasm/src/engine.rs create mode 100644 query-engine/query-engine-wasm/src/error.rs create mode 100644 query-engine/query-engine-wasm/src/functions.rs create mode 100644 query-engine/query-engine-wasm/src/lib.rs create mode 100644 query-engine/query-engine-wasm/src/logger.rs create mode 100644 query-engine/query-engine-wasm/src/proxy.rs diff --git a/Cargo.lock b/Cargo.lock index 3002a1404210..8166394f8c89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -672,6 +672,16 @@ dependencies = [ "windows-sys 0.45.0", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -1563,6 +1573,19 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "graphql-parser" version = "0.3.0" @@ -3784,6 +3807,35 @@ dependencies = [ "uuid", ] +[[package]] +name = "query-engine-wasm" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "connection-string", + "console_error_panic_hook", + "futures", + "js-sys", + "log", + "prisma-models", + "psl", + "serde", + "serde-wasm-bindgen", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "tracing-subscriber", + "tsify", + "url", + "user-facing-errors", + "wasm-bindgen", + 
"wasm-bindgen-futures", + "wasm-logger", +] + [[package]] name = "query-test-macros" version = "0.1.0" @@ -4533,6 +4585,17 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3b143e2833c57ab9ad3ea280d21fd34e285a42837aeb0ee301f4f41890fa00e" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + [[package]] name = "serde_bytes" version = "0.11.12" @@ -4553,6 +4616,17 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "serde_derive_internals" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + [[package]] name = "serde_json" version = "1.0.104" @@ -5663,6 +5737,31 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +[[package]] +name = "tsify" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b26cf145f2f3b9ff84e182c448eaf05468e247f148cf3d2a7d67d78ff023a0" +dependencies = [ + "gloo-utils", + "serde", + "serde_json", + "tsify-macros", + "wasm-bindgen", +] + +[[package]] +name = "tsify-macros" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a94b0f0954b3e59bfc2c246b4c8574390d94a4ad4ad246aaf2fb07d7dfd3b47" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.28", +] + [[package]] name = "twox-hash" version = "1.6.3" diff --git a/Cargo.toml b/Cargo.toml index 4499033a624b..4a3cd1450caf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ members = [ "query-engine/prisma-models", "query-engine/query-engine", "query-engine/query-engine-node-api", + "query-engine/query-engine-wasm", "query-engine/request-handlers", "query-engine/schema", "libs/*", diff --git a/query-engine/query-engine-wasm/.gitignore b/query-engine/query-engine-wasm/.gitignore new file mode 100644 index 000000000000..a6f0e4dca125 --- /dev/null +++ b/query-engine/query-engine-wasm/.gitignore @@ -0,0 +1,7 @@ +/target +**/*.rs.bk +Cargo.lock +bin/ +pkg/ +wasm-pack.log +node_modules/ \ No newline at end of file diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc new file mode 100644 index 000000000000..8c60e1e54f37 --- /dev/null +++ b/query-engine/query-engine-wasm/.nvmrc @@ -0,0 +1 @@ +v20.5.1 diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml new file mode 100644 index 000000000000..a8bc393aee3f --- /dev/null +++ b/query-engine/query-engine-wasm/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "query-engine-wasm" +version = "0.1.0" +edition = "2021" + +[lib] +doc = false +crate-type = ["cdylib"] +name = "query_engine" + +[dependencies] +anyhow = "1" +async-trait = "0.1" +user-facing-errors = { path = "../../libs/user-facing-errors" } +psl.workspace = true +prisma-models = { path = "../prisma-models" } + +thiserror = "1" +connection-string.workspace = true +url = "2" +serde_json.workspace = true +serde.workspace = true +tokio = { version = "1.25", features = ["macros", "sync", "io-util", "time"] } +futures = "0.3" +wasm-bindgen = "=0.2.87" +wasm-bindgen-futures = "0.4" +serde-wasm-bindgen = "0.5" +js-sys = "0.3" +log = "0.4.6" +wasm-logger = "0.2.0" + +tracing = "0.1" +tracing-subscriber = { 
version = "0.3" }
+tracing-futures = "0.2"
+tsify = "0.4.5"
+console_error_panic_hook = "0.1.7"
diff --git a/query-engine/query-engine-wasm/README.md b/query-engine/query-engine-wasm/README.md
new file mode 100644
index 000000000000..f5adc7eb2894
--- /dev/null
+++ b/query-engine/query-engine-wasm/README.md
@@ -0,0 +1,40 @@
+# @prisma/query-engine-wasm
+
+**INTERNAL PACKAGE, DO NOT USE**
+
+This is a Wasm-compatible version of the Query Engine library (libquery).
+Currently, it contains only a skeleton of the public API, as some internal crates are not yet Wasm-compatible.
+
+The published npm package is internal to Prisma. Its API will break without prior warning.
+
+## Setup
+
+```
+# Install the latest Rust version with `rustup`,
+# or update your existing Rust toolchain with `rustup`
+rustup update
+rustup target add wasm32-unknown-unknown
+cargo install wasm-bindgen-cli
+cargo install wasm-pack
+```
+
+## How to Build
+
+From the current folder:
+
+- `./build.sh $OUT_NPM_VERSION`
+
+where `OUT_NPM_VERSION` (e.g. `"0.0.1"`) is the version under which to publish this package on npm.
+
+## How to Publish
+
+From the current folder:
+
+- `wasm-pack publish --access public`
+
+## How to Test
+
+To try importing the Wasm module, you can run:
+
+- `nvm use`
+- `node --experimental-wasm-modules ./example.js`
diff --git a/query-engine/query-engine-wasm/build.rs b/query-engine/query-engine-wasm/build.rs
new file mode 100644
index 000000000000..2e8fe20c0503
--- /dev/null
+++ b/query-engine/query-engine-wasm/build.rs
@@ -0,0 +1,11 @@
+use std::process::Command;
+
+fn store_git_commit_hash() {
+    let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap();
+    let git_hash = String::from_utf8(output.stdout).unwrap();
+    println!("cargo:rustc-env=GIT_HASH={git_hash}");
+}
+
+fn main() {
+    store_git_commit_hash();
+}
diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh
new file mode 100755
index 000000000000..12d8328305ff
--- /dev/null
+++ b/query-engine/query-engine-wasm/build.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Call this script as `./build.sh <npm_version>`
+
+OUT_VERSION="$1"
+OUT_FOLDER="pkg"
+OUT_JSON="${OUT_FOLDER}/package.json"
+OUT_TARGET="bundler" # Note(jkomyno): I wasn't able to make it work with the `web` target
+OUT_NPM_NAME="@prisma/query-engine-wasm"
+
+wasm-pack build --release --target $OUT_TARGET
+
+sleep 1
+
+# Mark the package as an ES module, set the entry point to the query_engine.js file, and mark the package as public
+printf '%s\n' "$(jq '. + {"type": "module"} + {"main": "./query_engine.js"} + {"private": false}' $OUT_JSON)" > $OUT_JSON
+
+# Add the version
+printf '%s\n' "$(jq --arg version "$OUT_VERSION" '. + {"version": $version}' $OUT_JSON)" > $OUT_JSON
+
+# Add the package name
+printf '%s\n' "$(jq --arg name "$OUT_NPM_NAME" '. + {"name": $name}' $OUT_JSON)" > $OUT_JSON
+
+enable_cf_in_bindings() {
+    # Enable Cloudflare Workers in the generated JS bindings.
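+    # (wasm-bindgen's `bundler` target normally leaves the `.wasm` import to a
+    # bundler, so the entry point is overwritten below with a small runtime shim.)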
+    # The generated bindings are compatible with:
+    # - Node.js
+    # - Cloudflare Workers / Miniflare
+
+    local FILE="$1" # e.g., `query_engine.js`
+    local BG_FILE="${FILE%.js}_bg.js"
+    local OUTPUT_FILE="${OUT_FOLDER}/${FILE}"
+
+    cat <<EOF > "$OUTPUT_FILE"
+import * as imports from "./${BG_FILE}";
+
+// switch between the Node.js and the Workers (Cloudflare Workers) loading syntax
+import * as wkmod from "./${BG_FILE%.js}.wasm";
+import * as nodemod from "./${BG_FILE%.js}.wasm";
+if ((typeof process !== 'undefined') && (process.release.name === 'node')) {
+    imports.__wbg_set_wasm(nodemod);
+} else {
+    const instance = new WebAssembly.Instance(wkmod.default, { "./${BG_FILE}": imports });
+    imports.__wbg_set_wasm(instance.exports);
+}
+
+export * from "./${BG_FILE}";
+EOF
+}
+
+enable_cf_in_bindings "query_engine.js"
diff --git a/query-engine/query-engine-wasm/example.js b/query-engine/query-engine-wasm/example.js
new file mode 100644
index 000000000000..bca6d5ba95d7
--- /dev/null
+++ b/query-engine/query-engine-wasm/example.js
@@ -0,0 +1,54 @@
+/**
+ * Run with: `node --experimental-wasm-modules ./example.js`
+ * on Node.js 18+.
+ */
+
+import { Pool } from '@neondatabase/serverless'
+import { PrismaNeon } from '@prisma/adapter-neon'
+import { bindAdapter } from '@prisma/driver-adapter-utils'
+import { init, QueryEngine, getBuildTimeInfo } from './pkg/query_engine.js'
+
+async function main() {
+  // Always initialize the Wasm library before using it.
+  // This sets up the logging and panic hooks.
+  init()
+
+  const connectionString = undefined
+
+  const pool = new Pool({ connectionString })
+  const adapter = new PrismaNeon(pool)
+  const driverAdapter = bindAdapter(adapter)
+
+  console.log('buildTimeInfo', getBuildTimeInfo())
+
+  const options = {
+    datamodel: /* prisma */`
+      datasource db {
+        provider = "postgres"
+        url = env("DATABASE_URL")
+      }
+
+      generator client {
+        provider = "prisma-client-js"
+      }
+
+      model User {
+        id Int @id @default(autoincrement())
+      }
+    `,
+    logLevel: 'info',
+    logQueries: true,
+    datasourceOverrides: {},
+    env: process.env,
+    configDir: '/tmp',
+    ignoreEnvVarErrors: true,
+  }
+  const callback = () => { console.log('log-callback') }
+
+  const queryEngine = new QueryEngine(options, callback, driverAdapter)
+
+  await queryEngine.connect('trace')
+  await queryEngine.disconnect('trace')
+}
+
+main()
diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json
new file mode 100644
index 000000000000..bc854644f6dd
--- /dev/null
+++ b/query-engine/query-engine-wasm/package-lock.json
@@ -0,0 +1,148 @@
+{
+  "name": "query-engine-wasm",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "dependencies": {
+        "@neondatabase/serverless": "^0.6.0",
+        "@prisma/adapter-neon": "^5.4.1",
+        "@prisma/driver-adapter-utils": "^5.4.1"
+      }
+    },
+    "node_modules/@neondatabase/serverless": {
+      "version": "0.6.0",
+      "resolved": "https://registry.npmjs.org/@neondatabase/serverless/-/serverless-0.6.0.tgz",
+      "integrity": "sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==",
+      "dependencies": {
+        "@types/pg": "8.6.6"
+      }
+    },
+    "node_modules/@prisma/adapter-neon": {
+      "version": "5.4.1",
+      "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.4.1.tgz",
+      "integrity": "sha512-mIwLmwyAwDV9HXar9lSyM2uVm9H+X8noG4reKLnC3NjFsBxBfSUgW9vS8dPGqGW/rJWX3hg4pIffjEjmX4TDqg==",
+      "dependencies": {
+        "@prisma/driver-adapter-utils": "5.4.1"
+      },
+      "peerDependencies": {
"@neondatabase/serverless": "^0.6.0" + } + }, + "node_modules/@prisma/driver-adapter-utils": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.4.1.tgz", + "integrity": "sha512-muYjkzf6qdxz4uGBi7nKyPaGRGLnSgiRautqAhZiMwbTOr9hMgyNI+aCJTCaKfYfNWjYCx2r5J6R1mJtPhzFhQ==", + "dependencies": { + "debug": "^4.3.4" + } + }, + "node_modules/@types/node": { + "version": "20.8.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz", + "integrity": "sha512-Vvycsc9FQdwhxE3y3DzeIxuEJbWGDsnrxvMADzTDF/lcdR9/K+AQIeAghTQsHtotg/q0j3WEOYS/jQgSdWue3w==" + }, + "node_modules/@types/pg": { + "version": "8.6.6", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", + "integrity": "sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", + "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json new file mode 100644 index 000000000000..538080ec1b8c --- /dev/null +++ b/query-engine/query-engine-wasm/package.json @@ -0,0 +1,9 @@ +{ + "type": "module", + "main": "./example.js", + "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@prisma/adapter-neon": "^5.4.1", + "@prisma/driver-adapter-utils": "^5.4.1" + } +} diff --git a/query-engine/query-engine-wasm/src/engine.rs b/query-engine/query-engine-wasm/src/engine.rs new file mode 100644 index 000000000000..f9a06fabcf4b --- /dev/null +++ b/query-engine/query-engine-wasm/src/engine.rs @@ -0,0 +1,265 @@ +#![allow(dead_code)] +#![allow(unused_variables)] + +use crate::proxy; +use crate::{ + error::ApiError, + logger::{LogCallback, Logger}, +}; +use js_sys::{Function as JsFunction, Object as JsObject}; +use serde::{Deserialize, Serialize}; +use std::{ + collections::{BTreeMap, HashMap}, + path::PathBuf, + sync::Arc, +}; +use tokio::sync::RwLock; +use tracing_subscriber::filter::LevelFilter; +use tsify::Tsify; +use wasm_bindgen::prelude::wasm_bindgen; + +/// The main query engine used by JS +#[wasm_bindgen] +pub struct QueryEngine { + inner: RwLock, + logger: Logger, +} + +/// The state of the engine. +enum Inner { + /// Not connected, holding all data to form a connection. + Builder(EngineBuilder), + /// A connected engine, holding all data to disconnect and form a new + /// connection. Allows querying when on this state. + Connected(ConnectedEngine), +} + +/// Everything needed to connect to the database and have the core running. +struct EngineBuilder { + schema: Arc, + config_dir: PathBuf, + env: HashMap, +} + +/// Internal structure for querying and reconnecting with the engine. +struct ConnectedEngine { + schema: Arc, + config_dir: PathBuf, + env: HashMap, +} + +/// Returned from the `serverInfo` method in javascript. +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct ServerInfo { + commit: String, + version: String, + primary_connector: Option, +} + +/// Parameters defining the construction of an engine. 
+#[derive(Debug, Deserialize, Tsify)]
+#[tsify(from_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct ConstructorOptions {
+    datamodel: String,
+    log_level: String,
+    #[serde(default)]
+    log_queries: bool,
+    #[serde(default)]
+    datasource_overrides: BTreeMap<String, String>,
+    #[serde(default)]
+    env: serde_json::Value,
+    config_dir: PathBuf,
+    #[serde(default)]
+    ignore_env_var_errors: bool,
+    #[serde(default)]
+    engine_protocol: Option<String>,
+}
+
+impl Inner {
+    /// Returns a builder if the engine is not connected
+    fn as_builder(&self) -> crate::Result<&EngineBuilder> {
+        match self {
+            Inner::Builder(ref builder) => Ok(builder),
+            Inner::Connected(_) => Err(ApiError::AlreadyConnected),
+        }
+    }
+
+    /// Returns the engine if connected
+    fn as_engine(&self) -> crate::Result<&ConnectedEngine> {
+        match self {
+            Inner::Builder(_) => Err(ApiError::NotConnected),
+            Inner::Connected(ref engine) => Ok(engine),
+        }
+    }
+}
+
+#[wasm_bindgen]
+impl QueryEngine {
+    /// Parse a validated datamodel and configuration to allow connecting later on.
+    #[wasm_bindgen(constructor)]
+    pub fn new(
+        options: ConstructorOptions,
+        callback: JsFunction,
+        maybe_adapter: Option<JsObject>,
+    ) -> Result<QueryEngine, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::new()`");
+
+        let log_callback = LogCallback(callback);
+        log::info!("Parsed `log_callback`");
+
+        let ConstructorOptions {
+            datamodel,
+            log_level,
+            log_queries,
+            datasource_overrides,
+            env,
+            config_dir,
+            ignore_env_var_errors,
+            engine_protocol,
+        } = options;
+
+        let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env
+        let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect();
+
+        let mut schema = psl::validate(datamodel.into());
+        let config = &mut schema.configuration;
+
+        if let Some(adapter) = maybe_adapter {
+            let js_queryable =
+                proxy::from_wasm(adapter).map_err(|e| ApiError::configuration(e.as_string().unwrap_or_default()))?;
+
+            let provider_name = schema.connector.provider_name();
+            log::info!("Received driver adapter for {provider_name}.");
+        }
+
+        schema
+            .diagnostics
+            .to_result()
+            .map_err(|err| ApiError::conversion(err, schema.db.source()))?;
+
+        config
+            .resolve_datasource_urls_query_engine(
+                &overrides,
+                |key| env.get(key).map(ToString::to_string),
+                ignore_env_var_errors,
+            )
+            .map_err(|err| ApiError::conversion(err, schema.db.source()))?;
+
+        config
+            .validate_that_one_datasource_is_provided()
+            .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?;
+
+        let builder = EngineBuilder {
+            schema: Arc::new(schema),
+            config_dir,
+            env,
+        };
+
+        let log_level = log_level.parse::<LevelFilter>().unwrap();
+        let logger = Logger::new(log_queries, log_level, log_callback);
+
+        Ok(Self {
+            inner: RwLock::new(Inner::Builder(builder)),
+            logger,
+        })
+    }
+
+    /// Connect to the database, allow queries to be run.
+    #[wasm_bindgen]
+    pub async fn connect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::connect()`");
+        Ok(())
+    }
+
+    /// Disconnect and drop the core. Can be reconnected later with `#connect`.
+    #[wasm_bindgen]
+    pub async fn disconnect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::disconnect()`");
+        Ok(())
+    }
+
+    /// If connected, sends a query to the core and returns the response.
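+    /// In this skeleton the method is a stub: it always returns a
+    /// configuration error until `request_handlers` compiles to Wasm.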
+    #[wasm_bindgen]
+    pub async fn query(
+        &self,
+        body: String,
+        trace: String,
+        tx_id: Option<String>,
+    ) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::query()`");
+        Err(ApiError::configuration("Can't use `query` until `request_handlers` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to start a transaction in the core and returns its ID.
+    #[wasm_bindgen(js_name = startTransaction)]
+    pub async fn start_transaction(&self, input: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::start_transaction()`");
+        Err(ApiError::configuration("Can't use `start_transaction` until `query_core` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to commit a transaction with id `tx_id` in the core.
+    #[wasm_bindgen(js_name = commitTransaction)]
+    pub async fn commit_transaction(&self, tx_id: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::commit_transaction()`");
+        Err(ApiError::configuration("Can't use `commit_transaction` until `query_core` is Wasm-compatible.").into())
+    }
+
+    #[wasm_bindgen]
+    pub async fn dmmf(&self, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::dmmf()`");
+        Err(ApiError::configuration("Can't use `dmmf` until `request_handlers` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to roll back a transaction with id `tx_id` in the core.
+    #[wasm_bindgen(js_name = rollbackTransaction)]
+    pub async fn rollback_transaction(&self, tx_id: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::rollback_transaction()`");
+        Ok("{}".to_owned())
+    }
+
+    /// Loads the query schema. Only available when connected.
+    #[wasm_bindgen(js_name = sdlSchema)]
+    pub async fn sdl_schema(&self) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::sdl_schema()`");
+        Ok("{}".to_owned())
+    }
+
+    #[wasm_bindgen]
+    pub async fn metrics(&self, json_options: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::metrics()`");
+        Err(ApiError::configuration("Metrics is not enabled in Wasm.").into())
+    }
+}
+
+fn stringify_env_values(origin: serde_json::Value) -> crate::Result<HashMap<String, String>> {
+    use serde_json::Value;
+
+    let msg = match origin {
+        Value::Object(map) => {
+            let mut result: HashMap<String, String> = HashMap::new();
+
+            for (key, val) in map.into_iter() {
+                match val {
+                    Value::Null => continue,
+                    Value::String(val) => {
+                        result.insert(key, val);
+                    }
+                    val => {
+                        result.insert(key, val.to_string());
+                    }
+                }
+            }
+
+            return Ok(result);
+        }
+        Value::Null => return Ok(Default::default()),
+        Value::Bool(_) => "Expected an object for the env constructor parameter, got a boolean.",
+        Value::Number(_) => "Expected an object for the env constructor parameter, got a number.",
+        Value::String(_) => "Expected an object for the env constructor parameter, got a string.",
+        Value::Array(_) => "Expected an object for the env constructor parameter, got an array.",
+    };
+
+    Err(ApiError::JsonDecode(msg.to_string()))
+}
diff --git a/query-engine/query-engine-wasm/src/error.rs b/query-engine/query-engine-wasm/src/error.rs
new file mode 100644
index 000000000000..619e96564f6a
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/error.rs
@@ -0,0 +1,93 @@
+use psl::diagnostics::Diagnostics;
+// use query_connector::error::ConnectorError;
+// use query_core::CoreError;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum ApiError {
+    #[error("{:?}", _0)]
+    Conversion(Diagnostics, String),
+
+    #[error("{}", _0)]
+    Configuration(String),
+
+    // #[error("{}", _0)]
+    // Core(CoreError),
+
+    // #[error("{}", _0)]
+    // Connector(ConnectorError),
+    #[error("Can't modify an already connected engine.")]
+    AlreadyConnected,
+
+    #[error("Engine is not yet connected.")]
+    NotConnected,
+
+    #[error("{}", _0)]
+    JsonDecode(String),
+}
+
+impl From<ApiError> for user_facing_errors::Error {
+    fn from(err: ApiError) -> Self {
+        use std::fmt::Write as _;
+
+        match err {
+            // ApiError::Connector(ConnectorError {
+            //     user_facing_error: Some(err),
+            //     ..
+            // }) => err.into(),
+            ApiError::Conversion(errors, dml_string) => {
+                let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string);
+                write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap();
+
+                user_facing_errors::Error::from(user_facing_errors::KnownError::new(
+                    user_facing_errors::common::SchemaParserError { full_error },
+                ))
+            }
+            // ApiError::Core(error) => user_facing_errors::Error::from(error),
+            other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()),
+        }
+    }
+}
+
+impl ApiError {
+    pub fn conversion(diagnostics: Diagnostics, dml: impl ToString) -> Self {
+        Self::Conversion(diagnostics, dml.to_string())
+    }
+
+    pub fn configuration(msg: impl ToString) -> Self {
+        Self::Configuration(msg.to_string())
+    }
+}
+
+// impl From<CoreError> for ApiError {
+//     fn from(e: CoreError) -> Self {
+//         match e {
+//             CoreError::ConfigurationError(message) => Self::Configuration(message),
+//             core_error => Self::Core(core_error),
+//         }
+//     }
+// }
+
+// impl From<ConnectorError> for ApiError {
+//     fn from(e: ConnectorError) -> Self {
+//         Self::Connector(e)
+//     }
+// }
+
+impl From<url::ParseError> for ApiError {
+    fn from(e: url::ParseError) -> Self {
+        Self::configuration(format!("Error parsing connection string: {e}"))
+    }
+}
+
+impl From<connection_string::Error> for ApiError {
+    fn from(e: connection_string::Error) -> Self {
+        Self::configuration(format!("Error parsing connection string: {e}"))
+    }
+}
+
+impl From<serde_json::Error> for ApiError {
+    fn from(e: serde_json::Error) -> Self {
+        Self::JsonDecode(format!("{e}"))
+    }
+}
diff --git a/query-engine/query-engine-wasm/src/functions.rs b/query-engine/query-engine-wasm/src/functions.rs
new file mode 100644
index 000000000000..e0f0a93aa5cd
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/functions.rs
@@ -0,0 +1,47 @@
+use crate::error::ApiError;
+use serde::Serialize;
+use tsify::Tsify;
+use wasm_bindgen::prelude::wasm_bindgen;
+
+#[derive(Serialize, Tsify)]
+#[tsify(into_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct Version {
+    pub commit: &'static str,
+    pub version: &'static str,
+}
+
+#[wasm_bindgen(js_name = "getBuildTimeInfo")]
+pub fn version() -> Version {
+    Version {
+        commit: env!("GIT_HASH"),
+        version: env!("CARGO_PKG_VERSION"),
+    }
+}
+
+#[wasm_bindgen]
+pub fn dmmf(datamodel_string: String) -> Result<String, wasm_bindgen::JsError> {
+    let mut schema = psl::validate(datamodel_string.into());
+
+    schema
+        .diagnostics
+        .to_result()
+        .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?;
+
+    Ok("{}".to_string())
+
+    // let query_schema = query_core::schema::build(Arc::new(schema), true);
+    // let dmmf = dmmf::render_dmmf(&query_schema);
+
+    // Ok(serde_json::to_string(&dmmf)?)
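+    // For now the input schema is only validated (above) and an empty JSON
+    // object is returned; the commented-out lines are the intended
+    // implementation once the DMMF renderer compiles to Wasm.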
+}
+
+#[wasm_bindgen]
+pub fn debug_panic(panic_message: Option<String>) -> Result<(), wasm_bindgen::JsError> {
+    let user_facing = user_facing_errors::Error::from_panic_payload(Box::new(
+        panic_message.unwrap_or_else(|| "query-engine-wasm debug panic".to_string()),
+    ));
+    let message = serde_json::to_string(&user_facing).unwrap();
+
+    Err(wasm_bindgen::JsError::new(&message))
+}
diff --git a/query-engine/query-engine-wasm/src/lib.rs b/query-engine/query-engine-wasm/src/lib.rs
new file mode 100644
index 000000000000..89b519515517
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/lib.rs
@@ -0,0 +1,19 @@
+pub mod engine;
+pub mod error;
+pub mod functions;
+pub mod logger;
+mod proxy;
+
+pub(crate) type Result<T> = std::result::Result<T, error::ApiError>;
+
+use wasm_bindgen::prelude::wasm_bindgen;
+
+/// Function that should be called before any other public function in this module.
+#[wasm_bindgen]
+pub fn init() {
+    // Set up temporary logging for the wasm module.
+    wasm_logger::init(wasm_logger::Config::default());
+
+    // Set up temporary panic hook for the wasm module.
+    std::panic::set_hook(Box::new(console_error_panic_hook::hook));
+}
diff --git a/query-engine/query-engine-wasm/src/logger.rs b/query-engine/query-engine-wasm/src/logger.rs
new file mode 100644
index 000000000000..561c48271b77
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/logger.rs
@@ -0,0 +1,132 @@
+#![allow(dead_code)]
+
+use core::fmt;
+use js_sys::Function as JsFunction;
+use serde_json::Value;
+use std::collections::BTreeMap;
+use tracing::{
+    field::{Field, Visit},
+    level_filters::LevelFilter,
+    Dispatch, Level, Subscriber,
+};
+use tracing_subscriber::{
+    filter::{filter_fn, FilterExt},
+    layer::SubscriberExt,
+    Layer, Registry,
+};
+use wasm_bindgen::JsValue;
+
+pub(crate) struct LogCallback(pub JsFunction);
+
+unsafe impl Send for LogCallback {}
+unsafe impl Sync for LogCallback {}
+
+pub(crate) struct Logger {
+    dispatcher: Dispatch,
+}
+
+impl Logger {
+    /// Creates a new logger using a call layer
+    pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback) -> Self {
+        let is_sql_query = filter_fn(|meta| {
+            meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query")
+        });
+
+        // is a mongodb query?
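+        // The MongoDB connector emits its query events under a dedicated
+        // tracing target, so matching on the target alone is sufficient here.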
+        let is_mongo_query = filter_fn(|meta| meta.target() == "mongodb_query_connector::query");
+
+        // We need to filter the messages to send to our callback logging mechanism
+        let filters = if log_queries {
+            // Filter trace query events (for query log) or based on the defined log level
+            is_sql_query.or(is_mongo_query).or(log_level).boxed()
+        } else {
+            // Filter based on the defined log level
+            FilterExt::boxed(log_level)
+        };
+
+        let layer = CallbackLayer::new(log_callback).with_filter(filters);
+
+        Self {
+            dispatcher: Dispatch::new(Registry::default().with(layer)),
+        }
+    }
+
+    pub fn dispatcher(&self) -> Dispatch {
+        self.dispatcher.clone()
+    }
+}
+
+pub struct JsonVisitor<'a> {
+    values: BTreeMap<&'a str, Value>,
+}
+
+impl<'a> JsonVisitor<'a> {
+    pub fn new(level: &Level, target: &str) -> Self {
+        let mut values = BTreeMap::new();
+        values.insert("level", serde_json::Value::from(level.to_string()));
+
+        // NOTE: previous version used module_path, this is not correct and it should be _target_
+        values.insert("module_path", serde_json::Value::from(target));
+
+        JsonVisitor { values }
+    }
+}
+
+impl<'a> Visit for JsonVisitor<'a> {
+    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+        match field.name() {
+            name if name.starts_with("r#") => {
+                self.values
+                    .insert(&name[2..], serde_json::Value::from(format!("{value:?}")));
+            }
+            name => {
+                self.values.insert(name, serde_json::Value::from(format!("{value:?}")));
+            }
+        };
+    }
+
+    fn record_i64(&mut self, field: &Field, value: i64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_u64(&mut self, field: &Field, value: u64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_bool(&mut self, field: &Field, value: bool) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_str(&mut self, field: &Field, value: &str) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+}
+
+impl<'a> ToString for JsonVisitor<'a> {
+    fn to_string(&self) -> String {
+        serde_json::to_string(&self.values).unwrap()
+    }
+}
+
+pub(crate) struct CallbackLayer {
+    callback: LogCallback,
+}
+
+impl CallbackLayer {
+    pub fn new(callback: LogCallback) -> Self {
+        CallbackLayer { callback }
+    }
+}
+
+// A tracing layer for sending logs to a js callback, layers are composable, subscribers are not.
+impl<S: Subscriber> Layer<S> for CallbackLayer {
+    fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) {
+        let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target());
+        event.record(&mut visitor);
+
+        let _ = self
+            .callback
+            .0
+            .call1(&JsValue::NULL, &JsValue::from_str(&visitor.to_string()));
+    }
+}
diff --git a/query-engine/query-engine-wasm/src/proxy.rs b/query-engine/query-engine-wasm/src/proxy.rs
new file mode 100644
index 000000000000..ad028e218236
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/proxy.rs
@@ -0,0 +1,107 @@
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+// This code will likely live in a separate crate, but for now it's here.
+
+use async_trait::async_trait;
+use js_sys::{Function as JsFunction, JsString, Object as JsObject, Promise as JsPromise, Reflect as JsReflect};
+use serde::{de::DeserializeOwned, Serialize};
+use wasm_bindgen::{JsCast, JsValue};
+
+type Result<T> = std::result::Result<T, JsValue>;
+
+pub struct CommonProxy {
+    /// Execute a query given as SQL, interpolating the given parameters.
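+    /// Bound to the driver adapter's `queryRaw` method; calling it yields a
+    /// JS promise that resolves to the raw result set.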
+    query_raw: JsFunction,
+
+    /// Execute a query given as SQL, interpolating the given parameters and
+    /// returning the number of affected rows.
+    execute_raw: JsFunction,
+
+    /// Return the flavour for this driver.
+    pub(crate) flavour: String,
+}
+
+impl CommonProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let query_raw = JsReflect::get(driver, &"queryRaw".into())?.dyn_into::<JsFunction>()?;
+        let execute_raw = JsReflect::get(driver, &"executeRaw".into())?.dyn_into::<JsFunction>()?;
+        let flavour: String = JsReflect::get(driver, &"flavour".into())?
+            .dyn_into::<JsString>()?
+            .into();
+
+        let common_proxy = Self {
+            query_raw,
+            execute_raw,
+            flavour,
+        };
+        Ok(common_proxy)
+    }
+}
+
+pub struct DriverProxy {
+    start_transaction: JsFunction,
+}
+
+impl DriverProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let start_transaction = JsReflect::get(driver, &"startTransaction".into())?.dyn_into::<JsFunction>()?;
+
+        let driver_proxy = Self { start_transaction };
+        Ok(driver_proxy)
+    }
+}
+
+pub struct JsQueryable {
+    inner: CommonProxy,
+    driver_proxy: DriverProxy,
+}
+
+impl JsQueryable {
+    pub fn new(inner: CommonProxy, driver_proxy: DriverProxy) -> Self {
+        Self { inner, driver_proxy }
+    }
+}
+
+pub fn from_wasm(driver: JsObject) -> Result<JsQueryable> {
+    let common_proxy = CommonProxy::new(&driver)?;
+    let driver_proxy = DriverProxy::new(&driver)?;
+
+    let js_queryable = JsQueryable::new(common_proxy, driver_proxy);
+    Ok(js_queryable)
+}
+
+#[async_trait(?Send)]
+trait JsAsyncFunc {
+    async fn call1_async<T, R>(&self, arg1: T) -> Result<R>
+    where
+        T: Serialize,
+        R: DeserializeOwned;
+
+    fn call0_sync<R>(&self) -> Result<R>
+    where
+        R: DeserializeOwned;
+}
+
+#[async_trait(?Send)]
+impl JsAsyncFunc for JsFunction {
+    async fn call1_async<T, R>(&self, arg1: T) -> Result<R>
+    where
+        T: Serialize,
+        R: DeserializeOwned,
+    {
+        let arg1 = serde_wasm_bindgen::to_value(&arg1).map_err(|err| js_sys::Error::new(&err.to_string()))?;
+        let promise = self.call1(&JsValue::null(), &arg1)?;
+        let future = wasm_bindgen_futures::JsFuture::from(JsPromise::from(promise));
+        let value = future.await?;
+        serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string()))
+    }
+
+    fn call0_sync<R>(&self) -> Result<R>
+    where
+        R: DeserializeOwned,
+    {
+        let value = self.call0(&JsValue::null())?;
+        serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string()))
+    }
+}
From d39b430484e59dd184d26e6ef4e4918fd23ecf02 Mon Sep 17 00:00:00 2001
From: Jan Piotrowski
Date: Mon, 9 Oct 2023 17:19:51 +0200
Subject: [PATCH 02/67] ci(smoke-tests): Move errors step into its own job to
 make clear this is not driver adapter dependant (#4344)

---
 .../workflows/driver-adapter-smoke-tests.yml | 47 +++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml
index aa653ea7a57d..802e3188dedc 100644
--- a/.github/workflows/driver-adapter-smoke-tests.yml
+++ b/.github/workflows/driver-adapter-smoke-tests.yml
@@ -37,6 +37,7 @@ jobs:
         ports:
           - 5432:5432

+      # via package.json rewritten into DATABASE_URL before scripts are run
       env:
         JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }}
         JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }}
@@ -78,6 +79,52 @@
         if: always()
         working-directory: ./query-engine/driver-adapters/js/smoke-test-js

+
+  driver-adapter-smoke-tests-errors:
+    name: Errors
+
+    runs-on: ubuntu-latest
+
+    # services:
+    #   postgres:
+    #     image: postgres
+    #     env:
+    #       POSTGRES_PASSWORD: postgres
+    #     options: >-
+    #       --health-cmd pg_isready
+    #       --health-interval 10s
+    #       --health-timeout 5s
+    #       --health-retries 5
+    #     ports:
+    #       - 5432:5432
+
+    env:
+      # via package.json rewritten into DATABASE_URL before scripts are run
+      JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: dtolnay/rust-toolchain@stable
+
+      - uses: pnpm/action-setup@v2
+        with:
+          version: 8
+      - uses: actions/setup-node@v3
+        with:
+          node-version: 18
+          #cache: 'pnpm'
+
+      - name: Compile Query Engine
+        run: cargo build -p query-engine-node-api
+
+      - name: Install Dependencies (Driver Adapters)
+        run: pnpm install
+        working-directory: ./query-engine/driver-adapters/js
+      - name: Build Driver Adapters
+        run: pnpm build
+        working-directory: ./query-engine/driver-adapters/js
+
+      - name: pnpm errors
+        run: pnpm errors
+        if: always()
From 72963d8dd2e8e493a6496c242a16bffb8383efe7 Mon Sep 17 00:00:00 2001
From: pierre
Date: Mon, 9 Oct 2023 09:55:32 -1000
Subject: [PATCH 03/67] chore: add badges for driver adapters readme (#4347)

---
 query-engine/driver-adapters/js/README.md | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md
index e5e64c60dfc8..926d6db2b0a8 100644
--- a/query-engine/driver-adapters/js/README.md
+++ b/query-engine/driver-adapters/js/README.md
@@ -1,5 +1,13 @@
 # Prisma Driver Adapters
+
+<!-- badges -->
+ This TypeScript monorepo contains the following packages: - `@prisma/driver-adapter-utils` - Internal set of utilities and types for Prisma's driver adapters. From 0722655adf11cf3e884a92f6333a101e2adb1898 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Wed, 11 Oct 2023 13:04:08 +0200 Subject: [PATCH 04/67] ci: add ignored paths & skip running Buildkite tests when git diff is empty (#4355) --- .buildkite/engineer | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 0b1adc2d8011..701e57fa9229 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -1,5 +1,36 @@ #!/usr/bin/env bash +set -e + +if [[ -z "$2" ]]; then + printf "Error: the name of the pipeline must be provided.\nExample: './engineer pipeline test'" 1>&2 + exit 1 +else + echo "We are in the $2 pipeline." +fi + +# Checks what's the diff with the previous commit, +# excluding some paths that do not need a run, +# because they do not affect tests running in Buildkite. +GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) + +# $2 is either "test" or "build", depending on the pipeline +# Example: ./.buildkite/engineer pipeline test +# We only want to check for changes and skip in the test pipeline. +if [[ "$2" == "test" ]]; then + # Checking if GIT_DIFF is empty + # If it's empty then it's most likely that there are changes but they are in ignored paths. + # So we do not start Buildkite + if [ -z "${GIT_DIFF}" ]; then + echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." + exit 0 + else + # Note that printf works better for displaying line returns in CI + printf "Changes found for the previous commit in paths that are not ignored: \n\n${GIT_DIFF}\n\nThis run will continue...\n" + fi +fi + +# Check OS if [[ "$OSTYPE" == "linux-gnu" ]]; then OS=linux-amzn elif [[ "$OSTYPE" == "darwin"* ]]; then @@ -12,7 +43,6 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). - set -e curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.59/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer @@ -22,6 +52,5 @@ if ! 
type "engineer" &> /dev/null; then rm -rf ./engineer else # Already installed on the system - set -e engineer "$@" -fi +fi \ No newline at end of file From c5d4d1ed59b9a9e63e53f19d61a28901ab3fd7b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Wed, 11 Oct 2023 15:01:14 +0200 Subject: [PATCH 05/67] tests: driver adapters: add build job for planetscale engine tests (#4356) --- .../query-engine-driver-adapters.yml | 2 ++ Makefile | 6 ++++ docker-compose.yml | 22 +++++++++++++++ docker/planetscale_proxy/Dockerfile | 15 ++++++++++ .../test-configs/planetscale-vitess8 | 7 +++++ .../connector-test-kit-executor/package.json | 2 ++ .../connector-test-kit-executor/src/index.ts | 28 +++++++++++++++++-- .../driver-adapters/js/pnpm-lock.yaml | 6 ++++ 8 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 docker/planetscale_proxy/Dockerfile create mode 100644 query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index dea1726c56c9..50f86575a8a7 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -31,6 +31,8 @@ jobs: setup_task: 'dev-neon-ws-postgres13' - name: 'libsql' setup_task: 'dev-libsql-sqlite' + - name: 'planetscale' + setup_task: 'dev-planetscale-vitess8' node_version: ['18'] env: LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/Makefile b/Makefile index 3a683b824e3b..06e10dbd56c7 100644 --- a/Makefile +++ b/Makefile @@ -249,6 +249,12 @@ start-vitess_8_0: dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) +start-planetscale-vitess8: build-qe-napi build-connector-kit-js + docker compose -f docker-compose.yml up -d --remove-orphans planetscale-vitess8 + +dev-planetscale-vitess8: start-planetscale-vitess8 + cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) + ###################### # Local dev commands # ###################### diff --git a/docker-compose.yml b/docker-compose.yml index 1988f864d304..6f7c6de4ca07 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -121,6 +121,23 @@ services: networks: - databases + planetscale-vitess8: + build: ./docker/planetscale_proxy + environment: + MYSQL_HOST: 'vitess-test-8_0' + MYSQL_PORT: 33807 + MYSQL_DATABASE: 'test-0000-00000000' + ports: + - '8085:8085' + depends_on: + - vitess-test-8_0 + restart: always + healthcheck: + test: [ 'CMD', 'nc', '-z', '127.0.0.1', '8085' ] + interval: 5s + timeout: 2s + retries: 20 + postgres14: image: postgres:14 restart: always @@ -230,6 +247,11 @@ services: FOREIGN_KEY_MODE: "disallow" TABLET_REFRESH_INTERVAL: "500ms" ENABLE_ONLINE_DDL: false + healthcheck: + test: [ 'CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807' ] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f diff --git a/docker/planetscale_proxy/Dockerfile b/docker/planetscale_proxy/Dockerfile new file mode 100644 index 000000000000..ae5ec56329c2 --- /dev/null +++ b/docker/planetscale_proxy/Dockerfile @@ -0,0 +1,15 @@ +FROM golang:1 + +RUN apt update && apt install netcat-openbsd -y +RUN cd /go/src && git clone https://github.com/prisma/planetscale-proxy.git +RUN cd /go/src/planetscale-proxy && go install . 
+
+ENTRYPOINT /go/bin/planetscale-proxy \
+  -http-addr=0.0.0.0 \
+  -http-port=8085 \
+  -mysql-addr=$MYSQL_HOST \
+  -mysql-port=$MYSQL_PORT \
+  -mysql-idle-timeout=1200s \
+  -mysql-no-pass \
+  -mysql-max-rows=1000 \
+  -mysql-dbname=$MYSQL_DATABASE
diff --git a/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8
new file mode 100644
index 000000000000..48c89c79427c
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8
@@ -0,0 +1,7 @@
+{
+  "connector": "vitess",
+  "version": "8.0",
+  "driver_adapter": "planetscale",
+  "driver_adapter_config": { "proxyUrl": "http://root:root@127.0.0.1:8085" },
+  "external_test_executor": "default"
+}
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
index be6a54a315fb..4a5f093388e6 100644
--- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
@@ -15,9 +15,11 @@
   "dependencies": {
     "@libsql/client": "0.3.5",
     "@neondatabase/serverless": "^0.6.0",
+    "@planetscale/database": "1.11.0",
     "@prisma/adapter-libsql": "workspace:*",
     "@prisma/adapter-neon": "workspace:*",
     "@prisma/adapter-pg": "workspace:*",
+    "@prisma/adapter-planetscale": "workspace:*",
     "@prisma/driver-adapter-utils": "workspace:*",
     "@types/pg": "^8.10.2",
     "pg": "^8.11.3",
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
index 68664272a6ce..a36e0e360514 100644
--- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
@@ -9,18 +9,28 @@ import * as prismaPg from '@prisma/adapter-pg'

 // neon dependencies
 import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless'
-import { WebSocket } from 'undici'
+import { fetch, WebSocket } from 'undici'
 import * as prismaNeon from '@prisma/adapter-neon'

 // libsql dependencies
 import { createClient } from '@libsql/client'
 import { PrismaLibSQL } from '@prisma/adapter-libsql'

+// planetscale dependencies
+import { connect as planetscaleConnect } from '@planetscale/database'
+import { PrismaPlanetScale } from '@prisma/adapter-planetscale'
+
+
 import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils";

 const SUPPORTED_ADAPTERS: Record<string, (_: string) => Promise<DriverAdapter>>
-    = {"pg": pgAdapter, "neon:ws" : neonWsAdapter, "libsql": libsqlAdapter};
+    = {
+        "pg": pgAdapter,
+        "neon:ws" : neonWsAdapter,
+        "libsql": libsqlAdapter,
+        "planetscale": planetscaleAdapter,
+    };

 // conditional debug logging based on LOG_LEVEL env var
 const debug = (() => {
@@ -250,4 +260,18 @@ async function libsqlAdapter(url: string): Promise<DriverAdapter> {
   return new PrismaLibSQL(libsql)
 }

+async function planetscaleAdapter(url: string): Promise<DriverAdapter> {
+  const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ??
'' + if (proxyURL == '') { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); + } + + const connection = planetscaleConnect({ + url: proxyURL, + fetch, + }) + + return new PrismaPlanetScale(connection) +} + main().catch(err) diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index efa3787712e4..5236ba2ffc11 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -75,6 +75,9 @@ importers: '@neondatabase/serverless': specifier: ^0.6.0 version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 '@prisma/adapter-libsql': specifier: workspace:* version: link:../adapter-libsql @@ -84,6 +87,9 @@ importers: '@prisma/adapter-pg': specifier: workspace:* version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: workspace:* + version: link:../adapter-planetscale '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils From 2b9174994bde8659013b09b8b0025d570710223c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Wed, 11 Oct 2023 18:21:51 +0200 Subject: [PATCH 06/67] ci(docker): add --wait for all & healthcheck for vitess (#4353) --- Makefile | 62 +++++++++++++++++++++++----------------------- docker-compose.yml | 59 +++++++++++++++++++++++++++++-------------- 2 files changed, 71 insertions(+), 50 deletions(-) diff --git a/Makefile b/Makefile index 06e10dbd56c7..0c3e1541e632 100644 --- a/Makefile +++ b/Makefile @@ -67,7 +67,7 @@ test-qe-black-box: build-qe ########################### all-dbs-up: - docker compose -f docker-compose.yml up -d --remove-orphans + docker compose -f docker-compose.yml up --wait -d --remove-orphans all-dbs-down: docker compose -f docker-compose.yml down -v --remove-orphans @@ -81,31 +81,31 @@ dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) start-postgres9: - docker compose -f docker-compose.yml up -d --remove-orphans postgres9 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 dev-postgres9: start-postgres9 cp $(CONFIG_PATH)/postgres9 $(CONFIG_FILE) start-postgres10: - docker compose -f docker-compose.yml up -d --remove-orphans postgres10 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres10 dev-postgres10: start-postgres10 cp $(CONFIG_PATH)/postgres10 $(CONFIG_FILE) start-postgres11: - docker compose -f docker-compose.yml up -d --remove-orphans postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres11 dev-postgres11: start-postgres11 cp $(CONFIG_PATH)/postgres11 $(CONFIG_FILE) start-postgres12: - docker compose -f docker-compose.yml up -d --remove-orphans postgres12 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres12 dev-postgres12: start-postgres12 cp $(CONFIG_PATH)/postgres12 $(CONFIG_FILE) start-postgres13: - docker compose -f docker-compose.yml up -d --remove-orphans postgres13 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres13 dev-postgres13: start-postgres13 cp $(CONFIG_PATH)/postgres13 $(CONFIG_FILE) @@ -116,120 +116,120 @@ dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) start-neon-postgres13: build-qe-napi build-connector-kit-js - docker compose -f docker-compose.yml up -d --remove-orphans neon-postgres13 + docker compose -f docker-compose.yml up --wait -d 
--remove-orphans neon-postgres13 dev-neon-ws-postgres13: start-neon-postgres13 cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) start-postgres14: - docker compose -f docker-compose.yml up -d --remove-orphans postgres14 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 dev-postgres14: start-postgres14 cp $(CONFIG_PATH)/postgres14 $(CONFIG_FILE) start-postgres15: - docker compose -f docker-compose.yml up -d --remove-orphans postgres15 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres15 dev-postgres15: start-postgres15 cp $(CONFIG_PATH)/postgres15 $(CONFIG_FILE) start-cockroach_23_1: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_23_1 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_23_1 dev-cockroach_23_1: start-cockroach_23_1 cp $(CONFIG_PATH)/cockroach_23_1 $(CONFIG_FILE) start-cockroach_22_2: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_2 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_2 dev-cockroach_22_2: start-cockroach_22_2 cp $(CONFIG_PATH)/cockroach_22_2 $(CONFIG_FILE) start-cockroach_22_1_0: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_1_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_1_0 dev-cockroach_22_1_0: start-cockroach_22_1_0 cp $(CONFIG_PATH)/cockroach_22_1 $(CONFIG_FILE) start-cockroach_21_2_0_patched: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_21_2_0_patched + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_21_2_0_patched dev-cockroach_21_2_0_patched: start-cockroach_21_2_0_patched cp $(CONFIG_PATH)/cockroach_21_2_0_patched $(CONFIG_FILE) dev-pgbouncer: - docker compose -f docker-compose.yml up -d --remove-orphans pgbouncer postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans pgbouncer postgres11 start-mysql_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-7 dev-mysql: start-mysql_5_7 cp $(CONFIG_PATH)/mysql57 $(CONFIG_FILE) start-mysql_5_6: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-6 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-6 dev-mysql_5_6: start-mysql_5_6 cp $(CONFIG_PATH)/mysql56 $(CONFIG_FILE) start-mysql_8: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-8-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-8-0 dev-mysql8: start-mysql_8 cp $(CONFIG_PATH)/mysql8 $(CONFIG_FILE) start-mysql_mariadb: - docker compose -f docker-compose.yml up -d --remove-orphans mariadb-10-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mariadb-10-0 dev-mariadb: start-mysql_mariadb cp $(CONFIG_PATH)/mariadb $(CONFIG_FILE) start-mssql_2019: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2019 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2019 dev-mssql2019: start-mssql_2019 cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2022: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2022 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2022 dev-mssql2022: start-mssql_2022 cp $(CONFIG_PATH)/sqlserver2022 $(CONFIG_FILE) start-mssql_edge: - docker compose -f docker-compose.yml up -d --remove-orphans azure-edge + docker compose -f 
docker-compose.yml up --wait -d --remove-orphans azure-edge dev-mssql_edge: start-mssql_edge cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2017: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2017 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2017 dev-mssql2017: start-mssql_2017 cp $(CONFIG_PATH)/sqlserver2017 $(CONFIG_FILE) start-mongodb42-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42-single start-mongodb44-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44-single start-mongodb4-single: start-mongodb44-single start-mongodb5-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5-single start-mongodb_4_2: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42 start-mongodb_4_4: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44 dev-mongodb_4_4: start-mongodb_4_4 cp $(CONFIG_PATH)/mongodb44 $(CONFIG_FILE) start-mongodb_5: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5 dev-mongodb_5: start-mongodb_5 cp $(CONFIG_PATH)/mongodb5 $(CONFIG_FILE) @@ -238,13 +238,13 @@ dev-mongodb_4_2: start-mongodb_4_2 cp $(CONFIG_PATH)/mongodb42 $(CONFIG_FILE) start-vitess_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 dev-vitess_5_7: start-vitess_5_7 cp $(CONFIG_PATH)/vitess_5_7 $(CONFIG_FILE) start-vitess_8_0: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) @@ -299,7 +299,7 @@ use-local-query-engine: cp target/release/query-engine $(PRISMA2_BINARY_PATH)/query-engine-darwin show-metrics: - docker compose -f docker-compose.yml up -d --remove-orphans grafana prometheus + docker compose -f docker-compose.yml up --wait -d --remove-orphans grafana prometheus ## OpenTelemetry otel: diff --git a/docker-compose.yml b/docker-compose.yml index 6f7c6de4ca07..97c9ed79e1c7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -228,11 +228,18 @@ services: - 33577:33577 environment: PORT: 33574 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-test-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e @@ -241,14 +248,15 @@ services: - 33807:33807 environment: PORT: 33804 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: 
"disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' healthcheck: - test: [ 'CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807' ] + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] interval: 5s timeout: 2s retries: 20 @@ -260,11 +268,18 @@ services: - 33578:33577 environment: PORT: 33574 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e @@ -273,12 +288,18 @@ services: - 33808:33807 environment: PORT: 33804 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] + interval: 5s + timeout: 2s + retries: 20 mssql-2017: image: mcr.microsoft.com/mssql/server:2017-latest From 26f1cbc8c0a969b85562a0a6b6e166229ce3876b Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 11 Oct 2023 18:25:16 +0200 Subject: [PATCH 07/67] .buildkite/engineer: fix shellcheck error (#4359) --- .buildkite/engineer | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 701e57fa9229..5e586ad2f0ec 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -26,7 +26,7 @@ if [[ "$2" == "test" ]]; then exit 0 else # Note that printf works better for displaying line returns in CI - printf "Changes found for the previous commit in paths that are not ignored: \n\n${GIT_DIFF}\n\nThis run will continue...\n" + printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF}" fi fi @@ -53,4 +53,4 @@ if ! 
type "engineer" &> /dev/null; then else # Already installed on the system engineer "$@" -fi \ No newline at end of file +fi From 95d40a41c29cc69d533d79a221164aaa1c81dff5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 12 Oct 2023 00:28:45 +0200 Subject: [PATCH 08/67] fix(deps): update prisma monorepo to v5.4.2 (patch) (#4350) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .../driver-adapters/js/pnpm-lock.yaml | 30 +++++++++---------- .../js/smoke-test-js/package.json | 4 +-- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 5236ba2ffc11..0b15115b5e23 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -137,8 +137,8 @@ importers: specifier: workspace:* version: link:../adapter-planetscale '@prisma/client': - specifier: 5.4.1 - version: 5.4.1(prisma@5.4.1) + specifier: 5.4.2 + version: 5.4.2(prisma@5.4.2) '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils @@ -162,8 +162,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.4.1 - version: 5.4.1 + specifier: 5.4.2 + version: 5.4.2 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -527,8 +527,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.4.1(prisma@5.4.1): - resolution: {integrity: sha512-xyD0DJ3gRNfLbPsC+YfMBBuLJtZKQfy1OD2qU/PZg+HKrr7SO+09174LMeTlWP0YF2wca9LxtVd4HnAiB5ketQ==} + /@prisma/client@5.4.2(prisma@5.4.2): + resolution: {integrity: sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -537,16 +537,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f - prisma: 5.4.1 + '@prisma/engines-version': 5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574 + prisma: 5.4.2 dev: false - /@prisma/engines-version@5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f: - resolution: {integrity: sha512-+nUQM/y8C+1GG5Ioeqcu6itFslCfxvQSAUVSMC9XM2G2Fcq0F4Afnp6m0pXF6X6iUBWen7jZBPmM9Qlq4Nr3/A==} + /@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574: + resolution: {integrity: sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA==} dev: false - /@prisma/engines@5.4.1: - resolution: {integrity: sha512-vJTdY4la/5V3N7SFvWRmSMUh4mIQnyb/MNoDjzVbh9iLmEC+uEykj/1GPviVsorvfz7DbYSQC4RiwmlEpTEvGA==} + /@prisma/engines@5.4.2: + resolution: {integrity: sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g==} requiresBuild: true /@types/debug@4.1.8: @@ -1259,13 +1259,13 @@ packages: /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - /prisma@5.4.1: - resolution: {integrity: sha512-op9PmU8Bcw5dNAas82wBYTG0yHnpq9/O3bhxbDBrNzwZTwBqsVCxxYRLf6wHNh9HVaDGhgjjHlu1+BcW8qdnBg==} + /prisma@5.4.2: + resolution: {integrity: sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.4.1 + '@prisma/engines': 5.4.2 /punycode@2.3.0: 
resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 90f25234be1e..27d4220f41bc 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -51,7 +51,7 @@ "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.1", + "@prisma/client": "5.4.2", "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", @@ -61,7 +61,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.4.1", + "prisma": "5.4.2", "tsx": "^3.12.7" } } From 7c57bf8256c25aabd52b7184bd92e8906c990612 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Thu, 12 Oct 2023 00:31:10 +0200 Subject: [PATCH 09/67] fix(smoke-tests): Fix expected error tests, other small changes (#4345) --- .../js/smoke-test-js/README.md | 6 +- .../smoke-test-js/prisma/mysql/schema.prisma | 5 ++ .../prisma/postgres/schema.prisma | 5 ++ .../driver-adapters/js/smoke-test-js/setup.sh | 7 ++ .../js/smoke-test-js/src/client/client.ts | 53 +++++++++---- .../js/smoke-test-js/src/libquery/libquery.ts | 77 +++++++++++-------- .../src/libquery/neon.http.test.ts | 2 +- 7 files changed, 103 insertions(+), 52 deletions(-) create mode 100644 query-engine/driver-adapters/js/smoke-test-js/setup.sh diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md index 204be94670b9..f1b81df5d268 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ b/query-engine/driver-adapters/js/smoke-test-js/README.md @@ -20,6 +20,8 @@ In the current directoy: pnpm i ``` +(or run `sh ./setup.sh`) + Anywhere in the repository: - Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine @@ -53,8 +55,8 @@ In the current directory: - Run `pnpm neon:ws:client` to test using `@prisma/client` - Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. For more fine-grained control: - - Run `pnpm neon:ws:http` to test using `libquery` - - Run `pnpm neon:ws:http` to test using `@prisma/client` + - Run `pnpm neon:http:libquery` to test using `libquery` + - Run `pnpm neon:http:client` to test using `@prisma/client` ### Pg diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma index 00418d57cc2c..59efb33a5594 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma @@ -67,6 +67,11 @@ model type_test_2 { datetime_column_null DateTime? 
@db.DateTime(3) } +model type_test_3 { + id Int @id @default(autoincrement()) + bytes Bytes +} + enum type_test_enum_column { value1 value2 diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma index 7319f07d8a60..7cd31f406b9d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma @@ -51,6 +51,11 @@ model type_test_2 { datetime_column_null DateTime? @db.Timestamp(3) } +model type_test_3 { + id Int @id @default(autoincrement()) + bytes Bytes +} + model Child { c String @unique c_1 String diff --git a/query-engine/driver-adapters/js/smoke-test-js/setup.sh b/query-engine/driver-adapters/js/smoke-test-js/setup.sh new file mode 100644 index 000000000000..7654679db14e --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/setup.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +cd .. || return +pnpm i && pnpm build +cargo build -p query-engine-node-api +cd smoke-test-js || exit +pnpm i \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index dcae3c46437d..b23cf2d97fb8 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -22,11 +22,25 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { for (const adapter of [driverAdapter, null]) { const isUsingDriverAdapters = adapter !== null describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { + + it('expected error (on duplicate insert) as exception thrown / promise rejected', async () => { + const prisma = new PrismaClient({ adapter, log }) + + await assert.rejects( + async () => { + const result = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) + const result2 = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) + }, + (err) => { + assert.match(err.message, /unique/i); + return true; + }, + ); + + }) + it('batch queries', async () => { - const prisma = new PrismaClient({ - adapter, - log, - }) + const prisma = new PrismaClient({ adapter, log }) const queries: string[] = [] prisma.$on('query', ({ query }) => queries.push(query)) @@ -83,7 +97,11 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { }) if (['mysql'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) + if (isUsingDriverAdapters) { + assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', '-- Implicit "BEGIN" query via underlying driver']) + } else { + assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) + } } else if (['postgres'].includes(provider)) { assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) } @@ -102,6 +120,8 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { isolationLevel: 'Serializable', }) + console.log("queries", queries) + if (isUsingDriverAdapters) { assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') @@ -123,19 +143,22 @@ export async function smokeTestClient(driverAdapter: 
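The duplicate-insert test in this hunk asserts on the error message (`/unique/i`). A stricter variant, sketched under the assumption that the smoke-test schema's `Unique` model is in scope, checks Prisma's stable error code for unique-constraint violations (`P2002`) via `Prisma.PrismaClientKnownRequestError`:

```ts
import { Prisma, PrismaClient } from '@prisma/client'
import assert from 'node:assert'

// Alternative assertion for the duplicate-insert test: instead of matching
// on the message text, check Prisma's stable error code for a unique
// constraint violation. Model and field names follow the smoke-test schema.
async function expectUniqueViolation(prisma: PrismaClient) {
  await prisma.unique.create({ data: { email: 'duplicate@example.com' } })
  try {
    await prisma.unique.create({ data: { email: 'duplicate@example.com' } })
    assert.fail('second insert should have been rejected')
  } catch (err) {
    assert.ok(err instanceof Prisma.PrismaClientKnownRequestError)
    assert.equal(err.code, 'P2002') // unique constraint failed
  }
}
```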
DriverAdapter) { ) }) - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) + }) - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) + } - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) + it('bytes type support', async () => { + const prisma = new PrismaClient({ adapter, log }) + + const result = await prisma.type_test_3.create({ + data: { + bytes: Buffer.from([1, 2, 3, 4]), + }, }) - } + + assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) + }) + }) } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index bdf50eab5669..4cdde4515615 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -4,8 +4,9 @@ import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' import { createQueryFn, initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' +import { PrismaNeonHTTP } from '@prisma/adapter-neon' -export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { +export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string, supportsTransactions = true) { const engine = initQueryEngine(adapter, prismaSchemaRelativePath) const flavour = adapter.flavour @@ -262,11 +263,14 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) it('create explicit transaction', async () => { + if(!supportsTransactions) return + const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') const tx_id = JSON.parse(startResponse).id - console.log('[nodejs] transaction id', tx_id) + assert.notStrictEqual(tx_id, undefined) + await doQuery( { action: 'findMany', @@ -282,42 +286,47 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc console.log('[nodejs] commited', commitResponse) }) - it('expected error', async () => { + it('expected error (on duplicate insert) as json result (not throwing error)', async () => { + // clean up first + await doQuery({ + modelName: 'Unique', + action: 'deleteMany', + query: { + selection: { + count: true, + }, + }, + }) + const result = await doQuery({ + modelName: 'Unique', + action: 'createOne', + query: { + arguments: { + data: { email: 'duplicate@example.com' }, + }, + selection: { + $scalars: true, + }, + }, + }) + console.log('[nodejs] error result1', JSON.stringify(result, null, 2)) - await assert.rejects( - async () => { - const result = await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - const result2 = await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' } - }, - selection: { - $scalars: true, - }, - }, - }) - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + const result2 = await doQuery({ + modelName: 'Unique', + action: 'createOne', + query: { + arguments: { + data: { email: 'duplicate@example.com' } }, - (err) => { - assert.match(err.message, 
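For readers following the libquery tests in this hunk: `doQuery` speaks the engine's JSON protocol directly, so an "expected error" can come back as data instead of a thrown exception — which is what the rewritten test below checks. A sketch of the request and of a defensive reading of the response; the exact error payload shape is an assumption, since the TODO in this patch notes it currently only holds on neon:ws:

```ts
// JSON-protocol request equivalent to the prisma.unique.create(...) calls above.
const request = {
  modelName: 'Unique',
  action: 'createOne',
  query: {
    arguments: { data: { email: 'duplicate@example.com' } },
    selection: { $scalars: true },
  },
} as const

// Engine responses can carry errors in-band rather than rejecting the promise.
type EngineResponse = { data?: unknown; errors?: Array<{ error: string }> }

const failedAsData = (res: EngineResponse) =>
  Array.isArray(res.errors) && res.errors.length > 0
```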
/unique/i); - return true; + selection: { + $scalars: true, }, - ); + }, + }) + console.log('[nodejs] error result2', JSON.stringify(result2, null, 2)) + + // TODO assert that result2 includes `errors.error` (which should currently only pass on neon:ws) }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index ac165d29f584..02872b885fe3 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -12,5 +12,5 @@ describe('neon (HTTP)', () => { const adapter = new PrismaNeonHTTP(neonConnection) const driverAdapter = bindAdapter(adapter) - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma', false) }) From 8cae8897c727c193b0981844bb2d0fe7a99b0bf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 12 Oct 2023 10:37:01 +0200 Subject: [PATCH 10/67] test(driver-adapters) search-path in tests for neon and pg (#4352) --- .../js/connector-test-kit-executor/src/index.ts | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index a36e0e360514..8a05a6b2e9aa 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -235,8 +235,17 @@ async function adapterFromEnv(url: string): Promise { return await SUPPORTED_ADAPTERS[adapter](url) } +function postgres_options(url: string): any { + let args: any = {connectionString: url} + const schemaName = new URL(url).searchParams.get('schema') + if (schemaName != null) { + args.options = `--search_path="${schemaName}"` + } + return args; +} + async function pgAdapter(url: string): Promise { - const pool = new pgDriver.Pool({connectionString: url}) + const pool = new pgDriver.Pool(postgres_options(url)) return new prismaPg.PrismaPg(pool) } @@ -251,7 +260,7 @@ async function neonWsAdapter(url: string): Promise { neonConfig.useSecureWebSocket = false neonConfig.pipelineConnect = false - const pool = new NeonPool({ connectionString: url }) + const pool = new NeonPool(postgres_options(url)) return new prismaNeon.PrismaNeon(pool) } From 6b41f7a3b9f16da11f37cf41d70e8149f95eb6cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 12 Oct 2023 11:12:07 +0200 Subject: [PATCH 11/67] Driver adapters phase 1 correctness: implement flavor-specific driver adapter conversions for postgres dates (#4351) * Implement date conversions based on given types * Rename (Postgres|Neon)ColumnType to ScalarColumnType * Copy changes over pg driver adapter * ignore case in type matching * Fix unit tests * Update query-engine/driver-adapters/js/adapter-neon/src/conversion.ts Co-authored-by: Alexey Orlenko * Remove peer dependency for array * Address feedback * Update lock file * driver-adapters: parse decimals as strings in arrays in neon and pg Fixes the following test: `writes::data_types::scalar_list::defaults::decimal::basic_write`. 
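The "parse decimals as strings in arrays" fix mentioned in this commit message leans on `postgres-array`, whose `parse` accepts a per-element transform; passing the identity function keeps `NUMERIC[]` elements as exact strings instead of lossy JS floats. A minimal sketch:

```ts
import { parse as parseArray } from 'postgres-array'

// Wire text for a NUMERIC[] column; the identity transform preserves
// every element verbatim, so no float rounding can creep in.
const decimals = parseArray('{12.34,0.1,NULL}', (s) => s)
// -> ['12.34', '0.1', null]
```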
Fixes: https://github.com/prisma/team-orm/issues/435 --------- Co-authored-by: Alexey Orlenko --- quaint/src/ast/values.rs | 32 ++- .../src/model_extensions/scalar_field.rs | 6 +- .../js/adapter-neon/package.json | 3 +- .../js/adapter-neon/src/conversion.ts | 195 ++++++++++++----- .../js/adapter-pg/package.json | 3 +- .../js/adapter-pg/src/conversion.ts | 197 ++++++++++++------ .../driver-adapters/js/pnpm-lock.yaml | 6 + .../driver-adapters/src/conversion.rs | 22 +- .../src/conversion/postgres.rs | 55 +++++ query-engine/driver-adapters/src/proxy.rs | 114 +++++----- query-engine/driver-adapters/src/queryable.rs | 48 +++-- 11 files changed, 476 insertions(+), 205 deletions(-) create mode 100644 query-engine/driver-adapters/src/conversion/postgres.rs diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 081405374340..a1bf4f41a26d 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -33,13 +33,43 @@ where } } +/// A native-column type, i.e. the connector-specific type of the column. +#[derive(Debug, Clone, PartialEq)] +pub struct NativeColumnType<'a>(Cow<'a, str>); + +impl<'a> std::ops::Deref for NativeColumnType<'a> { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl<'a> From<&'a str> for NativeColumnType<'a> { + fn from(s: &'a str) -> Self { + Self(Cow::Owned(s.to_uppercase())) + } +} + #[derive(Debug, Clone, PartialEq)] pub struct Value<'a> { pub typed: ValueType<'a>, - pub native_column_type: Option>, + pub native_column_type: Option>, } impl<'a> Value<'a> { + /// Returns the native column type of the value, if any, in the form + /// of an UPCASE string. ex: "VARCHAR, BYTEA, DATE, TIMEZ" + pub fn native_column_type_name(&'a self) -> Option<&'a str> { + self.native_column_type.as_deref() + } + + /// Changes the value to include information about the native column type + pub fn with_native_column_type>>(mut self, column_type: Option) -> Self { + self.native_column_type = column_type.map(|ct| ct.into()); + self + } + /// Creates a new 32-bit signed integer. 
pub fn int32(value: I) -> Self where diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index b8ea590f25dc..7eb414dd92a8 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -14,7 +14,7 @@ pub(crate) trait ScalarFieldExt { impl ScalarFieldExt for ScalarField { fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a> { - match (pv, self.type_identifier()) { + let value = match (pv, self.type_identifier()) { (PrismaValue::String(s), _) => s.into(), (PrismaValue::Float(f), _) => f.into(), (PrismaValue::Boolean(b), _) => b.into(), @@ -76,7 +76,9 @@ impl ScalarFieldExt for ScalarField { TypeIdentifier::Bytes => Value::null_bytes(), TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), }, - } + }; + + value.with_native_column_type(self.native_type().map(|nt| nt.name())) } fn type_family(&self) -> TypeFamily { diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 03d19f6eeb0d..02005a13572f 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -18,7 +18,8 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*", + "postgres-array": "^3.0.2" }, "devDependencies": { "@neondatabase/serverless": "^0.6.0" diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 932461e3bc3b..9f6486362d78 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -1,10 +1,11 @@ import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' import { types } from '@neondatabase/serverless' +import { parse as parseArray } from 'postgres-array' -const NeonColumnType = types.builtins +const ScalarColumnType = types.builtins /** - * PostgreSQL array column types (not defined in NeonColumnType). + * PostgreSQL array column types (not defined in ScalarColumnType). 
*/ const ArrayColumnType = { BOOL_ARRAY: 1000, @@ -35,45 +36,46 @@ const ArrayColumnType = { */ export function fieldToColumnType(fieldTypeId: number): ColumnType { switch (fieldTypeId) { - case NeonColumnType['INT2']: - case NeonColumnType['INT4']: + case ScalarColumnType['INT2']: + case ScalarColumnType['INT4']: return ColumnTypeEnum.Int32 - case NeonColumnType['INT8']: + case ScalarColumnType['INT8']: return ColumnTypeEnum.Int64 - case NeonColumnType['FLOAT4']: + case ScalarColumnType['FLOAT4']: return ColumnTypeEnum.Float - case NeonColumnType['FLOAT8']: + case ScalarColumnType['FLOAT8']: return ColumnTypeEnum.Double - case NeonColumnType['BOOL']: + case ScalarColumnType['BOOL']: return ColumnTypeEnum.Boolean - case NeonColumnType['DATE']: + case ScalarColumnType['DATE']: return ColumnTypeEnum.Date - case NeonColumnType['TIME']: + case ScalarColumnType['TIME']: + case ScalarColumnType['TIMETZ']: return ColumnTypeEnum.Time - case NeonColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMPTZ']: return ColumnTypeEnum.DateTime - case NeonColumnType['NUMERIC']: - case NeonColumnType['MONEY']: + case ScalarColumnType['NUMERIC']: + case ScalarColumnType['MONEY']: return ColumnTypeEnum.Numeric - case NeonColumnType['JSON']: - case NeonColumnType['JSONB']: + case ScalarColumnType['JSON']: + case ScalarColumnType['JSONB']: return ColumnTypeEnum.Json - case NeonColumnType['UUID']: + case ScalarColumnType['UUID']: return ColumnTypeEnum.Uuid - case NeonColumnType['OID']: + case ScalarColumnType['OID']: return ColumnTypeEnum.Int64 - case NeonColumnType['BPCHAR']: - case NeonColumnType['TEXT']: - case NeonColumnType['VARCHAR']: - case NeonColumnType['BIT']: - case NeonColumnType['VARBIT']: - case NeonColumnType['INET']: - case NeonColumnType['CIDR']: - case NeonColumnType['XML']: + case ScalarColumnType['BPCHAR']: + case ScalarColumnType['TEXT']: + case ScalarColumnType['VARCHAR']: + case ScalarColumnType['BIT']: + case ScalarColumnType['VARBIT']: + case ScalarColumnType['INET']: + case ScalarColumnType['CIDR']: + case ScalarColumnType['XML']: return ColumnTypeEnum.Text - case NeonColumnType['BYTEA']: + case ScalarColumnType['BYTEA']: return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: case ArrayColumnType.INT4_ARRAY: return ColumnTypeEnum.Int32Array @@ -116,6 +118,88 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { } } +function normalize_array(element_normalizer: (string) => string): (string) => string[] { + return (str) => parseArray(str, element_normalizer) +} + +/****************************/ +/* Time-related data-types */ +/****************************/ + +function normalize_numeric(numeric: string): string { + return numeric +} + +types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) +types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) + +/****************************/ +/* Time-related data-types */ +/****************************/ + + +function normalize_date(date: string): string { + return date +} + +function normalize_timestamp(time: string): string { + return time +} + +function normalize_timestampz(time: string): string { + return time.split("+")[0] +} + +/* + * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f + */ + +function normalize_time(time: string): string { + return time +} + +function normalize_timez(time: string): string { + // Although it might be controversial, UTC is assumed in consistency with the behavior 
of rust postgres driver + // in quaint. See quaint/src/connector/postgres/conversion.rs + return time.split("+")[0] +} + +types.setTypeParser(ScalarColumnType.TIME, normalize_time) +types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) +types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) + +/* + * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD + */ + +types.setTypeParser(ScalarColumnType.DATE, normalize_date) +types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) + + +/* + * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format + * ex: 1996-12-19T16:39:57-08:00 + */ +types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) +types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) +types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) + +/******************/ +/* Money handling */ +/******************/ + +function normalize_money(money: string): string { + return money.slice(1) +} + +types.setTypeParser(ScalarColumnType.MONEY, normalize_money) +types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) + + +/*****************/ +/* JSON handling */ +/*****************/ + /** * JsonNull are stored in JSON strings as the string "null", distinguishable from * the `null` value which is used by the driver to represent the database NULL. @@ -126,22 +210,17 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and * convert it to QuaintValue::Json(Some(Null)). */ -function convertJson(json: string): unknown { +function toJson(json: string): unknown { return (json === 'null') ? JsonNullMarker : JSON.parse(json) } -// Original BYTEA parser -const parsePgBytes = types.getTypeParser(NeonColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} +types.setTypeParser(ScalarColumnType.JSONB, toJson) +types.setTypeParser(ScalarColumnType.JSON, toJson) + +/************************/ +/* Binary data handling */ +/************************/ /** * TODO: @@ -154,14 +233,26 @@ function encodeBuffer(buffer: Buffer) { return Array.from(new Uint8Array(buffer)) } -// return string instead of JavaScript Date object -types.setTypeParser(NeonColumnType.TIME, date => date) -types.setTypeParser(NeonColumnType.DATE, date => date) -types.setTypeParser(NeonColumnType.TIMESTAMP, date => date) -types.setTypeParser(NeonColumnType.JSONB, convertJson) -types.setTypeParser(NeonColumnType.JSON, convertJson) -types.setTypeParser(NeonColumnType.MONEY, money => money.slice(1)) -types.setTypeParser(NeonColumnType.BYTEA, convertBytes) +/* + * BYTEA - arbitrary raw binary strings + */ + +const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer +/** + * Convert bytes to a JSON-encodable representation since we can't + * currently send a parsed Buffer or ArrayBuffer across JS to Rust + * boundary. 
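A stand-alone restatement of the string normalizers this file registers, with illustrative inputs; the point is that the driver hands Rust plain strings and lets quaint do all date and number interpretation:

```ts
// Same logic as the normalizers above, inlined so the sample runs standalone.
const normalizeMoney = (m: string) => m.slice(1)       // strip leading '$'
const normalizeTimez = (t: string) => t.split('+')[0]  // drop the offset, assume UTC
const normalizeTimestampz = (t: string) => t.split('+')[0]

normalizeMoney('$12.34')                       // '12.34'
normalizeTimez('16:20:00.5555+02')             // '16:20:00.5555'
normalizeTimestampz('2023-10-12 10:00:00+02')  // '2023-10-12 10:00:00'
// NUMERIC stays an untouched string, so arbitrary precision survives.
```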
+ */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return encodeBuffer(buffer) +} + +types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) + +/* + * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] @@ -169,13 +260,3 @@ types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) return buffers.map(encodeBuffer) }) - -const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] - -types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) - -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => - parseTextArray(moneyArray).map((money) => money.slice(1)), -) diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 3573d33bc161..7514569c562a 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -18,7 +18,8 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*", + "postgres-array": "^3.0.2" }, "devDependencies": { "pg": "^8.11.3", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index a1c8ce7c5e6a..69e8f1d9dec1 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -1,10 +1,11 @@ import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' import { types } from 'pg' +import { parse as parseArray } from 'postgres-array' -const PgColumnType = types.builtins +const ScalarColumnType = types.builtins /** - * PostgreSQL array column types (not defined in PgColumnType). + * PostgreSQL array column types (not defined in ScalarColumnType). 
*/ const ArrayColumnType = { BOOL_ARRAY: 1000, @@ -35,45 +36,46 @@ const ArrayColumnType = { */ export function fieldToColumnType(fieldTypeId: number): ColumnType { switch (fieldTypeId) { - case PgColumnType['INT2']: - case PgColumnType['INT4']: + case ScalarColumnType['INT2']: + case ScalarColumnType['INT4']: return ColumnTypeEnum.Int32 - case PgColumnType['INT8']: + case ScalarColumnType['INT8']: return ColumnTypeEnum.Int64 - case PgColumnType['FLOAT4']: + case ScalarColumnType['FLOAT4']: return ColumnTypeEnum.Float - case PgColumnType['FLOAT8']: + case ScalarColumnType['FLOAT8']: return ColumnTypeEnum.Double - case PgColumnType['BOOL']: + case ScalarColumnType['BOOL']: return ColumnTypeEnum.Boolean - case PgColumnType['DATE']: + case ScalarColumnType['DATE']: return ColumnTypeEnum.Date - case PgColumnType['TIME']: + case ScalarColumnType['TIME']: + case ScalarColumnType['TIMETZ']: return ColumnTypeEnum.Time - case PgColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMPTZ']: return ColumnTypeEnum.DateTime - case PgColumnType['NUMERIC']: - case PgColumnType['MONEY']: + case ScalarColumnType['NUMERIC']: + case ScalarColumnType['MONEY']: return ColumnTypeEnum.Numeric - case PgColumnType['JSON']: - case PgColumnType['JSONB']: + case ScalarColumnType['JSON']: + case ScalarColumnType['JSONB']: return ColumnTypeEnum.Json - case PgColumnType['UUID']: + case ScalarColumnType['UUID']: return ColumnTypeEnum.Uuid - case PgColumnType['OID']: + case ScalarColumnType['OID']: return ColumnTypeEnum.Int64 - case PgColumnType['BPCHAR']: - case PgColumnType['TEXT']: - case PgColumnType['VARCHAR']: - case PgColumnType['BIT']: - case PgColumnType['VARBIT']: - case PgColumnType['INET']: - case PgColumnType['CIDR']: - case PgColumnType['XML']: + case ScalarColumnType['BPCHAR']: + case ScalarColumnType['TEXT']: + case ScalarColumnType['VARCHAR']: + case ScalarColumnType['BIT']: + case ScalarColumnType['VARBIT']: + case ScalarColumnType['INET']: + case ScalarColumnType['CIDR']: + case ScalarColumnType['XML']: return ColumnTypeEnum.Text - case PgColumnType['BYTEA']: + case ScalarColumnType['BYTEA']: return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: case ArrayColumnType.INT4_ARRAY: return ColumnTypeEnum.Int32Array @@ -116,6 +118,88 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { } } +function normalize_array(element_normalizer: (string) => string): (string) => string[] { + return (str) => parseArray(str, element_normalizer) +} + +/****************************/ +/* Time-related data-types */ +/****************************/ + +function normalize_numeric(numeric: string): string { + return numeric +} + +types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) +types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) + +/****************************/ +/* Time-related data-types */ +/****************************/ + + +function normalize_date(date: string): string { + return date +} + +function normalize_timestamp(time: string): string { + return time +} + +function normalize_timestampz(time: string): string { + return time.split("+")[0] +} + +/* + * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f + */ + +function normalize_time(time: string): string { + return time +} + +function normalize_timez(time: string): string { + // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver + // in quaint. 
See quaint/src/connector/postgres/conversion.rs + return time.split("+")[0] +} + +types.setTypeParser(ScalarColumnType.TIME, normalize_time) +types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) +types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) + +/* + * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD + */ + +types.setTypeParser(ScalarColumnType.DATE, normalize_date) +types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) + + +/* + * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format + * ex: 1996-12-19T16:39:57-08:00 + */ +types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) +types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) +types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) + +/******************/ +/* Money handling */ +/******************/ + +function normalize_money(money: string): string { + return money.slice(1) +} + +types.setTypeParser(ScalarColumnType.MONEY, normalize_money) +types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) + + +/*****************/ +/* JSON handling */ +/*****************/ + /** * JsonNull are stored in JSON strings as the string "null", distinguishable from * the `null` value which is used by the driver to represent the database NULL. @@ -126,22 +210,17 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and * convert it to QuaintValue::Json(Some(Null)). */ -function convertJson(json: string): unknown { +function toJson(json: string): unknown { return (json === 'null') ? JsonNullMarker : JSON.parse(json) } -// Original BYTEA parser -const parsePgBytes = types.getTypeParser(PgColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} +types.setTypeParser(ScalarColumnType.JSONB, toJson) +types.setTypeParser(ScalarColumnType.JSON, toJson) + +/************************/ +/* Binary data handling */ +/************************/ /** * TODO: @@ -154,28 +233,30 @@ function encodeBuffer(buffer: Buffer) { return Array.from(new Uint8Array(buffer)) } -// return string instead of JavaScript Date object -types.setTypeParser(PgColumnType.TIME, date => date) -types.setTypeParser(PgColumnType.DATE, date => date) -types.setTypeParser(PgColumnType.TIMESTAMP, date => date) -types.setTypeParser(PgColumnType.JSONB, convertJson) -types.setTypeParser(PgColumnType.JSON, convertJson) -types.setTypeParser(PgColumnType.MONEY, money => money.slice(1)) -types.setTypeParser(PgColumnType.BYTEA, convertBytes) +/* + * BYTEA - arbitrary raw binary strings + */ + +const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer +/** + * Convert bytes to a JSON-encodable representation since we can't + * currently send a parsed Buffer or ArrayBuffer across JS to Rust + * boundary. 
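To make the JSON-null distinction described here concrete, the same parser inlined with its two interesting inputs; `JsonNullMarker` is the real export from `@prisma/driver-adapter-utils`:

```ts
import { JsonNullMarker } from '@prisma/driver-adapter-utils'

const toJson = (json: string): unknown =>
  json === 'null' ? JsonNullMarker : JSON.parse(json)

toJson('null')     // JsonNullMarker — the column holds a JSON-level null
toJson('{"a":1}')  // { a: 1 }
// A database NULL never reaches this parser: the driver yields JS `null`,
// which the Rust side maps to QuaintValue::Json(None).
```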
+ */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return encodeBuffer(buffer) +} + +types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) + +/* + * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) return buffers.map(encodeBuffer) -}) - -const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] - -types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) - -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => - parseTextArray(moneyArray).map((money) => money.slice(1)), -) +}) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 0b15115b5e23..89dbc4ee3d38 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -39,6 +39,9 @@ importers: '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils + postgres-array: + specifier: ^3.0.2 + version: 3.0.2 devDependencies: '@neondatabase/serverless': specifier: ^0.6.0 @@ -49,6 +52,9 @@ importers: '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils + postgres-array: + specifier: ^3.0.2 + version: 3.0.2 devDependencies: '@types/pg': specifier: ^8.10.2 diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index 2d469a5ab7c3..f65cc955fb21 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -1,7 +1,7 @@ +pub(crate) mod postgres; + use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::NapiValue; -use quaint::ast::Value as QuaintValue; -use quaint::ast::ValueType as QuaintValueType; use serde::Serialize; use serde_json::value::Value as JsonValue; @@ -59,35 +59,35 @@ impl ToNapiValue for JSArg { } } -pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result> { - let mut values = Vec::with_capacity(params.len()); +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + let mut args = Vec::with_capacity(values.len()); - for qv in params { + for qv in values { let res = match &qv.typed { - QuaintValueType::Json(s) => match s { + quaint::ValueType::Json(s) => match s { Some(ref s) => { let json_str = serde_json::to_string(s)?; JSArg::RawString(json_str) } None => JsonValue::Null.into(), }, - QuaintValueType::Bytes(bytes) => match bytes { + quaint::ValueType::Bytes(bytes) => match bytes { Some(bytes) => JSArg::Buffer(bytes.to_vec()), None => JsonValue::Null.into(), }, - quaint_value @ QuaintValueType::Numeric(bd) => match bd { + quaint_value @ quaint::ValueType::Numeric(bd) => match bd { Some(bd) => match bd.to_string().parse::() { Ok(double) => JSArg::from(JsonValue::from(double)), Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), }, None => JsonValue::Null.into(), }, - QuaintValueType::Array(Some(items)) => JSArg::Array(conv_params(items)?), + quaint::ValueType::Array(Some(items)) => JSArg::Array(values_to_js_args(items)?), quaint_value => 
JSArg::from(JsonValue::from(quaint_value.clone())), }; - values.push(res); + args.push(res); } - Ok(values) + Ok(args) } diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs new file mode 100644 index 000000000000..77e79f549d06 --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -0,0 +1,55 @@ +use crate::conversion::JSArg; +use chrono::format::StrftimeItems; +use once_cell::sync::Lazy; +use serde_json::value::Value as JsonValue; + +static TIME_FMT: Lazy = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f")); + +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + let mut args = Vec::with_capacity(values.len()); + + for qv in values { + let res = match (&qv.typed, qv.native_column_type_name()) { + (quaint::ValueType::DateTime(value), Some("DATE")) => match value { + Some(value) => JSArg::RawString(value.date_naive().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), Some("TIME")) => match value { + Some(value) => JSArg::RawString(value.time().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), Some("TIMETZ")) => match value { + Some(value) => JSArg::RawString(value.time().format_with_items(TIME_FMT.clone()).to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), _) => match value { + Some(value) => JSArg::RawString(value.naive_utc().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Json(s), _) => match s { + Some(ref s) => { + let json_str = serde_json::to_string(s)?; + JSArg::RawString(json_str) + } + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Bytes(bytes), _) => match bytes { + Some(bytes) => JSArg::Buffer(bytes.to_vec()), + None => JsonValue::Null.into(), + }, + (quaint_value @ quaint::ValueType::Numeric(bd), _) => match bd { + Some(bd) => match bd.to_string().parse::() { + Ok(double) => JSArg::from(JsonValue::from(double)), + Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), + }, + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), + (quaint_value, _) => JSArg::from(JsonValue::from(quaint_value.clone())), + }; + + args.push(res); + } + + Ok(args) +} diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index bdcab93a0c55..14bfd46e62e0 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -258,34 +258,31 @@ fn js_value_to_quaint( serde_json::Value::Number(n) => { // n.as_i32() is not implemented, so we need to downcast from i64 instead n.as_i64() - .ok_or(conversion_error!("number must be an integer")) + .ok_or(conversion_error!("number must be an integer in column '{column_name}'")) .and_then(|n| -> quaint::Result { n.try_into() - .map_err(|e| conversion_error!("cannot convert {n} to i32: {e}")) + .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}")) }) .map(QuaintValue::int32) } - serde_json::Value::String(s) => s - .parse::() - .map(QuaintValue::int32) - .map_err(|e| conversion_error!("string-encoded number must be an i32, got {s}: {e}")), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int32).map_err(|e| { + conversion_error!("string-encoded number must be an i32 in column '{column_name}', got {s}: {e}") + }), serde_json::Value::Null => Ok(QuaintValue::null_int32()), 
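These `js_value_to_quaint` branches accept both number- and string-encoded integers, which matters for `Int64`: JS numbers lose precision past 2^53, so adapters can return large values as strings. A sketch of a result set exercising that path; the field names follow the driver-adapter `ResultSet` shape used elsewhere in this series:

```ts
import { ColumnTypeEnum } from '@prisma/driver-adapter-utils'

// Int64 beyond Number.MAX_SAFE_INTEGER travels as a string and is handled
// by the string-encoded branch of js_value_to_quaint on the Rust side.
const resultSet = {
  columnNames: ['id', 'big'],
  columnTypes: [ColumnTypeEnum.Int32, ColumnTypeEnum.Int64],
  rows: [[1, '9007199254740993']], // 2^53 + 1: not representable as a JS number
}
```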
mismatch => Err(conversion_error!( - "expected an i32 number in column {column_name}, found {mismatch}" + "expected an i32 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Int64 => match json_value { - serde_json::Value::Number(n) => n - .as_i64() - .map(QuaintValue::int64) - .ok_or(conversion_error!("number must be an i64, got {n}")), - serde_json::Value::String(s) => s - .parse::() - .map(QuaintValue::int64) - .map_err(|e| conversion_error!("string-encoded number must be an i64, got {s}: {e}")), + serde_json::Value::Number(n) => n.as_i64().map(QuaintValue::int64).ok_or(conversion_error!( + "number must be an i64 in column '{column_name}', got {n}" + )), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int64).map_err(|e| { + conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}") + }), serde_json::Value::Null => Ok(QuaintValue::null_int64()), mismatch => Err(conversion_error!( - "expected a string or number in column {column_name}, found {mismatch}" + "expected a string or number in column '{column_name}', found {mismatch}" )), }, ColumnType::Float => match json_value { @@ -293,36 +290,39 @@ fn js_value_to_quaint( // We assume that the JSON value is a valid f32 number, but we check for overflows anyway. serde_json::Value::Number(n) => n .as_f64() - .ok_or(conversion_error!("number must be a float, got {n}")) + .ok_or(conversion_error!( + "number must be a float in column '{column_name}', got {n}" + )) .and_then(f64_to_f32) .map(QuaintValue::float), serde_json::Value::Null => Ok(QuaintValue::null_float()), mismatch => Err(conversion_error!( - "expected an f32 number in column {column_name}, found {mismatch}" + "expected an f32 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Double => match json_value { - serde_json::Value::Number(n) => n - .as_f64() - .map(QuaintValue::double) - .ok_or(conversion_error!("number must be a f64, got {n}")), + serde_json::Value::Number(n) => n.as_f64().map(QuaintValue::double).ok_or(conversion_error!( + "number must be a f64 in column '{column_name}', got {n}" + )), serde_json::Value::Null => Ok(QuaintValue::null_double()), mismatch => Err(conversion_error!( - "expected an f64 number in column {column_name}, found {mismatch}" + "expected an f64 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Numeric => match json_value { - serde_json::Value::String(s) => BigDecimal::from_str(&s) - .map(QuaintValue::numeric) - .map_err(|e| conversion_error!("invalid numeric value when parsing {s}: {e}")), + serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { + conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") + }), serde_json::Value::Number(n) => n .as_f64() .and_then(BigDecimal::from_f64) - .ok_or(conversion_error!("number must be an f64, got {n}")) + .ok_or(conversion_error!( + "number must be an f64 in column '{column_name}', got {n}" + )) .map(QuaintValue::numeric), serde_json::Value::Null => Ok(QuaintValue::null_numeric()), mismatch => Err(conversion_error!( - "expected a string-encoded number in column {column_name}, found {mismatch}", + "expected a string-encoded number in column '{column_name}', found {mismatch}", )), }, ColumnType::Boolean => match json_value { @@ -332,16 +332,18 @@ fn js_value_to_quaint( Some(0) => Ok(QuaintValue::boolean(false)), Some(1) => Ok(QuaintValue::boolean(true)), _ => Err(conversion_error!( - "expected number-encoded boolean to be 0 or 
1, got {n}" + "expected number-encoded boolean to be 0 or 1 in column '{column_name}', got {n}" )), }, serde_json::Value::String(s) => match s.as_str() { "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), "true" | "TRUE" | "1" => Ok(QuaintValue::boolean(true)), - _ => Err(conversion_error!("expected string-encoded boolean, got {s}")), + _ => Err(conversion_error!( + "expected string-encoded boolean in column '{column_name}', got {s}" + )), }, mismatch => Err(conversion_error!( - "expected a boolean in column {column_name}, found {mismatch}" + "expected a boolean in column '{column_name}', found {mismatch}" )), }, ColumnType::Char => match json_value { @@ -351,43 +353,44 @@ fn js_value_to_quaint( }, serde_json::Value::Null => Ok(QuaintValue::null_character()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Text => match json_value { serde_json::Value::String(s) => Ok(QuaintValue::text(s)), serde_json::Value::Null => Ok(QuaintValue::null_text()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Date => match json_value { serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") .map(QuaintValue::date) - .map_err(|_| conversion_error!("expected a date string, got {s}")), + .map_err(|_| conversion_error!("expected a date string in column '{column_name}', got {s}")), serde_json::Value::Null => Ok(QuaintValue::null_date()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Time => match json_value { - serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S") + serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S%.f") .map(QuaintValue::time) - .map_err(|_| conversion_error!("expected a time string, got {s}")), + .map_err(|_| conversion_error!("expected a time string in column '{column_name}', got {s}")), serde_json::Value::Null => Ok(QuaintValue::null_time()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::DateTime => match json_value { + // TODO: change parsing order to prefer RFC3339 serde_json::Value::String(s) => chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") .map(|dt| DateTime::from_utc(dt, Utc)) .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) .map(QuaintValue::datetime) - .map_err(|_| conversion_error!("expected a datetime string, found {s}")), + .map_err(|_| conversion_error!("expected a datetime string in column '{column_name}', found {s}")), serde_json::Value::Null => Ok(QuaintValue::null_datetime()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Json => { @@ -403,7 +406,7 @@ fn js_value_to_quaint( serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), serde_json::Value::Null => Ok(QuaintValue::null_enum()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Bytes => match 
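For reference, sample strings the time and datetime branches in this hunk now accept (illustrative values; note the TODO about flipping the parse order toward RFC 3339):

```ts
const time    = '13:02:20.321'               // Time: %H:%M:%S%.f now admits fractional seconds
const naive   = '1996-12-19 16:39:57.123'    // DateTime: naive format, interpreted as UTC
const rfc3339 = '1996-12-19T16:39:57-08:00'  // DateTime: RFC 3339 fallback keeps the offset
```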
json_value { @@ -413,19 +416,21 @@ fn js_value_to_quaint( .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) .collect::>>() .map(QuaintValue::bytes) - .ok_or(conversion_error!("elements of the array must be u8")), + .ok_or(conversion_error!( + "elements of the array in column '{column_name}' must be u8" + )), serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( - "expected a string or an array in column {column_name}, found {mismatch}", + "expected a string or an array in column '{column_name}', found {mismatch}", )), }, ColumnType::Uuid => match json_value { serde_json::Value::String(s) => uuid::Uuid::parse_str(&s) .map(QuaintValue::uuid) - .map_err(|_| conversion_error!("Expected a UUID string")), + .map_err(|_| conversion_error!("Expected a UUID string in column '{column_name}'")), serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( - "Expected a UUID string in column {column_name}, found {mismatch}" + "Expected a UUID string in column '{column_name}', found {mismatch}" )), }, ColumnType::UnknownNumber => match json_value { @@ -433,9 +438,11 @@ fn js_value_to_quaint( .as_i64() .map(QuaintValue::int64) .or(n.as_f64().map(QuaintValue::double)) - .ok_or(conversion_error!("number must be an i64 or f64, got {n}")), + .ok_or(conversion_error!( + "number must be an i64 or f64 in column '{column_name}', got {n}" + )), mismatch => Err(conversion_error!( - "expected a either an i64 or a f64 in column {column_name}, found {mismatch}", + "expected a either an i64 or a f64 in column '{column_name}', found {mismatch}", )), }, @@ -476,7 +483,7 @@ fn js_array_to_quaint( )), serde_json::Value::Null => Ok(QuaintValue::null_array()), mismatch => Err(conversion_error!( - "expected an array in column {column_name}, found {mismatch}", + "expected an array in column '{column_name}', found {mismatch}", )), } } @@ -832,9 +839,14 @@ mod proxy_test { let s = "23:59:59"; let json_value = serde_json::Value::String(s.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); assert_eq!(quaint_value, QuaintValue::time(time)); + + let s = "13:02:20.321"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + let time: NaiveTime = NaiveTime::from_hms_milli_opt(13, 02, 20, 321).unwrap(); + assert_eq!(quaint_value, QuaintValue::time(time)); } #[test] @@ -935,7 +947,7 @@ mod proxy_test { assert_eq!( quaint_value.err().unwrap().to_string(), - "Conversion failed: expected an i32 number in column column_name[2], found {}" + "Conversion failed: expected an i32 number in column 'column_name[2]', found {}" ); } @@ -957,7 +969,7 @@ mod proxy_test { assert_eq!( quaint_value.err().unwrap().to_string(), - "Conversion failed: expected a string in column column_name[0], found 10" + "Conversion failed: expected a string in column 'column_name[0]', found 10" ); } } diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index d8b022d0fa49..864ba5042083 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -10,7 +10,6 @@ use quaint::{ error::{Error, ErrorKind}, prelude::{Query as QuaintQuery, Queryable as QuaintQueryable, ResultSet, TransactionCapable}, visitor::{self, Visitor}, - Value, }; use 
tracing::{info_span, Instrument}; @@ -38,8 +37,8 @@ impl JsBaseQueryable { Self { proxy, flavour } } - /// visit a query according to the flavour of the JS connector - pub fn visit_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec<quaint::Value<'a>>)> { + /// visit a quaint query AST according to the flavour of the JS connector + fn visit_quaint_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec<quaint::Value<'a>>)> { match self.flavour { Flavour::Mysql => visitor::Mysql::build(q), Flavour::Postgres => visitor::Postgres::build(q), @@ -47,39 +46,48 @@ impl JsBaseQueryable { _ => unimplemented!("Unsupported flavour for JS connector {:?}", self.flavour), } } + + async fn build_query(&self, sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result<Query> { + let sql: String = sql.to_string(); + let args = match self.flavour { + Flavour::Postgres => conversion::postgres::values_to_js_args(values), + _ => conversion::values_to_js_args(values), + }?; + Ok(Query { sql, args }) + } } #[async_trait] impl QuaintQueryable for JsBaseQueryable { async fn query(&self, q: QuaintQuery<'_>) -> quaint::Result<ResultSet> { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.query_raw(&sql, &params).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<ResultSet> { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<ResultSet> { metrics::query("js.query_raw", sql, params, move || async move { self.do_query_raw(sql, params).await }) .await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<ResultSet> { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<ResultSet> { self.query_raw(sql, params).await } async fn execute(&self, q: QuaintQuery<'_>) -> quaint::Result<u64> { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.execute_raw(&sql, &params).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<u64> { metrics::query("js.execute_raw", sql, params, move || async move { self.do_execute_raw(sql, params).await }) .await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<u64> { self.execute_raw(sql, params).await } @@ -134,16 +142,10 @@ impl JsBaseQueryable { format!(r#"-- Implicit "{}" query via underlying driver"#, stmt) } - async fn build_query(sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result<Query> { - let sql: String = sql.to_string(); - let args = conversion::conv_params(values)?; - Ok(Query { sql, args }) - } - - async fn do_query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<ResultSet> { + async fn do_query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<ResultSet> { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let result_set = self.proxy.query_raw(query).instrument(sql_span).await?; @@ -154,10 +156,10 @@ impl JsBaseQueryable { result_set.try_into() } - async fn do_execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> { + async fn 
do_execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<u64> { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let affected_rows = self.proxy.execute_raw(query).instrument(sql_span).await?; @@ -202,11 +204,11 @@ impl QuaintQueryable for JsQueryable { self.inner.query(q).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<ResultSet> { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<ResultSet> { self.inner.query_raw(sql, params).await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<ResultSet> { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<ResultSet> { self.inner.query_raw_typed(sql, params).await } @@ -214,11 +216,11 @@ impl QuaintQueryable for JsQueryable { self.inner.execute(q).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<u64> { self.inner.execute_raw(sql, params).await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result<u64> { self.inner.execute_raw_typed(sql, params).await } From 66f0881e93adb8868b623fa14a649cb876947c31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Fri, 13 Oct 2023 13:16:43 +0200 Subject: [PATCH 12/67] Fix binary entrypoint for planetscale tests (#4357) * Change binary entrypoint for planetscale tests * Fix MYSQL_DATABASE name to match what the test setup uses --------- Co-authored-by: Alexey Orlenko --- docker-compose.yml | 2 +- docker/planetscale_proxy/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 97c9ed79e1c7..fad49d836cde 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -126,7 +126,7 @@ services: environment: MYSQL_HOST: 'vitess-test-8_0' MYSQL_PORT: 33807 - MYSQL_DATABASE: 'test-0000-00000000' + MYSQL_DATABASE: 'test' ports: - '8085:8085' depends_on: diff --git a/docker/planetscale_proxy/Dockerfile b/docker/planetscale_proxy/Dockerfile index ae5ec56329c2..2411894d88f0 100644 --- a/docker/planetscale_proxy/Dockerfile +++ b/docker/planetscale_proxy/Dockerfile @@ -4,7 +4,7 @@ RUN apt update && apt install netcat-openbsd -y RUN cd /go/src && git clone https://github.com/prisma/planetscale-proxy.git RUN cd /go/src/planetscale-proxy && go install . -ENTRYPOINT /go/bin/planetscale-proxy \ +ENTRYPOINT /go/bin/ps-http-sim \ -http-addr=0.0.0.0 \ -http-port=8085 \ -mysql-addr=$MYSQL_HOST \ From 42bfcd0e4a2e9f73937c1ed5c2f42bf08d07289a Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 13 Oct 2023 16:34:45 +0200 Subject: [PATCH 13/67] engineer: update to 1.60 (#4366) --- .buildkite/engineer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 5e586ad2f0ec..bf31a6e371df 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -43,7 +43,7 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! 
type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). - curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.59/latest/$OS/engineer.gz" --output engineer.gz + curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.60/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer From f44b62756e369de9a75621e9cdc1a1840b3b59b2 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Fri, 13 Oct 2023 17:19:35 +0200 Subject: [PATCH 14/67] driver-adapters: Common code for libsql/mysql error handling (#4365) * driver-adapters: Common code for libsql/mysql error handling Splits part of the code that are common between #4364 and #4362 into it's own PR so it could be reviewd and merged separately and aforementioned PR stop conflicting with each other. Also implements error handling for PG since it is necessary for modified smoke test to pass. * fix codestyle --- quaint/src/connector/postgres.rs | 4 +- quaint/src/error.rs | 2 +- .../js/adapter-neon/src/neon.ts | 7 +- .../driver-adapters/js/adapter-pg/src/pg.ts | 44 ++++++++----- .../js/driver-adapter-utils/src/binder.ts | 4 +- .../js/driver-adapter-utils/src/types.ts | 26 ++++---- .../js/smoke-test-js/src/libquery/libquery.ts | 66 +++++++++++-------- query-engine/driver-adapters/src/result.rs | 8 +-- 8 files changed, 89 insertions(+), 72 deletions(-) diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index 2c81144c812b..766be38b27e4 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -1,5 +1,5 @@ mod conversion; -pub mod error; +mod error; use crate::{ ast::{Query, Value}, @@ -27,6 +27,8 @@ use tokio_postgres::{ }; use url::{Host, Url}; +pub use error::PostgresError; + pub(crate) const DEFAULT_SCHEMA: &str = "public"; /// The underlying postgres driver. 
Only available with the `expose-drivers` diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 22037d443c35..c7c78a24772e 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,7 +6,7 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; -pub use crate::connector::postgres::error::PostgresError; +pub use crate::connector::postgres::PostgresError; #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index c86b8d88bef0..e8fe40ada22f 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -67,7 +67,7 @@ class NeonWsQueryable extends NeonQ debug('Error in performIO: %O', e) if (e && e.code) { return err({ - kind: 'PostgresError', + kind: 'Postgres', code: e.code, severity: e.severity, message: e.message, @@ -84,10 +84,7 @@ class NeonWsQueryable extends NeonQ class NeonTransaction extends NeonWsQueryable implements Transaction { finished = false - constructor( - client: neon.PoolClient, - readonly options: TransactionOptions, - ) { + constructor(client: neon.PoolClient, readonly options: TransactionOptions) { super(client) } diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index a049b59a0740..c34050778c39 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -1,5 +1,5 @@ import type pg from 'pg' -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, err, ok } from '@prisma/driver-adapter-utils' import type { DriverAdapter, Query, @@ -28,18 +28,17 @@ class PgQueryable implements Quer const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows } = await this.performIO(query) - - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows, - } - - return ok(resultSet) + const ioResult = await this.performIO(query) + return ioResult.map(({ fields, rows }) => { + const columns = fields.map((field) => field.name) + const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) + + return { + columnNames: columns, + columnTypes, + rows, + } + }) } /** @@ -51,10 +50,8 @@ class PgQueryable implements Quer const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowCount: rowsAffected } = await this.performIO(query) - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return ok(rowsAffected ?? 0) + return (await this.performIO(query)).map(({ rowCount: rowsAffected }) => rowsAffected ?? 0) } /** @@ -62,15 +59,26 @@ class PgQueryable implements Quer * Should the query fail due to a connection error, the connection is * marked as unhealthy. 
*/ - private async performIO(query: Query) { + private async performIO(query: Query): Promise>> { const { sql, args: values } = query try { const result = await this.client.query({ text: sql, values, rowMode: 'array' }) - return result + return ok(result) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) + if (e && e.code) { + return err({ + kind: 'Postgres', + code: e.code, + severity: e.severity, + message: e.message, + detail: e.detail, + column: e.column, + hint: e.hint, + }) + } throw error } } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts index aee18197e291..1e3aa36210cf 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts @@ -60,7 +60,7 @@ function wrapAsync( return await fn(...args) } catch (error) { const id = registry.registerNewError(error) - return err({ kind: 'GenericJsError', id }) + return err({ kind: 'GenericJs', id }) } } } @@ -74,7 +74,7 @@ function wrapSync( return fn(...args) } catch (error) { const id = registry.registerNewError(error) - return err({ kind: 'GenericJsError', id }) + return err({ kind: 'GenericJs', id }) } } } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 65fa002dcc3a..42f1b0513076 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -33,18 +33,20 @@ export type Query = { args: Array } -export type Error = { - kind: 'GenericJsError' - id: number -} | { - kind: 'PostgresError' - code: string, - severity: string - message: string - detail: string | undefined - column: string | undefined - hint: string | undefined -} +export type Error = + | { + kind: 'GenericJs' + id: number + } + | { + kind: 'Postgres' + code: string + severity: string + message: string + detail: string | undefined + column: string | undefined + hint: string | undefined + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 4cdde4515615..61d239ea42d6 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -4,9 +4,12 @@ import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' import { createQueryFn, initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string, supportsTransactions = true) { +export function smokeTestLibquery( + adapter: ErrorCapturingDriverAdapter, + prismaSchemaRelativePath: string, + supportsTransactions = true, +) { const engine = initQueryEngine(adapter, prismaSchemaRelativePath) const flavour = adapter.flavour @@ -263,7 +266,7 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) it('create explicit transaction', async () => { - if(!supportsTransactions) return + if (!supportsTransactions) return const args = { isolation_level: 
'Serializable', max_wait: 5000, timeout: 15000 } const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') @@ -298,7 +301,7 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }, }) - const result = await doQuery({ + await doQuery({ modelName: 'Unique', action: 'createOne', query: { @@ -310,24 +313,31 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }, }, }) - console.log('[nodejs] error result1', JSON.stringify(result, null, 2)) - const result2 = await doQuery({ + const promise = doQuery({ modelName: 'Unique', action: 'createOne', query: { arguments: { - data: { email: 'duplicate@example.com' } + data: { email: 'duplicate@example.com' }, }, selection: { $scalars: true, }, }, }) - console.log('[nodejs] error result2', JSON.stringify(result2, null, 2)) - - // TODO assert that result2 includes `errors.error` (which should currently only pass on neon:ws) - + + if (flavour === 'postgres') { + const result = await promise + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') + } else { + await assert.rejects(promise, (err) => { + assert(typeof err === 'object' && err !== null) + assert.match(err['message'], /unique/i) + return true + }) + } }) describe('read scalar and non scalar types', () => { @@ -395,24 +405,22 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) } else if (['sqlite'].includes(flavour)) { it('sqlite', async () => { - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "int_column": true, - "bigint_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "text_column": true, - "datetime_column": true, - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + int_column: true, + bigint_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + text_column: true, + datetime_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) }) } else { throw new Error(`Missing test for flavour ${flavour}`) diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index fc6f52bd2743..10bdb8a4aecb 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -19,11 +19,11 @@ pub struct PostgresErrorDef { /// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception - GenericJsError { + GenericJs { id: i32, }, - PostgresError(#[serde(with = "PostgresErrorDef")] PostgresError), + Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), // in the future, expected errors that map to known user errors with PXXX codes will also go here } @@ -38,8 +38,8 @@ impl FromNapiValue for DriverAdapterError { impl From for QuaintError { fn from(value: DriverAdapterError) -> Self { match value { - DriverAdapterError::GenericJsError { id } => QuaintError::external_error(id), - DriverAdapterError::PostgresError(e) => e.into(), + DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), + DriverAdapterError::Postgres(e) => e.into(), // in future, more error types would be 
added and we'll need to convert them to proper QuaintErrors here } } From cdd7f4c02d91616346ae555ef679e66af57a78b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 09:47:40 +0200 Subject: [PATCH 15/67] chore(deps): bump undici in /query-engine/driver-adapters/js (#4372) Bumps [undici](https://github.com/nodejs/undici) from 5.23.0 to 5.26.2. - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.23.0...v5.26.2) --- updated-dependencies: - dependency-name: undici dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../connector-test-kit-executor/package.json | 2 +- .../driver-adapters/js/pnpm-lock.yaml | 31 +++++++------------ .../js/smoke-test-js/package.json | 2 +- 3 files changed, 14 insertions(+), 21 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index 4a5f093388e6..2a0d16bd4ccf 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -23,6 +23,6 @@ "@prisma/driver-adapter-utils": "workspace:*", "@types/pg": "^8.10.2", "pg": "^8.11.3", - "undici": "^5.23.0" + "undici": "^5.26.2" } } diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 89dbc4ee3d38..3f7f13d3ff6a 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -106,8 +106,8 @@ importers: specifier: ^8.11.3 version: 8.11.3 undici: - specifier: ^5.23.0 - version: 5.23.0 + specifier: ^5.26.2 + version: 5.26.2 driver-adapter-utils: dependencies: @@ -155,8 +155,8 @@ importers: specifier: ^1.13.1 version: 1.13.1 undici: - specifier: ^5.23.0 - version: 5.23.0 + specifier: ^5.26.2 + version: 5.26.2 devDependencies: '@types/node': specifier: ^20.5.1 @@ -395,6 +395,11 @@ packages: dev: true optional: true + /@fastify/busboy@2.0.0: + resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} + engines: {node: '>=14'} + dev: false + /@jridgewell/gen-mapping@0.3.3: resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} engines: {node: '>=6.0.0'} @@ -667,13 +672,6 @@ packages: load-tsconfig: 0.2.5 dev: true - /busboy@1.6.0: - resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} - engines: {node: '>=10.16.0'} - dependencies: - streamsearch: 1.1.0 - dev: false - /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -1361,11 +1359,6 @@ packages: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} - /streamsearch@1.1.0: - resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} - engines: {node: '>=10.0.0'} - dev: false - /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} engines: {node: '>=6'} @@ -1487,11 
+1480,11 @@ packages: hasBin: true dev: true - /undici@5.23.0: - resolution: {integrity: sha512-1D7w+fvRsqlQ9GscLBwcAJinqcZGHUKjbOmXdlE/v8BvEGXjeWAax+341q44EuTcHXXnfyKNbKRq4Lg7OzhMmg==} + /undici@5.26.2: + resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} engines: {node: '>=14.0'} dependencies: - busboy: 1.6.0 + '@fastify/busboy': 2.0.0 dev: false /web-streams-polyfill@3.2.1: diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 27d4220f41bc..31362c1cc873 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -55,7 +55,7 @@ "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", - "undici": "^5.23.0" + "undici": "^5.26.2" }, "devDependencies": { "@types/node": "^20.5.1", From 2e1051d3e62163b3a4de20caf5bfa092322cb073 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 23 Oct 2023 15:10:58 +0200 Subject: [PATCH 16/67] Driver adapters: Support BIT_ARRAY, CIDR_ARRAY, INET_ARRAY, OID_ARRAY, VARBIT_ARRAY in pg and neon (#4363) * Support BIT_ARRAY, CIDR_ARRAY, INET_ARRAY, OID_ARRAY, VARBIT_ARRAY in pg and neon * INT8_ARRAY * It's ok to reduce precision a little bit * Null lists * Allow tests to drift in the different drivers' implementation of bigdecimal * revert changes to decima.rs test * Adapt comment --- .../js/adapter-neon/src/conversion.ts | 32 ++++++++++++++--- .../js/adapter-pg/src/conversion.ts | 34 ++++++++++++++++--- .../src/conversion/postgres.rs | 7 ++-- 3 files changed, 59 insertions(+), 14 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 9f6486362d78..78f285240599 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -6,25 +6,34 @@ const ScalarColumnType = types.builtins /** * PostgreSQL array column types (not defined in ScalarColumnType). 
+ * + * See the semantics of each of this code in: + * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat */ const ArrayColumnType = { + BIT_ARRAY: 1561, BOOL_ARRAY: 1000, BYTEA_ARRAY: 1001, BPCHAR_ARRAY: 1014, CHAR_ARRAY: 1002, + CIDR_ARRAY: 651, DATE_ARRAY: 1182, FLOAT4_ARRAY: 1021, FLOAT8_ARRAY: 1022, + INET_ARRAY: 1041, INT2_ARRAY: 1005, INT4_ARRAY: 1007, + INT8_ARRAY: 1016, JSONB_ARRAY: 3807, JSON_ARRAY: 199, MONEY_ARRAY: 791, NUMERIC_ARRAY: 1231, + OID_ARRAY: 1028, TEXT_ARRAY: 1009, TIMESTAMP_ARRAY: 1115, TIME_ARRAY: 1183, UUID_ARRAY: 2951, + VARBIT_ARRAY: 1563, VARCHAR_ARRAY: 1015, XML_ARRAY: 143, } @@ -90,9 +99,13 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BooleanArray case ArrayColumnType.CHAR_ARRAY: return ColumnTypeEnum.CharArray + case ArrayColumnType.BPCHAR_ARRAY: case ArrayColumnType.TEXT_ARRAY: case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.VARBIT_ARRAY: + case ArrayColumnType.BIT_ARRAY: + case ArrayColumnType.INET_ARRAY: + case ArrayColumnType.CIDR_ARRAY: case ArrayColumnType.XML_ARRAY: return ColumnTypeEnum.TextArray case ArrayColumnType.DATE_ARRAY: @@ -108,7 +121,9 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BytesArray case ArrayColumnType.UUID_ARRAY: return ColumnTypeEnum.UuidArray - + case ArrayColumnType.INT8_ARRAY: + case ArrayColumnType.OID_ARRAY: + return ColumnTypeEnum.Int64Array default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -251,12 +266,21 @@ function convertBytes(serializedBytes: string): number[] { types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) /* - * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + * BYTEA_ARRAY - arrays of arbitrary raw binary strings */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) + return buffers.map((buf) => buf ? encodeBuffer(buf) : null) }) + +/* BIT_ARRAY, VARBIT_ARRAY */ + +function normalizeBit(bit: string): string { + return bit +} + +types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) +types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index 69e8f1d9dec1..c26b13877927 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -6,25 +6,34 @@ const ScalarColumnType = types.builtins /** * PostgreSQL array column types (not defined in ScalarColumnType). 
+ * + * See the semantics of each of this code in: + * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat */ const ArrayColumnType = { + BIT_ARRAY: 1561, BOOL_ARRAY: 1000, BYTEA_ARRAY: 1001, BPCHAR_ARRAY: 1014, CHAR_ARRAY: 1002, + CIDR_ARRAY: 651, DATE_ARRAY: 1182, FLOAT4_ARRAY: 1021, FLOAT8_ARRAY: 1022, + INET_ARRAY: 1041, INT2_ARRAY: 1005, INT4_ARRAY: 1007, + INT8_ARRAY: 1016, JSONB_ARRAY: 3807, JSON_ARRAY: 199, MONEY_ARRAY: 791, NUMERIC_ARRAY: 1231, + OID_ARRAY: 1028, TEXT_ARRAY: 1009, TIMESTAMP_ARRAY: 1115, TIME_ARRAY: 1183, UUID_ARRAY: 2951, + VARBIT_ARRAY: 1563, VARCHAR_ARRAY: 1015, XML_ARRAY: 143, } @@ -90,9 +99,13 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BooleanArray case ArrayColumnType.CHAR_ARRAY: return ColumnTypeEnum.CharArray + case ArrayColumnType.BPCHAR_ARRAY: case ArrayColumnType.TEXT_ARRAY: case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.VARBIT_ARRAY: + case ArrayColumnType.BIT_ARRAY: + case ArrayColumnType.INET_ARRAY: + case ArrayColumnType.CIDR_ARRAY: case ArrayColumnType.XML_ARRAY: return ColumnTypeEnum.TextArray case ArrayColumnType.DATE_ARRAY: @@ -108,7 +121,9 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BytesArray case ArrayColumnType.UUID_ARRAY: return ColumnTypeEnum.UuidArray - + case ArrayColumnType.INT8_ARRAY: + case ArrayColumnType.OID_ARRAY: + return ColumnTypeEnum.Int64Array default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -251,12 +266,21 @@ function convertBytes(serializedBytes: string): number[] { types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) /* - * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + * BYTEA_ARRAY - arrays of arbitrary raw binary strings */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) -}) \ No newline at end of file + return buffers.map((buf) => buf ? 
encodeBuffer(buf) : null) +}) + +/* BIT_ARRAY, VARBIT_ARRAY */ + +function normalizeBit(bit: string): string { + return bit +} + +types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) +types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 77e79f549d06..21b1ec6b2fb9 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -37,11 +37,8 @@ pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result<Vec<JSArg>> (quaint::ValueType::Bytes(bytes), _) => match bytes { Some(bytes) => JSArg::Buffer(bytes.to_vec()), None => JsonValue::Null.into(), }, - (quaint_value @ quaint::ValueType::Numeric(bd), _) => match bd { - Some(bd) => match bd.to_string().parse::<f64>() { - Ok(double) => JSArg::from(JsonValue::from(double)), - Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), - }, + (quaint::ValueType::Numeric(bd), _) => match bd { + Some(bd) => JSArg::RawString(bd.to_string()), None => JsonValue::Null.into(), }, (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), From 98389c0f3bc634961b2866960d9cd85bb9a138ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 23 Oct 2023 16:35:53 +0200 Subject: [PATCH 17/67] run driver adapter tests in ubuntu-latest rather than buildjet (#4374) --- .github/workflows/query-engine-driver-adapters.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 50f86575a8a7..d52b446b12fb 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -46,7 +46,7 @@ jobs: QUERY_BATCH_SIZE: '10' WORKSPACE_ROOT: ${{ github.workspace }} - runs-on: buildjet-16vcpu-ubuntu-2004 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 From 28291c703da2b149e7feabdebc287319e1bf0a46 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 23 Oct 2023 16:45:18 +0200 Subject: [PATCH 18/67] driver-adapters: Map planetscale/mysql DB errors to Prisma error codes (#4364) Fix prisma/team-orm#439 --- quaint/src/connector/mysql.rs | 2 + quaint/src/connector/mysql/error.rs | 173 ++++++++++-------- quaint/src/error.rs | 1 + .../js/adapter-planetscale/src/planetscale.ts | 54 ++++-- .../js/driver-adapter-utils/src/types.ts | 6 + .../js/smoke-test-js/src/libquery/libquery.ts | 2 +- query-engine/driver-adapters/src/result.rs | 12 +- 7 files changed, 158 insertions(+), 92 deletions(-) diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index e5a1b794ab5b..4b6f27a583da 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -24,6 +24,8 @@ use std::{ use tokio::sync::Mutex; use url::{Host, Url}; +pub use error::MysqlError; + /// The underlying MySQL driver. Only available with the `expose-drivers` /// Cargo feature. 
#[cfg(feature = "expose-drivers")] diff --git a/quaint/src/connector/mysql/error.rs b/quaint/src/connector/mysql/error.rs index 8b381e1581bb..dd7c3d3bfa66 100644 --- a/quaint/src/connector/mysql/error.rs +++ b/quaint/src/connector/mysql/error.rs @@ -1,22 +1,29 @@ use crate::error::{DatabaseConstraint, Error, ErrorKind}; use mysql_async as my; -impl From for Error { - fn from(e: my::Error) -> Error { - use my::ServerError; +pub struct MysqlError { + pub code: u16, + pub message: String, + pub state: String, +} - match e { - my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { - message: err.to_string(), - }) - .build(), - my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { - Error::builder(ErrorKind::ConnectionClosed).build() - } - my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), - my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), - my::Error::Server(ServerError { ref message, code, .. }) if code == 1062 => { - let constraint = message +impl From<&my::ServerError> for MysqlError { + fn from(value: &my::ServerError) -> Self { + MysqlError { + code: value.code, + message: value.message.to_owned(), + state: value.state.to_owned(), + } + } +} + +impl From for Error { + fn from(error: MysqlError) -> Self { + let code = error.code; + match code { + 1062 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -29,12 +36,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1451 || code == 1452 => { - let constraint = message + 1451 | 1452 => { + let constraint = error + .message .split_whitespace() .nth(17) .and_then(|s| s.split('`').nth(1)) @@ -45,12 +53,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1263 => { - let constraint = message + 1263 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -62,22 +71,23 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1264 => { + 1264 => { let mut builder = Error::builder(ErrorKind::ValueOutOfRange { - message: message.clone(), + message: error.message.clone(), }); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1364 || code == 1048 => { - let constraint = message + 1364 | 1048 => { + let constraint = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -88,12 +98,13 @@ impl From<my::Error> for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1049 => { - let db_name = message + 1049 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -103,12 +114,13 @@ impl From<my::Error> for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1007 => { - let db_name = message + 1007 => { + let db_name = error + .message .split_whitespace() .nth(3) .and_then(|s| s.split('\'').nth(1)) @@ -118,12 +130,13 @@ impl From<my::Error> for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1044 => { - let db_name = message + 1044 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -133,12 +146,13 @@ impl From<my::Error> for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1045 => { - let user = message + 1045 => { + let user = error + .message .split_whitespace() .nth(4) .and_then(|s| s.split('@').next()) @@ -149,12 +163,13 @@ impl From<my::Error> for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1146 => { - let table = message + 1146 => { + let table = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -165,12 +180,13 @@ impl From<my::Error> for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1054 => { - let column = message + 1054 => { + let column = error + .message .split_whitespace() .nth(2) .and_then(|s| s.split('\'').nth(1)) @@ -179,68 +195,77 @@ impl From<my::Error> for Error { let mut builder = Error::builder(ErrorKind::ColumnNotFound { column }); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1406 => { - let column = message.split_whitespace().flat_map(|s| s.split('\'')).nth(6).into(); + 1406 => { + let column = error + .message + .split_whitespace() + .flat_map(|s| s.split('\'')) + .nth(6) + .into(); let kind = ErrorKind::LengthMismatch { column }; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1191 => { + 1191 => { let kind = ErrorKind::MissingFullTextSearchIndex; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - ref state, - }) if code == 1213 => { + 1213 => { let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - ref state, - }) => { + _ => { let kind = ErrorKind::QueryError( - my::Error::Server(ServerError { - message: message.clone(), + my::Error::Server(my::ServerError { + message: error.message.clone(), code, - state: state.clone(), + state: error.state.clone(), }) .into(), ); let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } + } + } +} + +impl From<my::Error> for Error { + fn from(e: my::Error) -> Error { + match e { + my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { + message: err.to_string(), + }) + .build(), + my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { + Error::builder(ErrorKind::ConnectionClosed).build() + } + my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), + my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), + my::Error::Server(ref server_error) => { + let mysql_error: MysqlError = server_error.into(); + mysql_error.into() + } e => Error::builder(ErrorKind::QueryError(e.into())).build(), } } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index c7c78a24772e..0460b77100fb 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,6 +6,7 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; +pub use crate::connector::mysql::MysqlError; pub use crate::connector::postgres::PostgresError; #[derive(Debug, PartialEq, Eq)] diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index cffb00482003..5a52851112b2 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ 
b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,5 +1,5 @@ import type planetScale from '@planetscale/database' -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, err, ok } from '@prisma/driver-adapter-utils' import type { DriverAdapter, ResultSet, @@ -36,17 +36,16 @@ class PlanetScaleQueryable - const { fields, insertId: lastInsertId, rows } = await this.performIO(query) - const columns = fields.map((field) => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - - return ok(resultSet) + const ioResult = await this.performIO(query) + return ioResult.map(({ fields, insertId: lastInsertId, rows }) => { + const columns = fields.map((field) => field.name) + return { + columnNames: columns, + columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), + rows: rows as ResultSet['rows'], + lastInsertId, + } + }) } /** @@ -58,8 +57,7 @@ class PlanetScaleQueryable + return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) } /** @@ -67,22 +65,46 @@ class PlanetScaleQueryable - private async performIO(query: Query) { + private async performIO(query: Query): Promise<Result<planetScale.ExecutedQuery>> { const { sql, args: values } = query try { const result = await this.client.execute(sql, values, { as: 'array', }) - return result + return ok(result) } catch (e) { const error = e as Error + if (error.name === 'DatabaseError') { + const parsed = parseErrorMessage(error.message) + if (parsed) { + return err({ + kind: 'Mysql', + ...parsed, + }) + } + } debug('Error in performIO: %O', error) throw error } } } +function parseErrorMessage(message: string) { + const match = message.match( + /target: (?:.+?) vttablet: (?<message>.+?) \(errno (?<code>\d+)\) \(sqlstate (?<state>.+?)\)/, + ) + + if (!match || !match.groups) { + return undefined + } + return { + code: Number(match.groups.code), + message: match.groups.message, + state: match.groups.state, + } +} + class PlanetScaleTransaction extends PlanetScaleQueryable<planetScale.Transaction> implements Transaction { finished = false diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 42f1b0513076..104b23d233c5 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -47,6 +47,12 @@ export type Error = column: string | undefined hint: string | undefined } + | { + kind: 'Mysql' + code: number + message: string + state: string + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 61d239ea42d6..e94eacbae328 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -327,7 +327,7 @@ export function smokeTestLibquery( }, }) - if (flavour === 'postgres') { + if (flavour === 'postgres' || flavour === 'mysql') { const result = await promise console.log('[nodejs] error result', JSON.stringify(result, null, 2)) assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index 10bdb8a4aecb..08397d834ed0 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,5 +1,5 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, 
PostgresError}; +use quaint::error::{Error as QuaintError, MysqlError, PostgresError}; use serde::Deserialize; #[derive(Deserialize)] @@ -13,6 +13,14 @@ pub struct PostgresErrorDef { hint: Option<String>, } +#[derive(Deserialize)] +#[serde(remote = "MysqlError")] +pub struct MysqlErrorDef { + pub code: u16, + pub message: String, + pub state: String, +} + #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors @@ -24,6 +32,7 @@ pub(crate) enum DriverAdapterError { }, Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), + Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), // in the future, expected errors that map to known user errors with PXXX codes will also go here } @@ -40,6 +49,7 @@ impl From<DriverAdapterError> for QuaintError { match value { DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), DriverAdapterError::Postgres(e) => e.into(), + DriverAdapterError::Mysql(e) => e.into(), // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } From 475c616176945d72f4330c92801f0c5e6398dc0f Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 23 Oct 2023 17:40:38 +0200 Subject: [PATCH 19/67] driver-adapters: Map libsql errors to Prisma errors (#4362) Similar approach to what we did with Neon: raw error data is returned from the driver adapter in case of a DB error, and Quaint's error handling code is then reused for adapters too. Close prisma/team-orm#393 --- quaint/src/connector/sqlite.rs | 2 + quaint/src/connector/sqlite/error.rs | 211 ++++++++---------- quaint/src/error.rs | 1 + .../js/adapter-libsql/src/libsql.ts | 48 ++-- .../js/driver-adapter-utils/src/types.ts | 8 + .../driver-adapters/js/pnpm-lock.yaml | 44 ++-- .../js/smoke-test-js/src/libquery/libquery.ts | 18 +- query-engine/driver-adapters/src/result.rs | 12 +- 8 files changed, 170 insertions(+), 174 deletions(-) diff --git a/quaint/src/connector/sqlite.rs b/quaint/src/connector/sqlite.rs index 6db49523c80a..3a1ef72b4883 100644 --- a/quaint/src/connector/sqlite.rs +++ b/quaint/src/connector/sqlite.rs @@ -1,6 +1,8 @@ mod conversion; mod error; +pub use error::SqliteError; + pub use rusqlite::{params_from_iter, version as sqlite_version}; use super::IsolationLevel; diff --git a/quaint/src/connector/sqlite/error.rs b/quaint/src/connector/sqlite/error.rs index fa8b83f3f28a..c10b335cb3c0 100644 --- a/quaint/src/connector/sqlite/error.rs +++ b/quaint/src/connector/sqlite/error.rs @@ -1,69 +1,45 @@ +use std::fmt; + use crate::error::*; use rusqlite::ffi; use rusqlite::types::FromSqlError; -impl From<rusqlite::Error> for Error { - fn from(e: rusqlite::Error) -> Error { - match e { - rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::<Error>() { - Ok(error) => *error, - Err(error) => { - let mut builder = Error::builder(ErrorKind::QueryError(error)); - - builder.set_original_message("Could not interpret parameters in an SQLite query."); - - builder.build() - } - }, - rusqlite::Error::InvalidQuery => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - builder.set_original_message( - "Could not interpret the query or its parameters. 
Check the syntax and parameter types.", - ); - - builder.build() - } - rusqlite::Error::ExecuteReturnedResults => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_message("Execute returned results, which is not allowed in SQLite."); - - builder.build() - } - - rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), +#[derive(Debug)] +pub struct SqliteError { + pub extended_code: i32, + pub message: Option<String>, +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 2067, - }, - Some(description), - ) => { - let constraint = description - .split(": ") - .nth(1) - .map(|s| s.split(", ")) - .map(|i| i.flat_map(|s| s.split('.').last())) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); +impl fmt::Display for SqliteError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error code {}: {}", + self.extended_code, + ffi::code_to_str(self.extended_code) + ) + } +} - let kind = ErrorKind::UniqueConstraintViolation { constraint }; - let mut builder = Error::builder(kind); +impl std::error::Error for SqliteError {} - builder.set_original_code("2067"); - builder.set_original_message(description); +impl SqliteError { + pub fn new(extended_code: i32, message: Option<String>) -> Self { + Self { extended_code, message } + } - builder.build() - } + pub fn primary_code(&self) -> i32 { + self.extended_code & 0xFF + } +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(description), - ) => { +impl From<SqliteError> for Error { + fn from(error: SqliteError) -> Self { + match error { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_UNIQUE | ffi::SQLITE_CONSTRAINT_PRIMARYKEY, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -75,19 +51,16 @@ impl From<rusqlite::Error> for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1555"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1299, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_NOTNULL, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -99,64 +72,41 @@ impl From<rusqlite::Error> for Error { let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1299"); - builder.set_original_message(description); - - builder.build() - } - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 787, - }, - Some(description), - ) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::ForeignKey, - }); - - builder.set_original_code("787"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1811, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_FOREIGNKEY | ffi::SQLITE_CONSTRAINT_TRIGGER, + message: Some(description), + } => { let mut builder = 
Error::builder(ErrorKind::ForeignKeyConstraintViolation { constraint: DatabaseConstraint::ForeignKey, }); - builder.set_original_code("1811"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::DatabaseBusy, - extended_code, - }, - description, - ) => { + SqliteError { extended_code, message } if error.primary_code() == ffi::SQLITE_BUSY => { let mut builder = Error::builder(ErrorKind::SocketTimeout); builder.set_original_code(format!("{extended_code}")); - if let Some(description) = description { + if let Some(description) = message { builder.set_original_message(description); } builder.build() } - rusqlite::Error::SqliteFailure(ffi::Error { extended_code, .. }, ref description) => match description { + SqliteError { + extended_code, + ref message, + } => match message { Some(d) if d.starts_with("no such table") => { let table = d.split(": ").last().into(); let kind = ErrorKind::TableDoesNotExist { table }; @@ -188,8 +138,8 @@ impl From<rusqlite::Error> for Error { builder.build() } _ => { - let description = description.as_ref().map(|d| d.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + let description = message.as_ref().map(|d| d.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(error.into())); builder.set_original_code(format!("{extended_code}")); if let Some(description) = description { @@ -199,31 +149,50 @@ impl From<rusqlite::Error> for Error { builder.build() } }, + } + } +} -impl From<rusqlite::Error> for Error { +impl From<rusqlite::Error> for Error { + fn from(e: rusqlite::Error) -> Error { + match e { + rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::<Error>() { + Ok(error) => *error, + Err(error) => { + let mut builder = Error::builder(ErrorKind::QueryError(error)); + builder.set_original_message("Could not interpret parameters in an SQLite query."); + builder.build() + } + }, + rusqlite::Error::InvalidQuery => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + builder.set_original_message( + "Could not interpret the query or its parameters. Check the syntax and parameter types.", + ); + + builder.build() + } + rusqlite::Error::ExecuteReturnedResults => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + builder.set_original_message("Execute returned results, which is not allowed in SQLite."); + + builder.build() + } + + rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), + + rusqlite::Error::SqliteFailure(ffi::Error { code: _, extended_code }, message) => { + SqliteError::new(extended_code, message).into() + } + + rusqlite::Error::SqlInputError { + error: ffi::Error { extended_code, .. }, + msg, + .. 
+ } => SqliteError::new(extended_code, Some(msg)).into(), e => Error::builder(ErrorKind::QueryError(e.into())).build(), } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 0460b77100fb..705bb6b37ee0 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -8,6 +8,7 @@ use std::time::Duration; pub use crate::connector::mysql::MysqlError; pub use crate::connector::postgres::PostgresError; +pub use crate::connector::sqlite::SqliteError; #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts index 5d104e8e2949..6528c8f44a8a 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts @@ -1,4 +1,4 @@ -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, ok, err } from '@prisma/driver-adapter-utils' import type { DriverAdapter, Query, @@ -8,7 +8,12 @@ import type { Transaction, TransactionOptions, } from '@prisma/driver-adapter-utils' -import type { InStatement, Client as LibSqlClientRaw, Transaction as LibSqlTransactionRaw } from '@libsql/client' +import type { + InStatement, + Client as LibSqlClientRaw, + Transaction as LibSqlTransactionRaw, + ResultSet as LibSqlResultSet, +} from '@libsql/client' import { Mutex } from 'async-mutex' import { getColumnTypes, mapRow } from './conversion' @@ -33,17 +38,17 @@ class LibSqlQueryable<ClientT extends StdClient | TransactionClient> implements const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { columns, rows, columnTypes: declaredColumnTypes } = await this.performIO(query) - - const columnTypes = getColumnTypes(declaredColumnTypes, rows) + const ioResult = await this.performIO(query) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } + return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { + const columnTypes = getColumnTypes(declaredColumnTypes, rows) - return ok(resultSet) + return { + columnNames: columns, + columnTypes, + rows: rows.map((row) => mapRow(row, columnTypes)), + } + }) } /** @@ -55,8 +60,7 @@ class LibSqlQueryable<ClientT extends StdClient | TransactionClient> implements const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowsAffected } = await this.performIO(query) - return ok(rowsAffected ?? 0) + return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) } /** @@ -64,14 +68,22 @@ class LibSqlQueryable<ClientT extends StdClient | TransactionClient> implements * Should the query fail due to a connection error, the connection is * marked as unhealthy. */ - private async performIO(query: Query) { + private async performIO(query: Query): Promise<Result<LibSqlResultSet>> { const release = await this[LOCK_TAG].acquire() try { const result = await this.client.execute(query as InStatement) - return result + return ok(result) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) + const rawCode = error['rawCode'] ?? 
e.cause?.['rawCode'] + if (typeof rawCode === 'number') { + return err({ + kind: 'Sqlite', + extendedCode: rawCode, + message: error.message, + }) + } throw error } finally { release() @@ -82,11 +94,7 @@ class LibSqlQueryable implements class LibSqlTransaction extends LibSqlQueryable implements Transaction { finished = false - constructor( - client: TransactionClient, - readonly options: TransactionOptions, - readonly unlockParent: () => void, - ) { + constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { super(client) } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 104b23d233c5..92019f81824b 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -53,6 +53,14 @@ export type Error = message: string state: string } + | { + kind: 'Sqlite' + /** + * Sqlite extended error code: https://www.sqlite.org/rescode.html + */ + extendedCode: number + message: string + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 3f7f13d3ff6a..9a82ffdbac63 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -435,21 +435,21 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.23 + libsql: 0.1.28 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate - /@libsql/darwin-arm64@0.1.23: - resolution: {integrity: sha512-+V9aoOrZ47iYbY5NrcS0F2bDOCH407QI0wxAtss0CLOcFxlz/T6Nw0ryLK31GabklJQAmOXIyqkumLfz5HT64w==} + /@libsql/darwin-arm64@0.1.28: + resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} cpu: [arm64] os: [darwin] requiresBuild: true optional: true - /@libsql/darwin-x64@0.1.23: - resolution: {integrity: sha512-toHo7s0HiMl4VCIfjhGXDe9bGWWo78eP8fxIbwU6RlaLO6MNV9fjHY/GjTWccWOwyxcT+q6X/kUc957HnoW3bg==} + /@libsql/darwin-x64@0.1.28: + resolution: {integrity: sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} cpu: [x64] os: [darwin] requiresBuild: true @@ -484,22 +484,29 @@ packages: - bufferutil - utf-8-validate - /@libsql/linux-x64-gnu@0.1.23: - resolution: {integrity: sha512-U11LdjayakOj0lQCHDYkTgUfe4Q+7AjZZh8MzgEDF/9l0bmKNI3eFLWA3JD2Xm98yz65lUx95om0WKOKu5VW/w==} + /@libsql/linux-arm64-gnu@0.1.28: + resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} + cpu: [arm64] + os: [linux] + requiresBuild: true + optional: true + + /@libsql/linux-x64-gnu@0.1.28: + resolution: {integrity: sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} cpu: [x64] os: [linux] requiresBuild: true optional: true - /@libsql/linux-x64-musl@0.1.23: - resolution: {integrity: sha512-8UcCK2sPVzcafHsEmcU5IDp/NxjD6F6JFS5giijsMX5iGgxYQiiwTUMOmSxW0AWBeT4VY5U7G6rG5PC8JSFtfg==} + /@libsql/linux-x64-musl@0.1.28: + resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} cpu: [x64] os: [linux] requiresBuild: true optional: true - /@libsql/win32-x64-msvc@0.1.23: - resolution: {integrity: 
sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==}
     cpu: [x64]
     os: [win32]
     requiresBuild: true
@@ -971,19 +978,20 @@ packages:
   /js-base64@3.7.5:
     resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==}

-  /libsql@0.1.23:
-    resolution: {integrity: sha512-Nf/1B2Glxvcnba4jYFhXcaYmicyBA3RRm0LVwBkTl8UWCIDbX+Ad7c1ecrQwixPLPffWOVxKIqyCNTuUHUkVgA==}
+  /libsql@0.1.28:
+    resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==}
     cpu: [x64, arm64]
     os: [darwin, linux, win32]
     dependencies:
       '@neon-rs/load': 0.0.4
       detect-libc: 2.0.2
     optionalDependencies:
-      '@libsql/darwin-arm64': 0.1.23
-      '@libsql/darwin-x64': 0.1.23
-      '@libsql/linux-x64-gnu': 0.1.23
-      '@libsql/linux-x64-musl': 0.1.23
-      '@libsql/win32-x64-msvc': 0.1.23
+      '@libsql/darwin-arm64': 0.1.28
+      '@libsql/darwin-x64': 0.1.28
+      '@libsql/linux-arm64-gnu': 0.1.28
+      '@libsql/linux-x64-gnu': 0.1.28
+      '@libsql/linux-x64-musl': 0.1.28
+      '@libsql/win32-x64-msvc': 0.1.28

   /lilconfig@2.1.0:
     resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==}
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts
index e94eacbae328..c50ad3e257ab 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts
+++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts
@@ -290,13 +290,13 @@ export function smokeTestLibquery(
   })

   it('expected error (on duplicate insert) as json result (not throwing error)', async () => {
-    // clean up first
     await doQuery({
       modelName: 'Unique',
       action: 'deleteMany',
       query: {
+        arguments: {},
         selection: {
-          count: true,
+          $scalars: true,
         },
       },
     })
@@ -327,17 +327,9 @@ export function smokeTestLibquery(
       },
     })

-    if (flavour === 'postgres' || flavour === 'mysql') {
-      const result = await promise
-      console.log('[nodejs] error result', JSON.stringify(result, null, 2))
-      assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002')
-    } else {
-      await assert.rejects(promise, (err) => {
-        assert(typeof err === 'object' && err !== null)
-        assert.match(err['message'], /unique/i)
-        return true
-      })
-    }
+    const result = await promise
+    console.log('[nodejs] error result', JSON.stringify(result, null, 2))
+    assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002')
   })

   describe('read scalar and non scalar types', () => {
diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs
index 08397d834ed0..c43f66a81e72 100644
--- a/query-engine/driver-adapters/src/result.rs
+++ b/query-engine/driver-adapters/src/result.rs
@@ -1,5 +1,5 @@
 use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue};
-use quaint::error::{Error as QuaintError, MysqlError, PostgresError};
+use quaint::error::{Error as QuaintError, MysqlError, PostgresError, SqliteError};
 use serde::Deserialize;

 #[derive(Deserialize)]
@@ -21,6 +21,13 @@ pub struct MysqlErrorDef {
     pub state: String,
 }

+#[derive(Deserialize)]
+#[serde(remote = "SqliteError", rename_all = "camelCase")]
+pub struct SqliteErrorDef {
+    pub extended_code: i32,
+    pub message: Option<String>,
+}
+
 #[derive(Deserialize)]
 #[serde(tag = "kind")]
 /// Wrapper for JS-side errors
@@ -33,7 +40,7 @@ pub(crate) enum DriverAdapterError {
     Postgres(#[serde(with = "PostgresErrorDef")] PostgresError),

     Mysql(#[serde(with = "MysqlErrorDef")] MysqlError),

-    // in the future, expected errors that map to known user errors with PXXX codes will also go here
+    Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError),
 }

 impl FromNapiValue for DriverAdapterError {
@@ -50,6 +57,7 @@ impl From<DriverAdapterError> for QuaintError {
             DriverAdapterError::GenericJs { id } => QuaintError::external_error(id),
             DriverAdapterError::Postgres(e) => e.into(),
             DriverAdapterError::Mysql(e) => e.into(),
+            DriverAdapterError::Sqlite(e) => e.into(),
             // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here
         }
     }

From 39b6c54adcb81cacdaca4648194c925105406ae9 Mon Sep 17 00:00:00 2001
From: Alberto Schiabel
Date: Mon, 23 Oct 2023 19:31:01 +0200
Subject: [PATCH 20/67] chore(docs): Add "how to" README sections to public-facing Driver Adapters (#4377)

* chore(driver-adapters): add README for PlanetScale

* chore(driver-adapters): add README for Neon

* chore(driver-adapters): add README for LibSQL

* chore: fix sentence removing env var reference

---
 .../js/adapter-libsql/README.md               | 90 +++++++++++++++++++
 .../driver-adapters/js/adapter-neon/README.md | 68 +++++++++++++-
 .../js/adapter-planetscale/README.md          | 67 +++++++++++++-
 3 files changed, 223 insertions(+), 2 deletions(-)

diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md
index 219200af2080..5ca415ea8ec9 100644
--- a/query-engine/driver-adapters/js/adapter-libsql/README.md
+++ b/query-engine/driver-adapters/js/adapter-libsql/README.md
@@ -3,3 +3,93 @@
 Prisma driver adapter for Turso and libSQL.

 See https://prisma.io/turso for details.
+
+The following usage tutorial is valid for Prisma 5.4.2 and later versions.
+
+## How to install
+
+After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver.
+
+```sh
+npm install @prisma/adapter-libsql
+npm install @libsql/client
+```
+
+Make sure your Turso database connection string and authentication token are copied over to your `.env` file. The connection string will start with `libsql://`.
+
+```env
+# .env
+TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..."
+TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io"
+```
+
+You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature.
+
+```prisma
+// schema.prisma
+generator client {
+  provider        = "prisma-client-js"
+  previewFeatures = ["driverAdapters"]
+}
+
+datasource db {
+  provider = "sqlite"
+  url      = "file:./dev.db"
+}
+```
+
+Now run `npx prisma generate` to re-generate Prisma Client.
+
+## How to set up migrations
+
+As Turso needs to sync between a local sqlite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should:
+
+1. Create a baseline migration
+
+```sh
+npx prisma migrate diff --from-empty \
+  --to-schema-datamodel prisma/schema.prisma \
+  --script > baseline.sql
+```
+
+2. 
Apply the migration to your Turso database + +```sh +turso db shell turso-prisma < baseline.sql +``` + +## How to use + +In TypeScript, you will need to: + +1. Import packages +2. Set up the libSQL serverless database driver +3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver +4. Pass the driver adapter to the Prisma Client instance + +```typescript +// Import needed packages +import { PrismaClient } from '@prisma/client'; +import { PrismaLibSQL } from '@prisma/adapter-libsql'; +import { createClient } from '@libsql/client'; + +// Setup +const connectionString = `${process.env.TURSO_DATABASE_URL}`; +const authToken = `${process.env.TURSO_AUTH_TOKEN}`; + +// Init prisma client +const libsql = createClient({ + url: connectionString, + authToken, +}); +const adapter = new PrismaLibSQL(libsql); +const prisma = new PrismaClient({ adapter }); + +// Use Prisma Client as normal +``` + +Your Prisma Client instance now uses a **single** remote Turso database. +You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. +We encourage you to create an issue if you find something missing or run into a bug. + +If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md index 8af259ab74c1..f36f44c6bca4 100644 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ b/query-engine/driver-adapters/js/adapter-neon/README.md @@ -2,4 +2,70 @@ Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). -See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. +See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. + +The following usage tutorial is valid for Prisma 5.4.2 and later versions. + +## How to install + +After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. + +```sh +npm install @prisma/adapter-neon +npm install @neondatabase/serverless +npm install ws +``` + +Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. The connection string will start with `postgres://`. + +```env +# .env +DATABASE_URL="postgres://..." +``` + +Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. + +```prisma +// schema.prisma +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} +``` + +Now run `npx prisma generate` to re-generate Prisma Client. + +## How to use + +In TypeScript, you will need to: + +1. Import packages +2. Set up the Neon serverless database driver +3. Instantiate the Prisma Neon adapter with the Neon serverless database driver +4. 
Pass the driver adapter to the Prisma Client instance
+
+```typescript
+// Import needed packages
+import { Pool, neonConfig } from '@neondatabase/serverless';
+import { PrismaNeon } from '@prisma/adapter-neon';
+import { PrismaClient } from '@prisma/client';
+import ws from 'ws';
+
+// Setup
+neonConfig.webSocketConstructor = ws;
+const connectionString = `${process.env.DATABASE_URL}`;
+
+// Init prisma client
+const pool = new Pool({ connectionString });
+const adapter = new PrismaNeon(pool);
+const prisma = new PrismaClient({ adapter });
+
+// Use Prisma Client as normal
+```
+
+Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety. If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development.
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md
index 8e145c07c098..a4cdc132036a 100644
--- a/query-engine/driver-adapters/js/adapter-planetscale/README.md
+++ b/query-engine/driver-adapters/js/adapter-planetscale/README.md
@@ -2,5 +2,70 @@

 Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js).

-See https://github.com/prisma/prisma/releases/tag/5.4.0 for details.
+See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details.
+The following usage tutorial is valid for Prisma 5.4.2 and later versions.
+
+## How to install
+
+After [getting started with PlanetScale](https://planetscale.com/docs/tutorials/planetscale-quick-start-guide), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver.
+
+```sh
+npm install @prisma/adapter-planetscale
+npm install @planetscale/database
+npm install undici
+```
+
+Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`.
+
+```env
+# .env
+DATABASE_URL="mysql://..."
+```
+
+You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature.
+
+```prisma
+// schema.prisma
+generator client {
+  provider        = "prisma-client-js"
+  previewFeatures = ["driverAdapters"]
+}
+
+datasource db {
+  provider     = "mysql"
+  url          = env("DATABASE_URL")
+  relationMode = "prisma"
+}
+```
+
+Now run `npx prisma generate` to re-generate Prisma Client.
+
+## How to use
+
+In TypeScript, you will need to:
+
+1. Import packages
+2. Set up the PlanetScale serverless database driver
+3. Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver
+4. 
Pass the driver adapter to the Prisma Client instance + +```typescript +// Import needed packages +import { connect } from '@planetscale/database'; +import { PrismaPlanetScale } from '@prisma/adapter-planetscale'; +import { PrismaClient } from '@prisma/client'; +import { fetch as undiciFetch } from 'undici'; + +// Setup +const connectionString = `${process.env.DATABASE_URL}`; + +// Init prisma client +const connection = connect({ url: connectionString, fetch: undiciFetch }); +const adapter = new PrismaPlanetScale(connection); +const prisma = new PrismaClient({ adapter }); + +// Use Prisma Client as normal +``` + +Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [`connection reliability and performance`](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma’s connection pool, but Prisma will continue to handle error handling and type safety. If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development. From 2450f885b75e29f5a6d7cde46d6a3e05290e5b33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Tue, 24 Oct 2023 16:11:10 +0200 Subject: [PATCH 21/67] chore(docker): switch restart to unless-stopped + auto-formatting (#4369) --- .../workflows/publish-prisma-schema-wasm.yml | 10 +- docker-compose.yml | 206 +++++++++--------- quaint/docker-compose.yml | 34 +-- 3 files changed, 127 insertions(+), 123 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index e166c05e5841..f453811009ce 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -12,7 +12,7 @@ on: required: true npmDistTag: required: true - default: "latest" + default: 'latest' jobs: build: @@ -21,7 +21,7 @@ jobs: steps: - name: Print input env: - THE_INPUT: "${{ toJson(github.event.inputs) }}" + THE_INPUT: '${{ toJson(github.event.inputs) }}' run: | echo $THE_INPUT @@ -42,7 +42,7 @@ jobs: - uses: actions/setup-node@v3 with: - node-version: "14.x" + node-version: '14.x' - name: Set up NPM token run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc @@ -65,6 +65,6 @@ jobs: if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 env: - SLACK_TITLE: "prisma-schema-wasm publishing failed :x:" - SLACK_COLOR: "#FF0000" + SLACK_TITLE: 'prisma-schema-wasm publishing failed :x:' + SLACK_COLOR: '#FF0000' SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/docker-compose.yml b/docker-compose.yml index fad49d836cde..fc585adabafe 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,113 +1,115 @@ -version: "3" +version: '3' services: cockroach_23_1: image: prismagraphql/cockroachdb-custom:23.1 + restart: unless-stopped command: | start-single-node --insecure ports: - - "26260:26257" + - '26260:26257' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases cockroach_22_1_0: image: prismagraphql/cockroachdb-custom:22.1.0 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26257:26257" + - '26257:26257' networks: - databases 
cockroach_21_2_0_patched: image: prismagraphql/cockroachdb-custom:21.2.0-patched - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26258:26257" + - '26258:26257' networks: - databases pgbouncer: image: brainsam/pgbouncer:latest - restart: always + restart: unless-stopped environment: - DB_HOST: "postgres11" - DB_PORT: "5432" - DB_USER: "postgres" - DB_PASSWORD: "prisma" - POOL_MODE: "transaction" - MAX_CLIENT_CONN: "1000" + DB_HOST: 'postgres11' + DB_PORT: '5432' + DB_USER: 'postgres' + DB_PASSWORD: 'prisma' + POOL_MODE: 'transaction' + MAX_CLIENT_CONN: '1000' networks: - databases ports: - - "6432:6432" + - '6432:6432' postgres9: image: postgres:9.6 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5431:5432" + - '5431:5432' networks: - databases postgres10: image: postgres:10 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5432:5432" + - '5432:5432' networks: - databases postgres11: image: postgres:11 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5433:5432" + - '5433:5432' networks: - databases postgres12: image: postgres:12 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5434:5432" + - '5434:5432' networks: - databases postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5435:5432" + - '5435:5432' networks: - databases neon-postgres13: image: ghcr.io/neondatabase/wsproxy:latest + restart: unless-stopped environment: # the port of the postgres13 within the databases network APPEND_PORT: 'postgres13:5432' @@ -131,50 +133,50 @@ services: - '8085:8085' depends_on: - vitess-test-8_0 - restart: always + restart: unless-stopped healthcheck: - test: [ 'CMD', 'nc', '-z', '127.0.0.1', '8085' ] + test: ['CMD', 'nc', '-z', '127.0.0.1', '8085'] interval: 5s timeout: 2s retries: 20 postgres14: image: postgres:14 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5437:5432" + - '5437:5432' networks: - databases postgres15: image: postgres:15 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5438:5432" + - '5438:5432' networks: - databases mysql-5-6: image: mysql:5.6.50 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3309:3306" + - '3309:3306' networks: - databases tmpfs: /var/lib/mysql @@ -182,14 +184,14 @@ services: mysql-5-7: image: mysql:5.7.32 
command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql @@ -197,33 +199,33 @@ services: mysql-8-0: image: mysql:8.0.28 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb-10-0: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb vitess-test-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33577:33577 environment: @@ -243,7 +245,7 @@ services: vitess-test-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33807:33807 environment: @@ -263,7 +265,7 @@ services: vitess-shadow-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33578:33577 environment: @@ -283,7 +285,7 @@ services: vitess-shadow-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33808:33807 environment: @@ -303,139 +305,140 @@ services: mssql-2017: image: mcr.microsoft.com/mssql/server:2017-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1434:1433" + - '1434:1433' networks: - databases - + mssql-2019: image: mcr.microsoft.com/mssql/server:2019-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mssql-2022: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1435:1433" + - '1435:1433' networks: - databases azure-edge: image: mcr.microsoft.com/azure-sql-edge - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - MSSQL_SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + MSSQL_SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mongo42: image: prismagraphql/mongo-single-replica:4.2.17-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27016 INIT_WAIT_SEC: $INIT_WAIT_SEC networks: - databases ports: - - "27016:27016" + - '27016:27016' mongo44: image: prismagraphql/mongo-single-replica:4.4.3-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo42-single: image: mongo:4.2 - restart: always 
+ restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27016:27017" + - '27016:27017' networks: - databases mongo44-single: image: mongo:4.4 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo5: image: prismagraphql/mongo-single-replica:5.0.3 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27018 INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27018" + - '27018:27018' networks: - databases mongo5-single: image: mongo:5 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27017" + - '27018:27017' networks: - databases mongo-express: image: mongo-express - restart: always + restart: unless-stopped ports: - 8081:8081 environment: - ME_CONFIG_MONGODB_ADMINUSERNAME: "prisma" - ME_CONFIG_MONGODB_ADMINPASSWORD: "prisma" + ME_CONFIG_MONGODB_ADMINUSERNAME: 'prisma' + ME_CONFIG_MONGODB_ADMINPASSWORD: 'prisma' ME_CONFIG_MONGODB_URL: mongodb://prisma:prisma@mongo4-single:27017/ networks: - databases otel: image: jaegertracing/all-in-one:1.35 + restart: unless-stopped environment: - COLLECTOR_OTLP_ENABLED: "true" - COLLECTOR_ZIPKIN_HOST_PORT: ":9411" + COLLECTOR_OTLP_ENABLED: 'true' + COLLECTOR_ZIPKIN_HOST_PORT: ':9411' ports: - 6831:6831/udp - 6832:6832/udp @@ -450,6 +453,7 @@ services: prometheus: image: prom/prometheus + restart: unless-stopped volumes: - ${PWD}/metrics/prometheus:/prometheus-data command: --config.file=/prometheus-data/prometheus.yml diff --git a/quaint/docker-compose.yml b/quaint/docker-compose.yml index ec3c06faa289..47f1a3456a6e 100644 --- a/quaint/docker-compose.yml +++ b/quaint/docker-compose.yml @@ -1,14 +1,14 @@ -version: "3" +version: '3' services: postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - PGDATA: "/pgtmpfs13" + POSTGRES_PASSWORD: 'prisma' + PGDATA: '/pgtmpfs13' ports: - - "5432:5432" + - '5432:5432' networks: - databases tmpfs: /pgtmpfs12 @@ -16,13 +16,13 @@ services: mysql57: image: mysql:5.7 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql5.7 @@ -30,48 +30,48 @@ services: mysql8: image: mysql:8.0.22 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: 
/var/lib/mariadb mssql: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases From de2449110135e91857b477c346b7f74d52d61613 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 24 Oct 2023 16:29:28 +0200 Subject: [PATCH 22/67] chore(deps): update dependency node to v20.8.1 (#4204) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/driver-adapters/js/.nvmrc | 2 +- query-engine/query-engine-wasm/.nvmrc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc index 8c60e1e54f37..6569dfa4f323 100644 --- a/query-engine/driver-adapters/js/.nvmrc +++ b/query-engine/driver-adapters/js/.nvmrc @@ -1 +1 @@ -v20.5.1 +20.8.1 diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc index 8c60e1e54f37..6569dfa4f323 100644 --- a/query-engine/query-engine-wasm/.nvmrc +++ b/query-engine/query-engine-wasm/.nvmrc @@ -1 +1 @@ -v20.5.1 +20.8.1 From f365956fa36e50f1c89d8ffe3997d512ab2d6fec Mon Sep 17 00:00:00 2001 From: Robert Craigie Date: Wed, 25 Oct 2023 15:55:21 +0100 Subject: [PATCH 23/67] fix(qe): correct /status route response body (#4246) --- query-engine/query-engine/src/server/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs index 75543dc7ee58..f3583df310d7 100644 --- a/query-engine/query-engine/src/server/mod.rs +++ b/query-engine/query-engine/src/server/mod.rs @@ -63,7 +63,7 @@ pub(crate) async fn routes(cx: Arc, req: Request) -> Result let mut res = match (req.method(), req.uri().path()) { (&Method::POST, "/") => request_handler(cx, req).await?, (&Method::GET, "/") if cx.enabled_features.contains(Feature::Playground) => playground_handler(), - (&Method::GET, "/status") => build_json_response(StatusCode::OK, r#"{"status":"ok"}"#), + (&Method::GET, "/status") => build_json_response(StatusCode::OK, &json!({"status": "ok"})), (&Method::GET, "/sdl") => { let schema = render_graphql_schema(cx.query_schema()); From 46fa0396e2de9ab6ec99c48bd342bc513b032648 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:44:56 +0200 Subject: [PATCH 24/67] ci: do not skip the buildkite pipeline when previous commit is empty (#4385) --- .buildkite/engineer | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index bf31a6e371df..98b78284eaf2 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -9,24 +9,35 @@ else echo "We are in the $2 pipeline." fi +# Checks what's the diff with the previous commit +# This is used to detect if the previous commit was empty +GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .) + # Checks what's the diff with the previous commit, # excluding some paths that do not need a run, # because they do not affect tests running in Buildkite. -GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- . 
':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) +GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) # $2 is either "test" or "build", depending on the pipeline # Example: ./.buildkite/engineer pipeline test # We only want to check for changes and skip in the test pipeline. if [[ "$2" == "test" ]]; then - # Checking if GIT_DIFF is empty - # If it's empty then it's most likely that there are changes but they are in ignored paths. - # So we do not start Buildkite + # If GIT_DIFF is empty then the previous commit was empty + # We assume it's intended and we continue with the run + # Example use: to get a new engine hash built with identical code if [ -z "${GIT_DIFF}" ]; then - echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." - exit 0 + echo "The previous commit is empty, this run will continue..." else - # Note that printf works better for displaying line returns in CI - printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF}" + # Checking if GIT_DIFF_WITH_IGNORED_PATHS is empty + # If it's empty then it's most likely that there are changes but they are in ignored paths. + # So we do not start Buildkite + if [ -z "${GIT_DIFF_WITH_IGNORED_PATHS}" ]; then + echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." + exit 0 + else + # Note that printf works better for displaying line returns in CI + printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF_WITH_IGNORED_PATHS}" + fi fi fi From 51d8349124b96b4c636526990eba13a691d553a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:45:25 +0200 Subject: [PATCH 25/67] chore: login to Docker only if Docker credentials are truthy (#4381) --- .github/workflows/query-engine-black-box.yml | 1 + .github/workflows/query-engine-driver-adapters.yml | 1 + .github/workflows/query-engine.yml | 1 + .github/workflows/schema-engine.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml index 78e60178d7f7..a941588dfd8e 100644 --- a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -50,6 +50,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index d52b446b12fb..5b34b9761c4c 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -68,6 +68,7 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 9c242217662d..3df596e20d61 100644 --- a/.github/workflows/query-engine.yml +++ 
b/.github/workflows/query-engine.yml @@ -80,6 +80,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml index 5bdf25a2bd35..c6249f069091 100644 --- a/.github/workflows/schema-engine.yml +++ b/.github/workflows/schema-engine.yml @@ -113,6 +113,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} From 473ee41d8162d802413a60f9b23238b8e5648fd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:48:54 +0200 Subject: [PATCH 26/67] ci(biuildkite): skip test&build for changes in query-engine/query-engine-wasm (#4371) --- .buildkite/engineer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 98b78284eaf2..5de99cea5390 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -16,7 +16,7 @@ GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .) # Checks what's the diff with the previous commit, # excluding some paths that do not need a run, # because they do not affect tests running in Buildkite. -GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) +GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!query-engine/query-engine-wasm' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) # $2 is either "test" or "build", depending on the pipeline # Example: ./.buildkite/engineer pipeline test From 9c1efedeb581438e6d20860d939957bc093154a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 14:35:07 +0200 Subject: [PATCH 27/67] ci(schema-wasm): cleanup the GitHub Action (#4370) Co-authored-by: Jan Piotrowski --- .../workflows/publish-prisma-schema-wasm.yml | 27 +++++++------------ 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index f453811009ce..070bf528654a 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -1,6 +1,7 @@ name: Build and publish @prisma/prisma-schema-wasm +run-name: npm - release @prisma/prisma-schema-wasm@${{ github.event.inputs.enginesWrapperVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} -concurrency: build-prisma-schema-wasm +concurrency: publish-prisma-schema-wasm on: # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo @@ -30,25 +31,18 @@ jobs: ref: ${{ github.event.inputs.enginesHash }} - uses: cachix/install-nix-action@v23 - # - # Build - # - - - run: nix build .#prisma-schema-wasm - - # - # Publish - # + - name: Build + run: nix build .#prisma-schema-wasm - uses: actions/setup-node@v3 with: - node-version: '14.x' + node-version: '20.x' + registry-url: 'https://registry.npmjs.org/' - - name: Set up NPM token - run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - - - run: | - 
PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) + - name: Update version in package.json & Publish @prisma/prisma-schema-wasm + run: + # Update version in package.json and return directory for later usage + PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} @@ -60,7 +54,6 @@ jobs: - name: Set current job url in SLACK_FOOTER env var if: ${{ failure() }} run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 From 87000b9863599bcab1769a68b466108a0e68216b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 10:45:45 +0200 Subject: [PATCH 28/67] ci: fix publish-prisma-schema-wasm.yml (#4388) --- .github/workflows/publish-prisma-schema-wasm.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 070bf528654a..e47031837224 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -37,15 +37,16 @@ jobs: - uses: actions/setup-node@v3 with: node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' + + # This is needed to be done manually because of `PACKAGE_DIR` used later + - name: Set up NPM token for publishing later + run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Update version in package.json & Publish @prisma/prisma-schema-wasm run: # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # # Failure handlers From f2a389ec6343da9935493c8170851414a559371e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 10:56:42 +0200 Subject: [PATCH 29/67] ci: fix publish-prisma-schema-wasm.yml (#4389) --- .github/workflows/publish-prisma-schema-wasm.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index e47031837224..78d139f80772 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -47,7 +47,9 @@ jobs: # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - + env: + # Required for publishing + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # # Failure handlers # From 0bd2db9e01fa7842e8d4b6e3bc4599351bd562e0 Mon Sep 17 00:00:00 2001 From: Marco Ieni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 27 Oct 2023 10:59:27 +0200 Subject: [PATCH 30/67] fix: typo in metric description (#4387) --- query-engine/black-box-tests/tests/metrics/smoke_tests.rs | 2 +- query-engine/metrics/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index 8542f753b78e..3397de75af99 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -76,7 +76,7 @@ mod smoke_tests { assert_eq!(metrics.matches("# HELP prisma_client_queries_active The number of currently active Prisma Client queries").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_active gauge").count(), 1); - assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for an free connection").count(), 1); + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for a free connection").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait gauge").count(), 1); assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1); diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs index 7f34f84a8612..1965b56cb076 100644 --- a/query-engine/metrics/src/lib.rs +++ b/query-engine/metrics/src/lib.rs @@ -89,7 +89,7 @@ static METRIC_RENAMES: Lazy> (MOBC_POOL_CONNECTIONS_OPEN, ("prisma_pool_connections_open", "The number of pool connections currently open")), (MOBC_POOL_CONNECTIONS_BUSY, ("prisma_pool_connections_busy", "The number of pool connections currently executing datasource queries")), (MOBC_POOL_CONNECTIONS_IDLE, ("prisma_pool_connections_idle", "The number of pool connections that are not busy running a query")), - (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for an free connection")), + (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for a free connection")), (MOBC_POOL_WAIT_DURATION, ("prisma_client_queries_wait_histogram_ms", "The distribution of the time all datasource queries spent waiting for a free connection")), ]) }); From 4362521bfeb4ec2631819c6c1421af0221640137 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:23:06 +0200 Subject: [PATCH 31/67] chore(deps): update mysql docker tag to v5.7.44 (#2735) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index fc585adabafe..c0d4f179e0a4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -182,7 +182,7 @@ services: tmpfs: /var/lib/mysql mysql-5-7: - image: mysql:5.7.32 + image: mysql:5.7.44 command: mysqld restart: unless-stopped platform: linux/x86_64 From 3305eccac7176c86ac9678ad7e6da63c5b0d20c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 11:38:05 +0200 Subject: [PATCH 32/67] ci: fix yml files for GitHub Actions workflow for DOCKERHUB login (#4390) --- .github/workflows/query-engine-black-box.yml | 5 ++++- .github/workflows/query-engine-driver-adapters.yml | 5 ++++- .github/workflows/query-engine.yml | 5 ++++- .github/workflows/schema-engine.yml | 5 ++++- 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml index a941588dfd8e..5ebcd79cec4c 100644 --- 
a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -50,7 +50,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 5b34b9761c4c..7823bed70cfb 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -68,7 +68,10 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 3df596e20d61..762c3da4a50a 100644 --- a/.github/workflows/query-engine.yml +++ b/.github/workflows/query-engine.yml @@ -80,7 +80,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml index c6249f069091..03d23317bbd0 100644 --- a/.github/workflows/schema-engine.yml +++ b/.github/workflows/schema-engine.yml @@ -113,7 +113,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} From 6dda9d7a540d41932067b8c8308e086f55f7dded Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Fri, 27 Oct 2023 13:51:18 +0200 Subject: [PATCH 33/67] Migrate driver adapters to prisma/prisma (#4380) * Promote connector-test-kit to the driver-adapters directory and remove js * Remove node_modules from connector-test-kit-executor * Remove dist from connector-test-kit-executor * Ignore non-relevant files * Sort out dependencies * Makefile to setup driver adapters from a checkout of prisma * Only clone prisma/prisma shallowly * Delete driver-adapter-smoke-tests.yml * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration see if this works * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration change deprecated set-output command * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration tmp remove * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration clearer detection of branch * 
DRIVER_ADAPTERS_BRANCH=driver-adapters-migration Build executor separately * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration Add make tasks to test driver adapters * Document and ease running driver adapter tests * Revert "DRIVER_ADAPTERS_BRANCH=driver-adapters-migration tmp remove" This reverts commit 463775a8b467ff7aa0d3e21063f1e617014f4b81. * Move documentation to where it belongs * Document how to do integration testing in shorter loops in CI. * chore(driver-adapters): remove outdated symlink to tsconfig file * fix(driver-adapters): use ws, making connector-test-kit-executor compatible with Node.js 16+ * fix(driver-adapters): remove warning "import.meta" is not available with the "cjs" output format * chore(driver-adapters): remove references to query-engine-driver-adapters.yml * Revert "chore(driver-adapters): remove references to query-engine-driver-adapters.yml" This reverts commit eeaaa8f92a24ae8ff3951b786002ca76ae6de837. * Remove publish-driver-adapters workflow * Fix using main branch * Take back conditional on docker login after bad main merge --------- Co-authored-by: jkomyno --- .../workflows/driver-adapter-smoke-tests.yml | 131 ---- .github/workflows/publish-driver-adapters.yml | 83 -- .../query-engine-driver-adapters.yml | 11 + .gitignore | 3 + Makefile | 55 +- README.md | 26 + query-engine/connector-test-kit-rs/README.md | 33 +- .../query-tests-setup/src/config.rs | 2 +- query-engine/driver-adapters/.gitignore | 3 + .../connector-test-kit-executor/.gitignore | 3 + .../connector-test-kit-executor/package.json | 40 + .../pnpm-lock.yaml | 494 +++++------- .../script/start_node.sh | 0 .../src/engines/JsonProtocol.ts | 0 .../src/engines/Library.ts | 0 .../src/engines/QueryEngine.ts | 0 .../src/engines/Transaction.ts | 0 .../connector-test-kit-executor/src/index.ts | 3 +- .../src/jsonRpc.ts | 0 .../connector-test-kit-executor/src/qe.ts | 2 +- .../tsconfig.json | 2 +- query-engine/driver-adapters/js/.gitignore | 44 -- query-engine/driver-adapters/js/.npmrc | 2 - .../driver-adapters/js/.prettierrc.yml | 5 - query-engine/driver-adapters/js/README.md | 42 - .../js/adapter-libsql/.gitignore | 1 - .../js/adapter-libsql/README.md | 95 --- .../js/adapter-libsql/package.json | 31 - .../js/adapter-libsql/src/conversion.ts | 161 ---- .../js/adapter-libsql/src/index.ts | 1 - .../js/adapter-libsql/src/libsql.ts | 171 ----- .../js/adapter-libsql/tests/types.test.mts | 151 ---- .../js/adapter-libsql/tsconfig.build.json | 6 - .../js/adapter-libsql/tsconfig.json | 3 - .../driver-adapters/js/adapter-neon/README.md | 71 -- .../js/adapter-neon/package.json | 30 - .../js/adapter-neon/src/conversion.ts | 286 ------- .../js/adapter-neon/src/index.ts | 1 - .../js/adapter-neon/src/neon.ts | 165 ---- .../js/adapter-neon/tsconfig.build.json | 6 - .../js/adapter-neon/tsconfig.json | 3 - .../driver-adapters/js/adapter-pg/README.md | 3 - .../js/adapter-pg/package.json | 31 - .../js/adapter-pg/src/conversion.ts | 286 ------- .../js/adapter-pg/src/index.ts | 1 - .../driver-adapters/js/adapter-pg/src/pg.ts | 138 ---- .../js/adapter-planetscale/README.md | 71 -- .../js/adapter-planetscale/package.json | 29 - .../js/adapter-planetscale/src/conversion.ts | 98 --- .../js/adapter-planetscale/src/deferred.ts | 13 - .../js/adapter-planetscale/src/index.ts | 1 - .../js/adapter-planetscale/src/planetscale.ts | 181 ----- .../adapter-planetscale/tsconfig.build.json | 6 - .../js/adapter-planetscale/tsconfig.json | 3 - .../connector-test-kit-executor/package.json | 28 - .../js/driver-adapter-utils/README.md | 3 - 
.../js/driver-adapter-utils/package.json | 26 - .../js/driver-adapter-utils/src/binder.ts | 80 -- .../js/driver-adapter-utils/src/const.ts | 48 -- .../js/driver-adapter-utils/src/debug.ts | 3 - .../js/driver-adapter-utils/src/index.ts | 5 - .../js/driver-adapter-utils/src/result.ts | 41 - .../js/driver-adapter-utils/src/types.ts | 132 ---- .../driver-adapter-utils/tsconfig.build.json | 6 - .../js/driver-adapter-utils/tsconfig.json | 3 - query-engine/driver-adapters/js/package.json | 23 - .../driver-adapters/js/pnpm-workspace.yaml | 8 - .../js/smoke-test-js/.envrc.example | 26 - .../js/smoke-test-js/.gitignore | 4 - .../js/smoke-test-js/README.md | 79 -- .../js/smoke-test-js/package.json | 67 -- .../mysql/commands/type_test/insert.sql | 51 -- .../smoke-test-js/prisma/mysql/schema.prisma | 125 --- .../postgres/commands/type_test/insert.sql | 35 - .../prisma/postgres/schema.prisma | 117 --- .../sqlite/commands/type_test/insert.sql | 17 - .../20230915202554_init/migration.sql | 85 --- .../sqlite/migrations/migration_lock.toml | 3 - .../smoke-test-js/prisma/sqlite/schema.prisma | 79 -- .../driver-adapters/js/smoke-test-js/setup.sh | 7 - .../js/smoke-test-js/src/client/client.ts | 164 ---- .../smoke-test-js/src/client/libsql.test.ts | 20 - .../src/client/neon.http.test.ts | 13 - .../smoke-test-js/src/client/neon.ws.test.ts | 16 - .../js/smoke-test-js/src/client/pg.test.ts | 13 - .../src/client/planetscale.test.ts | 13 - .../src/engines/types/JsonProtocol.ts | 78 -- .../src/engines/types/Library.ts | 46 -- .../src/engines/types/QueryEngine.ts | 97 --- .../src/engines/types/Transaction.ts | 35 - .../smoke-test-js/src/libquery/errors.test.ts | 105 --- .../js/smoke-test-js/src/libquery/libquery.ts | 722 ------------------ .../smoke-test-js/src/libquery/libsql.test.ts | 22 - .../src/libquery/neon.http.test.ts | 16 - .../src/libquery/neon.ws.test.ts | 18 - .../js/smoke-test-js/src/libquery/pg.test.ts | 15 - .../src/libquery/planetscale.test.ts | 15 - .../js/smoke-test-js/src/libquery/util.ts | 71 -- .../js/smoke-test-js/tsconfig.json | 3 - query-engine/driver-adapters/js/version.sh | 15 - query-engine/driver-adapters/src/result.rs | 2 - 101 files changed, 348 insertions(+), 5279 deletions(-) delete mode 100644 .github/workflows/driver-adapter-smoke-tests.yml delete mode 100644 .github/workflows/publish-driver-adapters.yml create mode 100644 query-engine/driver-adapters/.gitignore create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/.gitignore create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/package.json rename query-engine/driver-adapters/{js => connector-test-kit-executor}/pnpm-lock.yaml (79%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/script/start_node.sh (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/JsonProtocol.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/Library.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/QueryEngine.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/Transaction.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/index.ts (99%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/jsonRpc.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/qe.ts (92%) rename query-engine/driver-adapters/{js => 
connector-test-kit-executor}/tsconfig.json (99%) delete mode 100644 query-engine/driver-adapters/js/.gitignore delete mode 100644 query-engine/driver-adapters/js/.npmrc delete mode 100644 query-engine/driver-adapters/js/.prettierrc.yml delete mode 100644 query-engine/driver-adapters/js/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/.gitignore delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-neon/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/neon.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/adapter-pg/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-pg/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/pg.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/package.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/README.md delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/package.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json delete mode 100644 
query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/package.json delete mode 100644 query-engine/driver-adapters/js/pnpm-workspace.yaml delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/.envrc.example delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/.gitignore delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/README.md delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/package.json delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/setup.sh delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/tsconfig.json delete mode 100755 query-engine/driver-adapters/js/version.sh diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml deleted file mode 100644 index 802e3188dedc..000000000000 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: Driver Adapters, Smoke Tests -on: - push: - 
branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/driver-adapter-smoke-tests.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -jobs: - driver-adapter-smoke-tests: - name: ${{ matrix.adapter }} - - strategy: - fail-fast: false - matrix: - adapter: ['neon:ws', 'neon:http', planetscale, pg, libsql] - - runs-on: ubuntu-latest - - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - # via package.json rewritten into DATABASE_URL before scripts are run - env: - JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} - JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} - # TODO: test sqld and embedded replicas - JS_LIBSQL_DATABASE_URL: file:/tmp/libsql.db - # TODO: test all three of ("number", "bigint", "string") and conditionally skip some tests as appropriate - JS_LIBSQL_INT_MODE: bigint - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - run: pnpm prisma:${{ matrix.adapter }} - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - run: pnpm ${{ matrix.adapter }}:libquery - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine! 
- from Npm) - run: pnpm ${{ matrix.adapter }}:client - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - - driver-adapter-smoke-tests-errors: - name: Errors - - runs-on: ubuntu-latest - - # services: - # postgres: - # image: postgres - # env: - # POSTGRES_PASSWORD: postgres - # options: >- - # --health-cmd pg_isready - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 5432:5432 - - env: - # via package.json rewritten into DATABASE_URL before scripts are run - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - name: pnpm errors - run: pnpm errors - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml deleted file mode 100644 index 7da972c35e1b..000000000000 --- a/.github/workflows/publish-driver-adapters.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Build and publish Prisma Driver Adapters -run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} - -concurrency: publish-prisma-driver-adapters - -on: - # usually triggered via GH Actions Workflow in prisma/prisma repo - workflow_dispatch: - inputs: - enginesHash: - description: Engine commit hash to checkout for publishing - required: true - prismaVersion: - description: Prisma version to use for publishing - required: true - npmDistTag: - description: npm dist-tag to use for publishing - required: true - default: 'latest' - dryRun: - description: 'Check to do a dry run (does not publish packages)' - type: boolean - -jobs: - build: - name: Build and publish Prisma Driver Adapters - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - - uses: pnpm/action-setup@v2.4.0 - with: - version: 8 - - - uses: actions/setup-node@v3 - with: - node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' - - - name: Install dependencies - run: pnpm i - working-directory: query-engine/driver-adapters/js - - - name: Build - run: pnpm -r build - working-directory: query-engine/driver-adapters/js - - - name: Update version in package.json - run: | - # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result - find . 
-name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - working-directory: query-engine/driver-adapters/js - - - name: Publish Prisma Driver Adapters packages - run: | - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} - working-directory: query-engine/driver-adapters/js - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} - - # - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma driver adapters publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 7823bed70cfb..f3a3badfb804 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -49,6 +49,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: 'Setup Node.js' uses: actions/setup-node@v3 @@ -76,6 +78,15 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract Branch Name + id: extract-branch + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n "$branch" ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi + - run: make ${{ matrix.adapter.setup_task }} - uses: dtolnay/rust-toolchain@stable diff --git a/.gitignore b/.gitignore index 43e03e31867d..be185b0f7afc 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,6 @@ dmmf.json graph.dot prisma-schema-wasm/nodejs + +# This symlink looks orphan here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory +tsconfig.build.adapter.json diff --git a/Makefile b/Makefile index 0c3e1541e632..541738c35d95 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -44,7 +45,13 @@ release: ################# test-qe: +ifndef DRIVER_ADAPTER cargo test --package query-engine-tests +else + @echo "Executing query engine tests with $(DRIVER_ADAPTER) driver adapter"; \ + # Add your actual command for the "test-driver-adapter" task here + $(MAKE) test-driver-adapter-$(DRIVER_ADAPTER); +endif test-qe-verbose: cargo test --package query-engine-tests -- --nocapture @@ -80,6 +87,10 @@ dev-sqlite: dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) +test-libsql-sqlite: dev-libsql-sqlite test-qe-st + +test-driver-adapter-libsql: test-libsql-sqlite + start-postgres9: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 @@ -115,12 +126,20 @@ start-pg-postgres13: build-qe-napi build-connector-kit-js 
start-postgres13 dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) +test-pg-postgres13: dev-pg-postgres13 test-qe-st + +test-driver-adapter-pg: test-pg-postgres13 + start-neon-postgres13: build-qe-napi build-connector-kit-js docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 dev-neon-ws-postgres13: start-neon-postgres13 cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) +test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st + +test-driver-adapter-neon: test-neon-ws-postgres13 + start-postgres14: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 @@ -255,6 +274,10 @@ start-planetscale-vitess8: build-qe-napi build-connector-kit-js dev-planetscale-vitess8: start-planetscale-vitess8 cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) +test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st + +test-driver-adapter-planetscale: test-planetscale-vitess8 + ###################### # Local dev commands # ###################### @@ -262,8 +285,36 @@ dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: - cd query-engine/driver-adapters/js && pnpm i && pnpm build +build-connector-kit-js: build-driver-adapters symlink-driver-adapters + cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build + +build-driver-adapters: ensure-prisma-present + @echo "Building driver adapters..." + @cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build + @echo "Driver adapters build completed."; + +symlink-driver-adapters: ensure-prisma-present + @echo "Creating symbolic links for driver adapters..." + @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ + if [ -d "$$dir" ]; then \ + dir_name=$$(basename "$$dir"); \ + ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ + echo "Created symbolic link for $$dir_name"; \ + fi; \ + done; + echo "Symbolic links creation completed."; + +ensure-prisma-present: + @if [ -d ../prisma ]; then \ + cd "$(realpath ../prisma)" && git fetch origin main; \ + LOCAL_CHANGES=$$(git diff --name-only HEAD origin/main -- 'packages/*adapter*'); \ + if [ -n "$$LOCAL_CHANGES" ]; then \ + echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ + fi \ + else \ + echo "git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) ../prisma"; \ + git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ + fi; # Quick schema validation of whatever you have in the dev_datamodel.prisma file. validate: diff --git a/README.md b/README.md index 6fd072072757..49c7c1a8ab39 100644 --- a/README.md +++ b/README.md @@ -203,6 +203,7 @@ integration tests. - Alternatively: Load the defined environment in `./.envrc` manually in your shell. **Setup:** + There are helper `make` commands to set up a test environment for a specific database connector you want to test. The commands set up a container (if needed) and write the `.test_config` file, which is picked up by the integration @@ -234,6 +235,31 @@ Other variables may or may not be useful. Run `cargo test` in the repository root. 
+### Testing driver adapters
+
+Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README.
+
+**ℹ️ Important note on developing features that require changes to both the query engine and the driver adapters code**
+
+As explained in [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters), running `DRIVER_ADAPTER=$adapter make test-qe`
+will ensure you have prisma/prisma checked out in your filesystem next to prisma-engines. This is needed because the driver adapters code is symlinked in prisma-engines.
+
+When working on a feature or bugfix spanning adapters code and query-engine code, you will need to open sibling PRs in `prisma/prisma` and `prisma/prisma-engines` respectively.
+Locally, each time you run `DRIVER_ADAPTER=$adapter make test-qe`, tests will run using the driver adapters built from the source code in your working copy of prisma/prisma. All good.
+
+In CI, though, we need to specify which branch of prisma/prisma to use for tests, because there's no working copy of prisma/prisma before tests run.
+The CI job clones the prisma/prisma `main` branch by default, which doesn't include your local changes. To test the integration, we can tell CI to use the branch of prisma/prisma containing
+the adapter changes. To do so, use a simple convention in commit messages, like this:
+
+```
+git commit -m "DRIVER_ADAPTERS_BRANCH=prisma-branch-with-changes-in-adapters [...]"
+```
+
+GitHub Actions will then pick up the branch name, use it to clone that branch of prisma/prisma, and build the driver adapters code from there.
+
+When it's time to merge the sibling PRs, you'll need to merge the prisma/prisma PR first, so that when you merge the engines PR the adapter code is already in the prisma/prisma `main` branch.
+
+
 ## Parallel rust-analyzer builds
 
 When rust-analyzer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer. It is possible to avoid this by setting a different build path for
diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md
index 2c849a2aa985..97d19467879a 100644
--- a/query-engine/connector-test-kit-rs/README.md
+++ b/query-engine/connector-test-kit-rs/README.md
@@ -64,34 +64,45 @@ On the note of docker containers: Most connectors require an endpoint to run aga
 
 If you choose to set up the databases yourself, please note the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) to set up the user, password and database for the test user.
 
+### Running
+
+Note that by default tests run concurrently.
+
+- VSCode should automatically detect tests and display `run test`.
+- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`.
+- `cargo test` in the `query-engine-tests` crate.
+- A single test can be run with the normal cargo facilities from the command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture`, where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test.
+- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index.
+
 #### Running tests through driver adapters
 
-The query engine is able to delegate query execution to javascript through [driver adapters](query-engine/driver-adapters/js/README.md).
-This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs drivers the code that actually communicates with the databases.
+The query engine is able to delegate query execution to javascript through driver adapters.
+This means that instead of the drivers being implemented in Rust, the code that actually communicates
+with the databases is a layer of adapters over Node.js drivers. See [`adapter-*` packages in prisma/prisma](https://github.com/prisma/prisma/tree/main/packages).
 
 To run tests through a driver adapter, you should also configure the following environment variables:
 
-* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication.
+* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries; this is a node process running a program that will read the queries to run from STDIN and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication.
 * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter.
 * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters.
 
 Example:
 
 ```shell
-export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"
+export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"
 export DRIVER_ADAPTER=neon
 export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }'
 ```
 
-### Running
+We have provided helpers to run the query-engine tests with driver adapters; these helpers set all the required environment
+variables for you:
 
-Note that by default tests run concurrently.
+```shell
+DRIVER_ADAPTER=$adapter make test-qe
+```
+
+Where `$adapter` is one of the supported adapters: `neon`, `planetscale`, `libsql`.
 
-- VSCode should automatically detect tests and display `run test`.
-- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`.
-- `cargo test` in the `query-engine-tests` crate.
-- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test.
-- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index.
 
 ## Authoring tests
 
 The following is an example of how to write a new test suite, as extending or changing an existing one follows the same rules and considerations.
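For reference, the `DRIVER_ADAPTERS_BRANCH` commit-message convention documented in the README changes above is consumed by the new `Extract Branch Name` step in `.github/workflows/query-engine-driver-adapters.yml` (see the workflow diff earlier in this patch). Below is a minimal local sketch of that step: the `git show | grep | cut` pipeline is taken verbatim from the workflow, while exporting the variable directly (instead of appending to `$GITHUB_ENV`, which only exists on CI) is an assumption for local use, relying on the Makefile's overridable `DRIVER_ADAPTERS_BRANCH ?= main` default.

```shell
# Sketch: replicate the CI "Extract Branch Name" step locally.
# Reads the current commit subject and picks up an optional
# DRIVER_ADAPTERS_BRANCH=<branch> marker.
branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)"
if [ -n "$branch" ]; then
  echo "Using $branch branch of driver adapters"
  # On CI this is appended to "$GITHUB_ENV"; locally we export it so that
  # make targets (which default DRIVER_ADAPTERS_BRANCH to main) pick it up.
  export DRIVER_ADAPTERS_BRANCH="$branch"
fi
```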
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index b27f27406e5c..4af4e763298a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -159,7 +159,7 @@ impl TestConfig { /// and the workspace_root is set, then use the default external test executor. fn fill_defaults(&mut self) { const DEFAULT_TEST_EXECUTOR: &str = - "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + "query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"; if self .external_test_executor diff --git a/query-engine/driver-adapters/.gitignore b/query-engine/driver-adapters/.gitignore new file mode 100644 index 000000000000..dab5c8905550 --- /dev/null +++ b/query-engine/driver-adapters/.gitignore @@ -0,0 +1,3 @@ +node_modules +adapter-* +driver-adapter-utils diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore new file mode 100644 index 000000000000..37b61ff565c7 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore @@ -0,0 +1,3 @@ +node_modules +pnpm-debug.log +dist/ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json new file mode 100644 index 000000000000..b63694bb4459 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -0,0 +1,40 @@ +{ + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "name": "connector-test-kit-executor", + "version": "0.0.1", + "description": "", + "main": "dist/index.mjs", + "module": "dist/index.mjs", + "private": true, + "scripts": { + "build": "tsup ./src/index.ts --format esm --dts", + "lint": "tsc -p ./tsconfig.build.json" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@libsql/client": "0.3.5", + "@neondatabase/serverless": "^0.6.0", + "@planetscale/database": "1.11.0", + "@prisma/adapter-libsql": "../adapter-libsql", + "@prisma/adapter-neon": "../adapter-neon", + "@prisma/adapter-pg": "../adapter-pg", + "@prisma/adapter-planetscale": "../adapter-planetscale", + "@prisma/driver-adapter-utils": "../driver-adapter-utils", + "@types/pg": "^8.10.2", + "pg": "^8.11.3", + "undici": "^5.26.5", + "ws": "^8.14.2" + }, + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "tsx": "^3.12.7", + "typescript": "^5.1.6" + } +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml similarity index 79% rename from query-engine/driver-adapters/js/pnpm-lock.yaml rename to query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index 9a82ffdbac63..d140be7b516c 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -4,197 +4,79 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - tsx: - specifier: ^3.12.7 - version: 3.12.7 - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - adapter-libsql: - dependencies: - 
'@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - async-mutex: - specifier: 0.4.0 - version: 0.4.0 - devDependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - - adapter-neon: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - - adapter-pg: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - - adapter-planetscale: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - - connector-test-kit-executor: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: 1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - undici: - specifier: ^5.26.2 - version: 5.26.2 - - driver-adapter-utils: - dependencies: - debug: - specifier: ^4.3.4 - version: 4.3.4 - devDependencies: - '@types/debug': - specifier: ^4.1.8 - version: 4.1.8 - - smoke-test-js: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/client': - specifier: 5.4.2 - version: 5.4.2(prisma@5.4.2) - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - pg: - specifier: ^8.11.3 - version: 8.11.3 - superjson: - specifier: ^1.13.1 - version: 1.13.1 - undici: - specifier: ^5.26.2 - version: 5.26.2 - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - cross-env: - specifier: ^7.0.3 - version: 7.0.3 - prisma: - specifier: 5.4.2 - version: 5.4.2 - tsx: - specifier: ^3.12.7 - version: 3.12.7 +dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: 
link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: ../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.5 + version: 5.26.5 + ws: + specifier: ^8.14.2 + version: 8.14.2 + +devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 packages: - /@esbuild-kit/cjs-loader@2.4.2: - resolution: {integrity: sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} + /@esbuild-kit/cjs-loader@2.4.4: + resolution: {integrity: sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true - /@esbuild-kit/core-utils@3.2.2: - resolution: {integrity: sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA==} + /@esbuild-kit/core-utils@3.3.2: + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} dependencies: esbuild: 0.18.20 source-map-support: 0.5.21 dev: true - /@esbuild-kit/esm-loader@2.5.5: - resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + /@esbuild-kit/esm-loader@2.6.5: + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true /@esbuild/android-arm64@0.18.20: @@ -406,7 +288,7 @@ packages: dependencies: '@jridgewell/set-array': 1.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@jridgewell/trace-mapping': 0.3.19 + '@jridgewell/trace-mapping': 0.3.20 dev: true /@jridgewell/resolve-uri@3.1.1: @@ -423,8 +305,8 @@ packages: resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} dev: true - /@jridgewell/trace-mapping@0.3.19: - resolution: {integrity: sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==} + /@jridgewell/trace-mapping@0.3.20: + resolution: {integrity: sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==} dependencies: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 @@ -435,24 +317,27 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.28 + libsql: 0.1.34 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate + dev: false - /@libsql/darwin-arm64@0.1.28: - resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} + /@libsql/darwin-arm64@0.1.34: + resolution: {integrity: sha512-Wv8jvkj/fUAO8DF3A4HaddCMldUUpKcg/WW1sY95FNsSHOxktyxqU80jAp/tCuZ85GQIJozvgSr51/ARIC0gsw==} cpu: [arm64] os: [darwin] requiresBuild: true + dev: false optional: true - /@libsql/darwin-x64@0.1.28: - resolution: {integrity: 
sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} + /@libsql/darwin-x64@0.1.34: + resolution: {integrity: sha512-2NQXD9nUzC08hg7FdcZLq5uTEwGz1KbD7YvUzQb/psO1lO/E/p83wl1es1082+Pp0z5pSPDWQeRTuccD41L+3w==} cpu: [x64] os: [darwin] requiresBuild: true + dev: false optional: true /@libsql/hrana-client@0.5.5: @@ -466,59 +351,76 @@ packages: - bufferutil - encoding - utf-8-validate + dev: false /@libsql/isomorphic-fetch@0.1.10: resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} dependencies: - '@types/node-fetch': 2.6.6 + '@types/node-fetch': 2.6.7 node-fetch: 2.7.0 transitivePeerDependencies: - encoding + dev: false /@libsql/isomorphic-ws@0.1.5: resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} dependencies: - '@types/ws': 8.5.5 + '@types/ws': 8.5.8 ws: 8.14.2 transitivePeerDependencies: - bufferutil - utf-8-validate + dev: false - /@libsql/linux-arm64-gnu@0.1.28: - resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} + /@libsql/linux-arm64-gnu@0.1.34: + resolution: {integrity: sha512-r3dY1FDYZ7eX5HX7HyAoYSqK5FPugj5NSB5Bt/nz+ygBWdXASgSKxkE/RqjJIM59vXwv300iJX9qhR5fXv8sTw==} cpu: [arm64] os: [linux] requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.1.34: + resolution: {integrity: sha512-9AE/eNb9eQRcNsLxqtpLJxVEoIMmItrdwqJDImPJtOp10rhp4U0x/9RGKerl9Mg3ObVj676pyhAR2KzyudrOfQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-gnu@0.1.28: - resolution: {integrity: sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} + /@libsql/linux-x64-gnu@0.1.34: + resolution: {integrity: sha512-o8toY1Txstjt13fBhZbFe8sNAW6OaS6qVcp1Bd6bHkCLSBLZ6pjJmwzQN8rFv9QFBPAnaKP3lI4vaOXXw7huTA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-musl@0.1.28: - resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} + /@libsql/linux-x64-musl@0.1.34: + resolution: {integrity: sha512-EldEmcAxxNPSCjJ73oFxg81PDDIpDbPqK/QOrhmmGYLvYwrnQtVRUIbARf80JQvcy6bCxOO/Q9dh6wGhnyHyYA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/win32-x64-msvc@0.1.28: - resolution: {integrity: sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==} + /@libsql/win32-x64-msvc@0.1.34: + resolution: {integrity: sha512-jnv0qfVMnrVv00r+wUOe6DHrHuao9y1w1lN543cV2J1JdQNJT/eSZzhyZFSlS3T2ZUvXfZfZ5GeL8U18IAID6w==} cpu: [x64] os: [win32] requiresBuild: true + dev: false optional: true /@neon-rs/load@0.0.4: resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false /@neondatabase/serverless@0.6.0: resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} dependencies: '@types/pg': 8.6.6 + dev: false /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -544,73 +446,39 @@ packages: /@planetscale/database@1.11.0: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - - 
/@prisma/client@5.4.2(prisma@5.4.2): - resolution: {integrity: sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA==} - engines: {node: '>=16.13'} - requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - '@prisma/engines-version': 5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574 - prisma: 5.4.2 dev: false - /@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574: - resolution: {integrity: sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA==} - dev: false - - /@prisma/engines@5.4.2: - resolution: {integrity: sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g==} - requiresBuild: true - - /@types/debug@4.1.8: - resolution: {integrity: sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==} + /@types/node-fetch@2.6.7: + resolution: {integrity: sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==} dependencies: - '@types/ms': 0.7.31 - dev: true - - /@types/ms@0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: true - - /@types/node-fetch@2.6.6: - resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==} - dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 form-data: 4.0.0 + dev: false /@types/node@20.5.1: resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} - dev: true - - /@types/node@20.5.9: - resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} - - /@types/node@20.6.5: - resolution: {integrity: sha512-2qGq5LAOTh9izcc0+F+dToFigBWiK1phKPt7rNhOqJSr35y8rlIBjDwGtFSgAI6MGIhjwOVNSQZVdJsZJ2uR1w==} /@types/pg@8.10.2: resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 4.0.1 + dev: false /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 2.2.0 + dev: false - /@types/ws@8.5.5: - resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} + /@types/ws@8.5.8: + resolution: {integrity: sha512-flUksGIQCnJd6sZ1l5dqCEG/ksaoAg/eUwiLAGTJQcfgvZJKF++Ta4bJA6A5aPSJmsr+xlseHn4KLgVlNnvPTg==} dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 + dev: false /any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} @@ -629,14 +497,9 @@ packages: engines: {node: '>=8'} dev: true - /async-mutex@0.4.0: - resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} - dependencies: - tslib: 2.6.2 - dev: false - /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false /balanced-match@1.0.2: resolution: {integrity: 
sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} @@ -668,9 +531,10 @@ packages: /buffer-writer@2.0.0: resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} engines: {node: '>=4'} + dev: false - /bundle-require@4.0.1(esbuild@0.18.20): - resolution: {integrity: sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ==} + /bundle-require@4.0.2(esbuild@0.18.20): + resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: esbuild: '>=0.17' @@ -704,6 +568,7 @@ packages: engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 + dev: false /commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} @@ -714,21 +579,6 @@ packages: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} dev: true - /copy-anything@3.0.5: - resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} - engines: {node: '>=12.13'} - dependencies: - is-what: 4.1.15 - dev: false - - /cross-env@7.0.3: - resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} - engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} - hasBin: true - dependencies: - cross-spawn: 7.0.3 - dev: true - /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} @@ -741,6 +591,7 @@ packages: /data-uri-to-buffer@4.0.1: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} + dev: false /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} @@ -752,14 +603,17 @@ packages: optional: true dependencies: ms: 2.1.2 + dev: true /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dev: false /detect-libc@2.0.2: resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} engines: {node: '>=8'} + dev: false /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} @@ -836,6 +690,7 @@ packages: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 3.2.1 + dev: false /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} @@ -851,12 +706,14 @@ packages: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 + dev: false /formdata-polyfill@4.0.10: resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} engines: {node: '>=12.20.0'} dependencies: fetch-blob: 3.2.0 + dev: false /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -875,8 +732,8 @@ packages: engines: {node: '>=10'} dev: true - /get-tsconfig@4.7.0: - resolution: {integrity: 
sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw==} + /get-tsconfig@4.7.2: + resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==} dependencies: resolve-pkg-maps: 1.0.0 dev: true @@ -961,11 +818,6 @@ packages: engines: {node: '>=8'} dev: true - /is-what@4.1.15: - resolution: {integrity: sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==} - engines: {node: '>=12.13'} - dev: false - /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true @@ -977,21 +829,24 @@ packages: /js-base64@3.7.5: resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false - /libsql@0.1.28: - resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==} + /libsql@0.1.34: + resolution: {integrity: sha512-LGofp7z7gi1Td6vu2GxaA4WyvSPEkuFn0f/ePSti1TsAlBU0LWxdk+bj9D8nqswzxiqe5wpAyTLhVzTIYSyXEA==} cpu: [x64, arm64] os: [darwin, linux, win32] dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.1.28 - '@libsql/darwin-x64': 0.1.28 - '@libsql/linux-arm64-gnu': 0.1.28 - '@libsql/linux-x64-gnu': 0.1.28 - '@libsql/linux-x64-musl': 0.1.28 - '@libsql/win32-x64-msvc': 0.1.28 + '@libsql/darwin-arm64': 0.1.34 + '@libsql/darwin-x64': 0.1.34 + '@libsql/linux-arm64-gnu': 0.1.34 + '@libsql/linux-arm64-musl': 0.1.34 + '@libsql/linux-x64-gnu': 0.1.34 + '@libsql/linux-x64-musl': 0.1.34 + '@libsql/win32-x64-msvc': 0.1.34 + dev: false /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} @@ -1031,12 +886,14 @@ packages: /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} + dev: false /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 + dev: false /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} @@ -1051,6 +908,7 @@ packages: /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true /mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -1063,6 +921,7 @@ packages: /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} + dev: false /node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} @@ -1074,6 +933,7 @@ packages: optional: true dependencies: whatwg-url: 5.0.0 + dev: false /node-fetch@3.3.2: resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} @@ -1082,6 +942,7 @@ packages: data-uri-to-buffer: 4.0.1 fetch-blob: 3.2.0 formdata-polyfill: 4.0.10 + dev: false /normalize-path@3.0.0: resolution: {integrity: 
sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} @@ -1102,6 +963,7 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + dev: false /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -1118,6 +980,7 @@ packages: /packet-reader@1.0.0: resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} + dev: false /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} @@ -1137,18 +1000,22 @@ packages: /pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} requiresBuild: true + dev: false optional: true /pg-connection-string@2.6.2: resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + dev: false /pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + dev: false /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} + dev: false /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -1156,9 +1023,11 @@ packages: pg: '>=8.0' dependencies: pg: 8.11.3 + dev: false /pg-protocol@1.6.0: resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + dev: false /pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} @@ -1169,6 +1038,7 @@ packages: postgres-bytea: 1.0.0 postgres-date: 1.0.7 postgres-interval: 1.2.0 + dev: false /pg-types@4.0.1: resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} @@ -1181,6 +1051,7 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 + dev: false /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -1200,11 +1071,13 @@ packages: pgpass: 1.0.5 optionalDependencies: pg-cloudflare: 1.1.1 + dev: false /pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} dependencies: split2: 4.2.0 + dev: false /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} @@ -1229,55 +1102,56 @@ packages: optional: true dependencies: lilconfig: 2.1.0 - yaml: 2.3.2 + yaml: 2.3.3 dev: true /postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} + dev: false /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} + dev: false /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} engines: 
{node: '>=0.10.0'} + dev: false /postgres-bytea@3.0.0: resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} engines: {node: '>= 6'} dependencies: obuf: 1.1.2 + dev: false /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} engines: {node: '>=0.10.0'} + dev: false /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} + dev: false /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} engines: {node: '>=0.10.0'} dependencies: xtend: 4.0.2 + dev: false /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} + dev: false /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - - /prisma@5.4.2: - resolution: {integrity: sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - dependencies: - '@prisma/engines': 5.4.2 + dev: false /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} @@ -1309,8 +1183,8 @@ packages: engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rollup@3.28.1: - resolution: {integrity: sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==} + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true optionalDependencies: @@ -1366,6 +1240,7 @@ packages: /split2@4.2.0: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} + dev: false /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} @@ -1386,13 +1261,6 @@ packages: ts-interface-checker: 0.1.13 dev: true - /superjson@1.13.1: - resolution: {integrity: sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==} - engines: {node: '>=10'} - dependencies: - copy-anything: 3.0.5 - dev: false - /thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -1415,6 +1283,7 @@ packages: /tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} @@ -1431,10 +1300,6 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tslib@2.6.2: - resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: false - /tsup@7.2.0(typescript@5.1.6): resolution: {integrity: 
sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} engines: {node: '>=16.14'} @@ -1451,7 +1316,7 @@ packages: typescript: optional: true dependencies: - bundle-require: 4.0.1(esbuild@0.18.20) + bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 chokidar: 3.5.3 debug: 4.3.4 @@ -1461,7 +1326,7 @@ packages: joycon: 3.1.1 postcss-load-config: 4.0.1 resolve-from: 5.0.0 - rollup: 3.28.1 + rollup: 3.29.4 source-map: 0.8.0-beta.0 sucrase: 3.34.0 tree-kill: 1.2.2 @@ -1475,9 +1340,9 @@ packages: resolution: {integrity: sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} hasBin: true dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.2.2 - '@esbuild-kit/esm-loader': 2.5.5 + '@esbuild-kit/cjs-loader': 2.4.4 + '@esbuild-kit/core-utils': 3.3.2 + '@esbuild-kit/esm-loader': 2.6.5 optionalDependencies: fsevents: 2.3.3 dev: true @@ -1488,8 +1353,8 @@ packages: hasBin: true dev: true - /undici@5.26.2: - resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} + /undici@5.26.5: + resolution: {integrity: sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==} engines: {node: '>=14.0'} dependencies: '@fastify/busboy': 2.0.0 @@ -1498,9 +1363,11 @@ packages: /web-streams-polyfill@3.2.1: resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} engines: {node: '>= 8'} + dev: false /webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} @@ -1511,6 +1378,7 @@ packages: dependencies: tr46: 0.0.3 webidl-conversions: 3.0.1 + dev: false /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} @@ -1543,12 +1411,14 @@ packages: optional: true utf-8-validate: optional: true + dev: false /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + dev: false - /yaml@2.3.2: - resolution: {integrity: sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==} + /yaml@2.3.3: + resolution: {integrity: sha512-zw0VAJxgeZ6+++/su5AFoqBbZbrEakwu+X0M5HmcwUiBL7AzcuPKjj5we4xfQLp78LkEMpD0cOnUhmgOVy3KdQ==} engines: {node: '>= 14'} dev: true diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh rename to query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts diff --git 
a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts similarity index 99% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index 8a05a6b2e9aa..b89348fb3e77 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -9,7 +9,8 @@ import * as prismaPg from '@prisma/adapter-pg' // neon dependencies import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' -import { fetch, WebSocket } from 'undici' +import { fetch } from 'undici' +import { WebSocket } from 'ws' import * as prismaNeon from '@prisma/adapter-neon' // libsql dependencies diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts similarity index 92% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts index 764df8f6108d..186d7a9e80d2 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts @@ -10,7 +10,7 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: const libExt = os.platform() === 'darwin' ? 
'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + const libQueryEnginePath = path.join(dirname, `../../../../target/debug/libquery_engine.${libExt}`) const libqueryEngine = { exports: {} as unknown as lib.Library } // @ts-ignore diff --git a/query-engine/driver-adapters/js/tsconfig.json b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json similarity index 99% rename from query-engine/driver-adapters/js/tsconfig.json rename to query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json index b405cea50201..516c114b3e15 100644 --- a/query-engine/driver-adapters/js/tsconfig.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json @@ -20,4 +20,4 @@ "resolveJsonModule": true }, "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] -} +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/.gitignore b/query-engine/driver-adapters/js/.gitignore deleted file mode 100644 index e885963af278..000000000000 --- a/query-engine/driver-adapters/js/.gitignore +++ /dev/null @@ -1,44 +0,0 @@ -node_modules - -yarn-error.log -dist -build -tmp -pnpm-debug.log -sandbox -.DS_Store - -query-engine* -migration-engine* -schema-engine* -libquery_engine* -libquery-engine* -query_engine-windows.dll.node - -*tmp.db -dist/ -declaration/ - -*.tsbuildinfo -.prisma -.pnpm-store - -.vscode -!.vscode/launch.json.default -coverage - -.eslintcache - -.pnpm-debug.log - -.envrc - -esm -reproductions/* -!reproductions/basic-sqlite -!reproductions/tracing -!reproductions/pnpm-workspace.yaml - -dev.db -junit.xml -/output.txt diff --git a/query-engine/driver-adapters/js/.npmrc b/query-engine/driver-adapters/js/.npmrc deleted file mode 100644 index c87ec9b9e3d3..000000000000 --- a/query-engine/driver-adapters/js/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -git-checks=false -access=public diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml deleted file mode 100644 index f0beb50a2167..000000000000 --- a/query-engine/driver-adapters/js/.prettierrc.yml +++ /dev/null @@ -1,5 +0,0 @@ -tabWidth: 2 -trailingComma: all -singleQuote: true -semi: false -printWidth: 120 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md deleted file mode 100644 index 926d6db2b0a8..000000000000 --- a/query-engine/driver-adapters/js/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Prisma Driver Adapters - - - - - - - -
- -This TypeScript monorepo contains the following packages: -- `@prisma/driver-adapter-utils` - - Internal set of utilities and types for Prisma's driver adapters. -- `@prisma/adapter-neon` - - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@prisma/adapter-planetscale` - - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - - It uses `provider = "mysql"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@prisma/adapter-pg` - - Prisma's Driver Adapter that wraps the `pg` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` - -## Get Started - -We assume Node.js `v18.16.1`+ is installed. If not, run `nvm use` in the current directory. -Double-checking this is especially important if you have multiple Node.js versions installed, as PlanetScale requires either Node.js `v18.16.1`+ or a custom `fetch` function. - -Install `pnpm` via: - -```sh -npm i -g pnpm -``` - -## Development - -- Install Node.js dependencies via `pnpm i` -- Build and link TypeScript packages via `pnpm build` -- Publish packages to `npm` via `pnpm publish -r` diff --git a/query-engine/driver-adapters/js/adapter-libsql/.gitignore b/query-engine/driver-adapters/js/adapter-libsql/.gitignore deleted file mode 100644 index c370cb644f95..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.db diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md deleted file mode 100644 index 5ca415ea8ec9..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# @prisma/adapter-libsql - -Prisma driver adapter for Turso and libSQL. - -See https://prisma.io/turso for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver. - -```sh -npm install @prisma/adapter-libsql -npm install @libsql/client
``` - -Make sure your Turso database connection string and authentication token are copied over to your `.env` file. The connection string will start with `libsql://`. - -```env -# .env -TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." -TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io" -``` - -These environment variables are read in your application code when you instantiate the libSQL client (see "How to use" below). Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to set up migrations - -As Turso needs to sync between a local SQLite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should: - -1. 
Create a baseline migration - -```sh -npx prisma migrate diff --from-empty \ - --to-schema-datamodel prisma/schema.prisma \ - --script > baseline.sql -``` - -2. Apply the migration to your Turso database - -```sh -turso db shell turso-prisma < baseline.sql -``` - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the libSQL serverless database driver -3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { PrismaClient } from '@prisma/client'; -import { PrismaLibSQL } from '@prisma/adapter-libsql'; -import { createClient } from '@libsql/client'; - -// Setup -const connectionString = `${process.env.TURSO_DATABASE_URL}`; -const authToken = `${process.env.TURSO_AUTH_TOKEN}`; - -// Init prisma client -const libsql = createClient({ - url: connectionString, - authToken, -}); -const adapter = new PrismaLibSQL(libsql); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses a **single** remote Turso database. -You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. -We encourage you to create an issue if you find something missing or run into a bug. - -If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. 
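To make the "use Prisma Client as normal" comment above concrete, here is a minimal usage sketch that continues the snippet above; the `user` model is a hypothetical stand-in for whatever models your `schema.prisma` actually defines:

```typescript
// Hypothetical usage sketch, continuing the snippet above.
// Assumes a `User` model exists in schema.prisma.
async function main() {
  // This query is executed through the libSQL driver adapter configured above.
  const users = await prisma.user.findMany();
  console.log(users);
}

main()
  .catch((e) => console.error(e))
  .finally(async () => {
    await prisma.$disconnect();
  });
```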
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json deleted file mode 100644 index fbce33c98a29..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-libsql", - "version": "0.0.0", - "description": "Prisma's driver adapter for libSQL and Turso", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json", - "test": "node --loader tsx --test tests/*.test.mts" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alexey Orlenko ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "async-mutex": "0.4.0" - }, - "devDependencies": { - "@libsql/client": "0.3.5" - }, - "peerDependencies": { - "@libsql/client": "^0.3.5" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts deleted file mode 100644 index b2fa4b5b4095..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ /dev/null @@ -1,161 +0,0 @@ -import { ColumnTypeEnum, ColumnType, Debug } from '@prisma/driver-adapter-utils' -import { Row, Value } from '@libsql/client' -import { isArrayBuffer } from 'node:util/types' - -const debug = Debug('prisma:driver-adapter:libsql:conversion') - -// Mirrors sqlite/conversion.rs in quaint -function mapDeclType(declType: string): ColumnType | null { - switch (declType.toUpperCase()) { - case '': - return null - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'FLOAT': - return ColumnTypeEnum.Float - case 'DOUBLE': - case 'DOUBLE PRECISION': - case 'NUMERIC': - case 'REAL': - return ColumnTypeEnum.Double - case 'TINYINT': - case 'SMALLINT': - case 'MEDIUMINT': - case 'INT': - case 'INTEGER': - case 'SERIAL': - case 'INT2': - return ColumnTypeEnum.Int32 - case 'BIGINT': - case 'UNSIGNED BIG INT': - case 'INT8': - return ColumnTypeEnum.Int64 - case 'DATETIME': - case 'TIMESTAMP': - return ColumnTypeEnum.DateTime - case 'TIME': - return ColumnTypeEnum.Time - case 'DATE': - return ColumnTypeEnum.Date - case 'TEXT': - case 'CLOB': - case 'CHARACTER': - case 'VARCHAR': - case 'VARYING CHARACTER': - case 'NCHAR': - case 'NATIVE CHARACTER': - case 'NVARCHAR': - return ColumnTypeEnum.Text - case 'BLOB': - return ColumnTypeEnum.Bytes - case 'BOOLEAN': - return ColumnTypeEnum.Boolean - default: - debug('unknown decltype:', declType) - return null - } -} - -function mapDeclaredColumnTypes(columnTypes: string[]): [out: Array<ColumnType | null>, empty: Set<number>] { - const emptyIndices = new Set<number>() - const result = columnTypes.map((typeName, index) => { - const mappedType = mapDeclType(typeName) - if (mappedType === null) { - emptyIndices.add(index) - } - return mappedType - }) - return [result, emptyIndices] -} - -export function getColumnTypes(declaredTypes: string[], rows: Row[]): ColumnType[] { - const [columnTypes, emptyIndices] = mapDeclaredColumnTypes(declaredTypes) - - if (emptyIndices.size === 0) { - return columnTypes as ColumnType[] - } - - columnLoop: for (const columnIndex of emptyIndices) { - // No declared column type in db schema, infer using first non-null value - for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { - const candidateValue = rows[rowIndex][columnIndex] - if (candidateValue
!== null) { - columnTypes[columnIndex] = inferColumnType(candidateValue) - continue columnLoop - } - } - - // No non-null value found for this column, fall back to int32 to mimic what quaint does - columnTypes[columnIndex] = ColumnTypeEnum.Int32 - } - - return columnTypes as ColumnType[] -} - -function inferColumnType(value: NonNullable<Value>): ColumnType { - switch (typeof value) { - case 'string': - return ColumnTypeEnum.Text - case 'bigint': - return ColumnTypeEnum.Int64 - case 'boolean': - return ColumnTypeEnum.Boolean - case 'number': - return ColumnTypeEnum.UnknownNumber - case 'object': - return inferObjectType(value) - default: - throw new UnexpectedTypeError(value) - } -} - -function inferObjectType(value: {}): ColumnType { - if (isArrayBuffer(value)) { - return ColumnTypeEnum.Bytes - } - throw new UnexpectedTypeError(value) -} - -class UnexpectedTypeError extends Error { - name = 'UnexpectedTypeError' - constructor(value: unknown) { - const type = typeof value - const repr = type === 'object' ? JSON.stringify(value) : String(value) - super(`unexpected value of type ${type}: ${repr}`) - } -} - -export function mapRow(row: Row, columnTypes: ColumnType[]): unknown[] { - // `Row` doesn't have map, so we copy the array once and modify it in-place - // to avoid allocating and copying twice if we used `Array.from(row).map(...)`. - const result: unknown[] = Array.from(row) - - for (let i = 0; i < result.length; i++) { - const value = result[i] - - // Convert bigint to string as we can only use JSON-encodable types here - if (typeof value === 'bigint') { - result[i] = value.toString() - } - - // Convert array buffers to arrays of bytes. - // Base64 would've been more efficient but would collide with the existing - // logic, implemented for other adapters, that treats string values of type - // Bytes as raw UTF-8 bytes. - if (isArrayBuffer(value)) { - result[i] = Array.from(new Uint8Array(value)) - } - - // If an integer is required and the current number isn't one, - // discard the fractional part. 
- if ( - typeof value === 'number' && - (columnTypes[i] === ColumnTypeEnum.Int32 || columnTypes[i] === ColumnTypeEnum.Int64) && - !Number.isInteger(value) - ) { - result[i] = Math.trunc(value) - } - } - - return result -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts deleted file mode 100644 index 04a95cc4cfcd..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaLibSQL } from './libsql' diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts deleted file mode 100644 index 6528c8f44a8a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import type { - InStatement, - Client as LibSqlClientRaw, - Transaction as LibSqlTransactionRaw, - ResultSet as LibSqlResultSet, -} from '@libsql/client' -import { Mutex } from 'async-mutex' -import { getColumnTypes, mapRow } from './conversion' - -const debug = Debug('prisma:driver-adapter:libsql') - -type StdClient = LibSqlClientRaw -type TransactionClient = LibSqlTransactionRaw - -const LOCK_TAG = Symbol() - -class LibSqlQueryable<ClientT extends StdClient | TransactionClient> implements Queryable { - readonly flavour = 'sqlite'; - - [LOCK_TAG] = new Mutex() - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise<Result<ResultSet>> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - - return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { - const columnTypes = getColumnTypes(declaredColumnTypes, rows) - - return { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise<Result<number>> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise<Result<LibSqlResultSet>> { - const release = await this[LOCK_TAG].acquire() - try { - const result = await this.client.execute(query as InStatement) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - const rawCode = error['rawCode'] ?? 
e.cause?.['rawCode'] - if (typeof rawCode === 'number') { - return err({ - kind: 'Sqlite', - extendedCode: rawCode, - message: error.message, - }) - } - throw error - } finally { - release() - } - } -} - -class LibSqlTransaction extends LibSqlQueryable<TransactionClient> implements Transaction { - finished = false - - constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { - super(client) - } - - async commit(): Promise<Result<void>> { - debug(`[js::commit]`) - - this.finished = true - - try { - await this.client.commit() - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - async rollback(): Promise<Result<void>> { - debug(`[js::rollback]`) - - this.finished = true - - try { - await this.client.rollback() - } catch (error) { - debug('error in rollback:', error) - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - dispose(): Result<void> { - if (!this.finished) { - this.finished = true - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaLibSQL extends LibSqlQueryable<StdClient> implements DriverAdapter { - constructor(client: StdClient) { - super(client) - } - - async startTransaction(): Promise<Result<Transaction>> { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const release = await this[LOCK_TAG].acquire() - - try { - const tx = await this.client.transaction('deferred') - return ok(new LibSqlTransaction(tx, options, release)) - } catch (e) { - // note: we only release the lock if creating the transaction fails, it must stay locked otherwise, - // hence `catch` and rethrowing the error and not `finally`. - release() - throw e - } - } - - async close(): Promise<Result<void>> { - await this[LOCK_TAG].acquire() - this.client.close() - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts deleted file mode 100644 index f7f1b474a300..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts +++ /dev/null @@ -1,151 +0,0 @@ -import assert from 'node:assert/strict' -import { describe, it } from 'node:test' -import { Config, createClient } from '@libsql/client' -import { PrismaLibSQL } from '../dist/index.js' -import { ColumnTypeEnum } from '@prisma/driver-adapter-utils' - -function connect(config?: Partial<Config>): PrismaLibSQL { - const client = createClient({ url: 'file:test.db', ...config }) - return new PrismaLibSQL(client) -} - -it('checks declared types', async () => { - const client = connect() - - await client.executeRaw({ - sql: ` - DROP TABLE IF EXISTS types; - `, - args: [], - }) - - await client.executeRaw({ - sql: ` - CREATE TABLE types ( - id INTEGER PRIMARY KEY, - real REAL, - bigint BIGINT, - date DATETIME, - text TEXT, - blob BLOB - ) - `, - args: [], - }) - - const result = await client.queryRaw({ - sql: ` - SELECT * FROM types - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ - ColumnTypeEnum.Int32, - ColumnTypeEnum.Double, - ColumnTypeEnum.Int64, - ColumnTypeEnum.DateTime, - ColumnTypeEnum.Text, - ColumnTypeEnum.Bytes, - ]) -}) - -it('infers types when sqlite decltype is not available', async () => { - const client = connect() - - const result = await client.queryRaw({ - sql: ` - SELECT 1 as first, 'test' as second - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ColumnTypeEnum.Int64, 
ColumnTypeEnum.Text]) -}) - -describe('int64 with different intMode', () => { - const N = 2n ** 63n - 1n - - it('correctly infers int64 with intMode=number for safe JS integers', async () => { - const client = connect({ intMode: 'number' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [Number.MAX_SAFE_INTEGER], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], Number.MAX_SAFE_INTEGER) - }) - - it("doesn't support very big int64 with intMode=number", async () => { - const client = connect({ intMode: 'number' }) - - assert.rejects( - client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }), - ) - }) - - it('correctly infers int64 with intMode=bigint', async () => { - const client = connect({ intMode: 'bigint' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - - // bigints are converted to strings because we can't currently pass a bigint - // to rust due to a napi.rs limitation - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it('correctly infers int64 with intMode=string when we have decltype', async () => { - const client = connect({ intMode: 'string' }) - - await client.executeRaw({ - sql: `DROP TABLE IF EXISTS test`, - args: [], - }) - - await client.executeRaw({ - sql: `CREATE TABLE test (int64 BIGINT)`, - args: [], - }) - - await client.executeRaw({ - sql: `INSERT INTO test (int64) VALUES (?)`, - args: [N], - }) - - const result = await client.queryRaw({ - sql: `SELECT int64 FROM test`, - args: [], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it("can't infer int64 with intMode=string without schema", async () => { - const client = connect({ intMode: 'string' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Text) - assert.equal(result.value.rows[0][0], N.toString()) - }) -}) diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md deleted file mode 100644 index f36f44c6bca4..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-neon - -Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. 
- -## How to install - -After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. - -```sh -npm install @prisma/adapter-neon -npm install @neondatabase/serverless -npm install ws -``` - -Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. The connection string will start with `postgres://`. - -```env -# .env -DATABASE_URL="postgres://..." -``` - -Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgresql" - url = env("DATABASE_URL") -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the Neon serverless database driver -3. Instantiate the Prisma Neon adapter with the Neon serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { Pool, neonConfig } from '@neondatabase/serverless'; -import { PrismaNeon } from '@prisma/adapter-neon'; -import { PrismaClient } from '@prisma/client'; -import ws from 'ws'; - -// Setup -neonConfig.webSocketConstructor = ws; -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const pool = new Pool({ connectionString }); -const adapter = new PrismaNeon(pool); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety. If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development. 
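To make the final step concrete, here is a minimal query sketch that continues the snippet above; the `user` model is hypothetical and stands in for whatever models your `schema.prisma` defines:

```typescript
// Hypothetical usage sketch, continuing the snippet above.
// Assumes a `User` model exists in schema.prisma.
async function main() {
  // Reads and writes are routed through the Neon serverless driver over WebSockets.
  const users = await prisma.user.findMany();
  console.log(users);
}

main()
  .catch((e) => console.error(e))
  .finally(async () => {
    await prisma.$disconnect();
    await pool.end(); // also close the underlying Neon pool when shutting down
  });
```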
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json deleted file mode 100644 index 02005a13572f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@prisma/adapter-neon", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "@neondatabase/serverless": "^0.6.0" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts deleted file mode 100644 index 78f285240599..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from '@neondatabase/serverless' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function 
normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
- */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-neon/src/index.ts b/query-engine/driver-adapters/js/adapter-neon/src/index.ts deleted file mode 100644 index f160d413ade0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaNeon, PrismaNeonHTTP } from './neon' diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts deleted file mode 100644 index e8fe40ada22f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type neon from '@neondatabase/serverless' -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:neon') - -type ARRAY_MODE_ENABLED = true - -type PerformIOResult = neon.QueryResult | neon.FullQueryResults - -/** - * Base class for http client, ws client and ws transaction - */ -abstract class NeonQueryable implements Queryable { - readonly flavour = 'postgres' - - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map((r) => r.rowCount ?? 
0) - } - - abstract performIO(query: Query): Promise> -} - -/** - * Base class for WS-based queryables: top-level client and transaction - */ -class NeonWsQueryable extends NeonQueryable { - constructor(protected client: ClientT) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) - } catch (e) { - debug('Error in performIO: %O', e) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw e - } - } -} - -class NeonTransaction extends NeonWsQueryable implements Transaction { - finished = false - - constructor(client: neon.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaNeon extends NeonWsQueryable implements DriverAdapter { - private isRunning = true - - constructor(pool: neon.Pool) { - super(pool) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new NeonTransaction(connection, options)) - } - - async close() { - if (this.isRunning) { - await this.client.end() - this.isRunning = false - } - return ok(undefined) - } -} - -export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - return ok( - await this.client(sql, values, { - arrayMode: true, - fullResults: true, - }), - ) - } - - startTransaction(): Promise> { - return Promise.reject(new Error('Transactions are not supported in HTTP mode')) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-pg/README.md b/query-engine/driver-adapters/js/adapter-pg/README.md deleted file mode 100644 index b8463742e25c..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-pg - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json 
b/query-engine/driver-adapters/js/adapter-pg/package.json deleted file mode 100644 index 7514569c562a..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-pg", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"pg\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Tom Houlé ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "pg": "^8.11.3", - "@types/pg": "^8.10.2" - }, - "peerDependencies": { - "pg": "^8.11.3" - } -} diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts deleted file mode 100644 index c26b13877927..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from 'pg' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function 
normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
- */
-function convertBytes(serializedBytes: string): number[] {
-  const buffer = parsePgBytes(serializedBytes)
-  return encodeBuffer(buffer)
-}
-
-types.setTypeParser(ScalarColumnType.BYTEA, convertBytes)
-
-/*
- * BYTEA_ARRAY - arrays of arbitrary raw binary strings
- */
-
-const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[]
-
-types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => {
-  const buffers = parseBytesArray(serializedBytesArray)
-  return buffers.map((buf) => buf ? encodeBuffer(buf) : null)
-})
-
-/* BIT_ARRAY, VARBIT_ARRAY */
-
-function normalizeBit(bit: string): string {
-  return bit
-}
-
-types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit))
-types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit))
\ No newline at end of file
diff --git a/query-engine/driver-adapters/js/adapter-pg/src/index.ts b/query-engine/driver-adapters/js/adapter-pg/src/index.ts
deleted file mode 100644
index f8e51ac2685b..000000000000
--- a/query-engine/driver-adapters/js/adapter-pg/src/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { PrismaPg } from './pg'
diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts
deleted file mode 100644
index c34050778c39..000000000000
--- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts
+++ /dev/null
@@ -1,138 +0,0 @@
-import type pg from 'pg'
-import { Debug, err, ok } from '@prisma/driver-adapter-utils'
-import type {
-  DriverAdapter,
-  Query,
-  Queryable,
-  Result,
-  ResultSet,
-  Transaction,
-  TransactionOptions,
-} from '@prisma/driver-adapter-utils'
-import { fieldToColumnType } from './conversion'
-
-const debug = Debug('prisma:driver-adapter:pg')
-
-type StdClient = pg.Pool
-type TransactionClient = pg.PoolClient
-
-class PgQueryable<ClientT extends StdClient | TransactionClient> implements Queryable {
-  readonly flavour = 'postgres'
-
-  constructor(protected readonly client: ClientT) {}
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters.
-   */
-  async queryRaw(query: Query): Promise<Result<ResultSet>> {
-    const tag = '[js::query_raw]'
-    debug(`${tag} %O`, query)
-
-    const ioResult = await this.performIO(query)
-    return ioResult.map(({ fields, rows }) => {
-      const columns = fields.map((field) => field.name)
-      const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID))
-
-      return {
-        columnNames: columns,
-        columnTypes,
-        rows,
-      }
-    })
-  }
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters and
-   * returning the number of affected rows.
-   * Note: Queryable expects a u64, but napi.rs only supports u32.
-   */
-  async executeRaw(query: Query): Promise<Result<number>> {
-    const tag = '[js::execute_raw]'
-    debug(`${tag} %O`, query)
-
-    // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`)
-    return (await this.performIO(query)).map(({ rowCount: rowsAffected }) => rowsAffected ?? 0)
-  }
-
-  /**
-   * Run a query against the database, returning the result set.
-   * Should the query fail due to a connection error, the connection is
-   * marked as unhealthy.
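As a rough illustration of what `queryRaw` above receives from the driver, here is a minimal sketch (assuming a reachable Postgres in `DATABASE_URL`) of the `rowMode: 'array'` shape that the mapping relies on:

```typescript
import pg from 'pg'

async function inspect() {
  const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL })

  const result = await pool.query({ text: 'SELECT 1 AS one, 2 AS two', values: [], rowMode: 'array' })
  console.log(result.fields.map((f) => f.name))       // ['one', 'two'] -> columnNames
  console.log(result.fields.map((f) => f.dataTypeID)) // [23, 23]       -> input to fieldToColumnType
  console.log(result.rows)                            // [[1, 2]]       -> rows come back as arrays, not objects

  await pool.end()
}
```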
- */
-  private async performIO(query: Query): Promise<Result<pg.QueryArrayResult>> {
-    const { sql, args: values } = query
-
-    try {
-      const result = await this.client.query({ text: sql, values, rowMode: 'array' })
-      return ok(result)
-    } catch (e) {
-      const error = e as Error
-      debug('Error in performIO: %O', error)
-      if (e && e.code) {
-        return err({
-          kind: 'Postgres',
-          code: e.code,
-          severity: e.severity,
-          message: e.message,
-          detail: e.detail,
-          column: e.column,
-          hint: e.hint,
-        })
-      }
-      throw error
-    }
-  }
-}
-
-class PgTransaction extends PgQueryable<TransactionClient> implements Transaction {
-  finished = false
-
-  constructor(client: pg.PoolClient, readonly options: TransactionOptions) {
-    super(client)
-  }
-
-  async commit(): Promise<Result<void>> {
-    debug(`[js::commit]`)
-
-    this.finished = true
-    this.client.release()
-    return ok(undefined)
-  }
-
-  async rollback(): Promise<Result<void>> {
-    debug(`[js::rollback]`)
-
-    this.finished = true
-    this.client.release()
-    return ok(undefined)
-  }
-
-  dispose(): Result<void> {
-    if (!this.finished) {
-      this.client.release()
-    }
-    return ok(undefined)
-  }
-}
-
-export class PrismaPg extends PgQueryable<StdClient> implements DriverAdapter {
-  constructor(client: pg.Pool) {
-    super(client)
-  }
-
-  async startTransaction(): Promise<Result<Transaction>> {
-    const options: TransactionOptions = {
-      usePhantomQuery: false,
-    }
-
-    const tag = '[js::startTransaction]'
-    debug(`${tag} options: %O`, options)
-
-    const connection = await this.client.connect()
-    return ok(new PgTransaction(connection, options))
-  }
-
-  async close() {
-    return ok(undefined)
-  }
-}
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md
deleted file mode 100644
index a4cdc132036a..000000000000
--- a/query-engine/driver-adapters/js/adapter-planetscale/README.md
+++ /dev/null
@@ -1,71 +0,0 @@
-# @prisma/adapter-planetscale
-
-Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js).
-
-See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details.
-
-The following usage tutorial is valid for Prisma 5.4.2 and later versions.
-
-## How to install
-
-After [getting started with PlanetScale](https://planetscale.com/docs), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver.
-
-```sh
-npm install @prisma/adapter-planetscale
-npm install @planetscale/database
-npm install undici
-```
-
-Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`.
-
-```env
-# .env
-DATABASE_URL="mysql://..."
-```
-
-You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature.
-
-```prisma
-// schema.prisma
-generator client {
-  provider        = "prisma-client-js"
-  previewFeatures = ["driverAdapters"]
-}
-
-datasource db {
-  provider     = "mysql"
-  url          = env("DATABASE_URL")
-  relationMode = "prisma"
-}
-```
-
-Now run `npx prisma generate` to re-generate Prisma Client.
-
-## How to use
-
-In TypeScript, you will need to:
-
-1. Import packages
-2. Set up the PlanetScale serverless database driver
-3. 
Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { connect } from '@planetscale/database'; -import { PrismaPlanetScale } from '@prisma/adapter-planetscale'; -import { PrismaClient } from '@prisma/client'; -import { fetch as undiciFetch } from 'undici'; - -// Setup -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const connection = connect({ url: connectionString, fetch: undiciFetch }); -const adapter = new PrismaPlanetScale(connection); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [`connection reliability and performance`](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma’s connection pool, but Prisma will continue to handle error handling and type safety. If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json deleted file mode 100644 index 59d59704ab50..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@prisma/adapter-planetscale", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@planetscale/database\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@planetscale/database": "^1.11.0" - }, - "peerDependencies": { - "@planetscale/database": "^1.11.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts deleted file mode 100644 index f6cf8563dc24..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' - -// See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 -export type PlanetScaleColumnType - = 'NULL' - | 'INT8' - | 'UINT8' - | 'INT16' - | 'UINT16' - | 'INT24' - | 'UINT24' - | 'INT32' - | 'UINT32' - | 'INT64' - | 'UINT64' - | 'FLOAT32' - | 'FLOAT64' - | 'TIMESTAMP' - | 'DATE' - | 'TIME' - | 'DATETIME' - | 'YEAR' - | 'DECIMAL' - | 'TEXT' - | 'BLOB' - | 'VARCHAR' - | 'VARBINARY' - | 'CHAR' - | 'BINARY' - | 'BIT' - | 'ENUM' - | 'SET' // unsupported - | 'TUPLE' // unsupported - | 'GEOMETRY' - | 'JSON' - | 'EXPRESSION' // unsupported - | 'HEXNUM' - | 'HEXVAL' - | 'BITNUM' - -/** - * This is a simplification of quaint's value inference logic. 
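The mapping below is many-to-one: several distinct Vitess wire types collapse into a single `ColumnType`. A short sketch, reusing the `fieldToColumnType` function defined in this file:

```typescript
import { ColumnTypeEnum } from '@prisma/driver-adapter-utils'
import { fieldToColumnType } from './conversion'

fieldToColumnType('INT24') === ColumnTypeEnum.Int32  // true: MEDIUMINT fits in 32 bits
fieldToColumnType('UINT32') === ColumnTypeEnum.Int32 // true as well -- signedness is lost here
fieldToColumnType('YEAR') === ColumnTypeEnum.Int32   // true: YEAR is surfaced as a plain integer
```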
Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType { - switch (field) { - case 'INT8': - case 'UINT8': - case 'INT16': - case 'UINT16': - case 'INT24': - case 'UINT24': - case 'INT32': - case 'UINT32': - case 'YEAR': - return ColumnTypeEnum.Int32 - case 'INT64': - case 'UINT64': - return ColumnTypeEnum.Int64 - case 'FLOAT32': - return ColumnTypeEnum.Float - case 'FLOAT64': - return ColumnTypeEnum.Double - case 'TIMESTAMP': - case 'DATETIME': - return ColumnTypeEnum.DateTime - case 'DATE': - return ColumnTypeEnum.Date - case 'TIME': - return ColumnTypeEnum.Time - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'CHAR': - return ColumnTypeEnum.Char - case 'TEXT': - case 'VARCHAR': - return ColumnTypeEnum.Text - case 'ENUM': - return ColumnTypeEnum.Enum - case 'JSON': - return ColumnTypeEnum.Json - case 'BLOB': - case 'BINARY': - case 'VARBINARY': - case 'BIT': - case 'BITNUM': - case 'HEXNUM': - case 'HEXVAL': - case 'GEOMETRY': - return ColumnTypeEnum.Bytes - case 'NULL': - // Fall back to Int32 for consistency with quaint. - return ColumnTypeEnum.Int32 - default: - throw new Error(`Unsupported column type: ${field}`) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts deleted file mode 100644 index 013409c8424f..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts +++ /dev/null @@ -1,13 +0,0 @@ -export type Deferred = { - resolve(value: T | PromiseLike): void; - reject(reason: unknown): void; -} - - -export function createDeferred(): [Deferred, Promise] { - const deferred = {} as Deferred - return [deferred, new Promise((resolve, reject) => { - deferred.resolve = resolve - deferred.reject = reject - })] -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts deleted file mode 100644 index 5e8add856fbb..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPlanetScale } from './planetscale' diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts deleted file mode 100644 index 5a52851112b2..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ /dev/null @@ -1,181 +0,0 @@ -import type planetScale from '@planetscale/database' -import { Debug, err, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' -import { createDeferred, Deferred } from './deferred' - -const debug = Debug('prisma:driver-adapter:planetscale') - -class RollbackError extends Error { - constructor() { - super('ROLLBACK') - this.name = 'RollbackError' - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, RollbackError) - } - } -} - -class PlanetScaleQueryable implements Queryable { - readonly flavour = 'mysql' - constructor(protected client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given 
parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - return ioResult.map(({ fields, insertId: lastInsertId, rows }) => { - const columns = fields.map((field) => field.name) - return { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - const result = await this.client.execute(sql, values, { - as: 'array', - }) - return ok(result) - } catch (e) { - const error = e as Error - if (error.name === 'DatabaseError') { - const parsed = parseErrorMessage(error.message) - if (parsed) { - return err({ - kind: 'Mysql', - ...parsed, - }) - } - } - debug('Error in performIO: %O', error) - throw error - } - } -} - -function parseErrorMessage(message: string) { - const match = message.match( - /target: (?:.+?) vttablet: (?.+?) \(errno (?\d+)\) \(sqlstate (?.+?)\)/, - ) - - if (!match || !match.groups) { - return undefined - } - return { - code: Number(match.groups.code), - message: match.groups.message, - state: match.groups.state, - } -} - -class PlanetScaleTransaction extends PlanetScaleQueryable implements Transaction { - finished = false - - constructor( - tx: planetScale.Transaction, - readonly options: TransactionOptions, - private txDeferred: Deferred, - private txResultPromise: Promise, - ) { - super(tx) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.txDeferred.resolve() - return Promise.resolve(ok(await this.txResultPromise)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.txDeferred.reject(new RollbackError()) - return Promise.resolve(ok(await this.txResultPromise)) - } - - dispose(): Result { - if (!this.finished) { - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaPlanetScale extends PlanetScaleQueryable implements DriverAdapter { - constructor(client: planetScale.Connection) { - super(client) - } - - async startTransaction() { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - return new Promise>((resolve, reject) => { - const txResultPromise = this.client - .transaction(async (tx) => { - const [txDeferred, deferredPromise] = createDeferred() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve(ok(txWrapper)) - return deferredPromise - }) - .catch((error) => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) - }) - } - - async close() { - return ok(undefined) - 
} -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json deleted file mode 100644 index 2a0d16bd4ccf..000000000000 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "connector-test-kit-executor", - "version": "5.4.0", - "description": "", - "main": "dist/index.js", - "private": true, - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "keywords": [], - "author": "", - "sideEffects": false, - "license": "Apache-2.0", - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", - "@types/pg": "^8.10.2", - "pg": "^8.11.3", - "undici": "^5.26.2" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/README.md b/query-engine/driver-adapters/js/driver-adapter-utils/README.md deleted file mode 100644 index 78938e802bd3..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/driver-adapters-utils - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json deleted file mode 100644 index 64301a7a5533..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@prisma/driver-adapter-utils", - "version": "0.0.0", - "description": "Internal set of utilities and types for Prisma's driver adapters.", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "debug": "^4.3.4" - }, - "devDependencies": { - "@types/debug": "^4.1.8" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts deleted file mode 100644 index 1e3aa36210cf..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { Result, err, ok } from './result' -import type { ErrorCapturingDriverAdapter, DriverAdapter, 
Transaction, ErrorRegistry, ErrorRecord } from './types' - -class ErrorRegistryInternal implements ErrorRegistry { - private registeredErrors: ErrorRecord[] = [] - - consumeError(id: number): ErrorRecord | undefined { - return this.registeredErrors[id] - } - - registerNewError(error: unknown) { - let i = 0 - while (this.registeredErrors[i] !== undefined) { - i++ - } - this.registeredErrors[i] = { error } - return i - } -} - -// *.bind(adapter) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { - const errorRegistry = new ErrorRegistryInternal() - - const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter)) - return { - errorRegistry, - queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), - executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), - flavour: adapter.flavour, - startTransaction: async (...args) => { - const result = await startTransaction(...args) - return result.map((tx) => bindTransaction(errorRegistry, tx)) - }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), - } -} - -// *.bind(transaction) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => { - return { - flavour: transaction.flavour, - options: transaction.options, - queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)), - executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), - commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), - rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), - dispose: wrapSync(errorRegistry, transaction.dispose.bind(transaction)), - } -} - -function wrapAsync( - registry: ErrorRegistryInternal, - fn: (...args: A) => Promise>, -): (...args: A) => Promise> { - return async (...args) => { - try { - return await fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return err({ kind: 'GenericJs', id }) - } - } -} - -function wrapSync( - registry: ErrorRegistryInternal, - fn: (...args: A) => Result, -): (...args: A) => Result { - return (...args) => { - try { - return fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return err({ kind: 'GenericJs', id }) - } - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts deleted file mode 100644 index 5ddc7f20b390..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts +++ /dev/null @@ -1,48 +0,0 @@ -// Same order as in rust driver-adapters' `ColumnType`. -// Note: exporting const enums causes lots of problems with bundlers, so we emulate -// them via regular dictionaries. 
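The pattern referenced in this comment, as a self-contained sketch: a plain `as const` object plus a derived union type gives enum-like ergonomics without emitting a TypeScript `enum`.

```typescript
const ColumnTypeEnum = {
  Int32: 0,
  Text: 7,
  Json: 11,
} as const

// Equivalent of the enum's value type, derived structurally (the same trick types.ts uses):
type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum] // 0 | 7 | 11

const sample: ColumnType = ColumnTypeEnum.Text // fine
// const bad: ColumnType = 99                  // rejected at compile time, like a real enum
```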
-// See: https://hackmd.io/@dzearing/Sk3xV0cLs
-export const ColumnTypeEnum = {
-  // Scalars
-  Int32: 0,
-  Int64: 1,
-  Float: 2,
-  Double: 3,
-  Numeric: 4,
-  Boolean: 5,
-  Char: 6,
-  Text: 7,
-  Date: 8,
-  Time: 9,
-  DateTime: 10,
-  Json: 11,
-  Enum: 12,
-  Bytes: 13,
-  Set: 14,
-  Uuid: 15,
-
-  // Arrays
-  Int32Array: 64,
-  Int64Array: 65,
-  FloatArray: 66,
-  DoubleArray: 67,
-  NumericArray: 68,
-  BooleanArray: 69,
-  CharArray: 70,
-  TextArray: 71,
-  DateArray: 72,
-  TimeArray: 73,
-  DateTimeArray: 74,
-  JsonArray: 75,
-  EnumArray: 76,
-  BytesArray: 77,
-  UuidArray: 78,
-
-  // Custom
-  UnknownNumber: 128,
-} as const
-
-// This string value paired with `ColumnType.Json` will be treated as JSON `null`
-// when converting to a quaint value. This is to work around JS/JSON null values
-// already being used to represent database NULLs.
-export const JsonNullMarker = '$__prisma_null'
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts
deleted file mode 100644
index e0a1fe380fa2..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import { debug as Debug } from 'debug'
-
-export { Debug }
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts
deleted file mode 100644
index e7c13be99966..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export { bindAdapter } from './binder'
-export { ColumnTypeEnum, JsonNullMarker } from './const'
-export { Debug } from './debug'
-export { ok, err, type Result } from './result'
-export type * from './types'
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts
deleted file mode 100644
index 5af95db68671..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import { Error } from './types'
-export type Result<T> = {
-  // common methods
-  map<U>(fn: (value: T) => U): Result<U>
-  flatMap<U>(fn: (value: T) => Result<U>): Result<U>
-} & (
-  | {
-      readonly ok: true
-      readonly value: T
-    }
-  | {
-      readonly ok: false
-      readonly error: Error
-    }
-)
-
-export function ok<T>(value: T): Result<T> {
-  return {
-    ok: true,
-    value,
-    map(fn) {
-      return ok(fn(value))
-    },
-    flatMap(fn) {
-      return fn(value)
-    },
-  }
-}
-
-export function err<T>(error: Error): Result<T> {
-  return {
-    ok: false,
-    error,
-    map() {
-      return err(error)
-    },
-    flatMap() {
-      return err(error)
-    },
-  }
-}
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
deleted file mode 100644
index 92019f81824b..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
+++ /dev/null
@@ -1,132 +0,0 @@
-import { ColumnTypeEnum } from './const'
-import { Result } from './result'
-
-export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum]
-
-export interface ResultSet {
-  /**
-   * List of column types appearing in a database query, in the same order as `columnNames`.
-   * They are used within the Query Engine to convert values from JS to Quaint values.
-   */
-  columnTypes: Array<ColumnType>
-
-  /**
-   * List of column names appearing in a database query, in the same order as `columnTypes`.
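The `Result` helpers defined in `result.ts` above compose like a conventional either type. A brief sketch (the `id: -1` is a placeholder for illustration, not a real error-registry entry):

```typescript
import { ok, err, type Result } from '@prisma/driver-adapter-utils'

function parsePort(raw: string): Result<number> {
  const n = Number(raw)
  return Number.isInteger(n) && n > 0 && n < 65536 ? ok(n) : err({ kind: 'GenericJs', id: -1 })
}

// `map` transforms successes only; an `err` short-circuits through unchanged.
const doubled = parsePort('5432').map((p) => p * 2)
if (doubled.ok) console.log(doubled.value) // 10864
```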
- */
-  columnNames: Array<string>
-
-  /**
-   * List of rows retrieved from a database query.
-   * Each row is a list of values, whose length matches `columnNames` and `columnTypes`.
-   */
-  rows: Array<Array<unknown>>
-
-  /**
-   * The last ID of an `INSERT` statement, if any.
-   * This is required for `AUTO_INCREMENT` columns in MySQL and SQLite-flavoured databases.
-   */
-  lastInsertId?: string
-}
-
-export type Query = {
-  sql: string
-  args: Array<unknown>
-}
-
-export type Error =
-  | {
-      kind: 'GenericJs'
-      id: number
-    }
-  | {
-      kind: 'Postgres'
-      code: string
-      severity: string
-      message: string
-      detail: string | undefined
-      column: string | undefined
-      hint: string | undefined
    }
-  | {
-      kind: 'Mysql'
-      code: number
-      message: string
-      state: string
-    }
-  | {
-      kind: 'Sqlite'
-      /**
-       * Sqlite extended error code: https://www.sqlite.org/rescode.html
-       */
-      extendedCode: number
-      message: string
-    }
-
-export interface Queryable {
-  readonly flavour: 'mysql' | 'postgres' | 'sqlite'
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters,
-   * and returning the type-aware result set of the query.
-   *
-   * This is the preferred way of executing `SELECT` queries.
-   */
-  queryRaw(params: Query): Promise<Result<ResultSet>>
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters,
-   * and returning the number of affected rows.
-   *
-   * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries,
-   * as well as transactional queries.
-   */
-  executeRaw(params: Query): Promise<Result<number>>
-}
-
-export interface DriverAdapter extends Queryable {
-  /**
-   * Starts a new transaction.
-   */
-  startTransaction(): Promise<Result<Transaction>>
-
-  /**
-   * Closes the connection to the database, if any.
-   */
-  close: () => Promise<Result<void>>
-}
-
-export type TransactionOptions = {
-  usePhantomQuery: boolean
-}
-
-export interface Transaction extends Queryable {
-  /**
-   * Transaction options.
-   */
-  readonly options: TransactionOptions
-  /**
-   * Commit the transaction.
-   */
-  commit(): Promise<Result<void>>
-  /**
-   * Rolls back the transaction.
-   */
-  rollback(): Promise<Result<void>>
-  /**
-   * Discards and closes the transaction which may or may not have been committed or rolled back.
-   * This operation must be synchronous. If the implementation requires creating new
-   * asynchronous tasks on the event loop, the driver is responsible for handling the errors
-   * appropriately to ensure they don't crash the application.
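Putting the interfaces above together, the smallest object satisfying the `DriverAdapter` contract looks roughly like this. This is a no-op sketch for illustration only, not a real driver:

```typescript
import { ok, type DriverAdapter, type ResultSet, type Transaction } from '@prisma/driver-adapter-utils'

const emptySet: ResultSet = { columnNames: [], columnTypes: [], rows: [] }

const noopTx: Transaction = {
  flavour: 'sqlite',
  options: { usePhantomQuery: true },
  queryRaw: async () => ok(emptySet),
  executeRaw: async () => ok(0),
  commit: async () => ok(undefined),
  rollback: async () => ok(undefined),
  dispose: () => ok(undefined), // synchronous, per the contract documented above
}

const noopAdapter: DriverAdapter = {
  flavour: 'sqlite',
  queryRaw: async () => ok(emptySet),
  executeRaw: async () => ok(0),
  startTransaction: async () => ok(noopTx),
  close: async () => ok(undefined),
}
```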
- */
-  dispose(): Result<void>
-}
-
-export interface ErrorCapturingDriverAdapter extends DriverAdapter {
-  readonly errorRegistry: ErrorRegistry
-}
-
-export interface ErrorRegistry {
-  consumeError(id: number): ErrorRecord | undefined
-}
-
-export type ErrorRecord = { error: unknown }
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json
deleted file mode 100644
index 2c2e266bdb3b..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "extends": "../tsconfig.json",
-  "compilerOptions": {
-    "outDir": "declaration"
-  }
-}
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json
deleted file mode 100644
index 3c43903cfdd1..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "extends": "../tsconfig.json"
-}
diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json
deleted file mode 100644
index 2036794f8c02..000000000000
--- a/query-engine/driver-adapters/js/package.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "private": true,
-  "name": "js",
-  "version": "0.0.2",
-  "description": "",
-  "engines": {
-    "node": ">=16.13",
-    "pnpm": ">=8.6.6 <9"
-  },
-  "license": "Apache-2.0",
-  "scripts": {
-    "build": "pnpm -r run build",
-    "lint": "pnpm -r run lint"
-  },
-  "keywords": [],
-  "author": "",
-  "devDependencies": {
-    "@types/node": "^20.5.1",
-    "tsup": "^7.2.0",
-    "tsx": "^3.12.7",
-    "typescript": "^5.1.6"
-  }
-}
diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml
deleted file mode 100644
index f9e70da7ee5a..000000000000
--- a/query-engine/driver-adapters/js/pnpm-workspace.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-packages:
-  - './adapter-libsql'
-  - './adapter-neon'
-  - './adapter-pg'
-  - './adapter-planetscale'
-  - './connector-test-kit-executor'
-  - './driver-adapter-utils'
-  - './smoke-test-js'
diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example
deleted file mode 100644
index 15a286787cbd..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example
+++ /dev/null
@@ -1,26 +0,0 @@
-# Uncomment "source_up" if you need to load the .envrc at the root of the
-# `prisma-engines` repository before loading this one (for example, if you
-# are using Nix).
-#
-# source_up

-export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict"
-export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10"
-
-# Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable`
-export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438"
-
-# Set this to a `file:` URL when using a local sqlite database (either
-# standalone or as an embedded replica). Otherwise, when using a remote Turso
-# (or sqld) database in HTTP mode directly without an embedded replica, set its
-# URL here.
-export JS_LIBSQL_DATABASE_URL="file:${PWD}/libsql.db"
-
-# # Set this to the URL of a remote Turso database when using an embedded replica.
-# export JS_LIBSQL_SYNC_URL=""
-
-# # Provide an auth token when using a remote Turso database.
-# export JS_LIBSQL_AUTH_TOKEN=""
-
-# Can be one of "number" (the default when nothing is specified), "bigint" or "string". "bigint" works best with Prisma.
-export JS_LIBSQL_INT_MODE="bigint"
diff --git a/query-engine/driver-adapters/js/smoke-test-js/.gitignore b/query-engine/driver-adapters/js/smoke-test-js/.gitignore
deleted file mode 100644
index be550f99317f..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-libsql.db
-libsql.db-journal
-libsql.db-shm
-libsql.db-wal
diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md
deleted file mode 100644
index f1b81df5d268..000000000000
--- a/query-engine/driver-adapters/js/smoke-test-js/README.md
+++ /dev/null
@@ -1,79 +0,0 @@
-# @prisma/driver-adapters-smoke-tests-js
-
-This is a playground for testing the `libquery` client with the experimental Node.js drivers.
-It contains a subset of `@prisma/client`, plus some handy executable smoke tests:
-- [`./src/libquery`](./src/libquery): it contains smoke tests using a local `libquery`, the Query Engine library.
-- [`./src/client`](./src/client): it contains smoke tests using `@prisma/client`.
-
-## How to set up
-
-We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory.
-It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon require either Node.js `v18`+ or a custom `fetch` function.
-
-In the parent directory (`cd ..`):
-- Build the driver adapters via `pnpm i && pnpm build`
-
-In the current directory:
-- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template
-- Install Node.js dependencies via
-  ```bash
-  pnpm i
-  ```
-
-(or run `sh ./setup.sh`)
-
-Anywhere in the repository:
-- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine
-
-### PlanetScale
-
-If you don't have a connection string yet:
-
-- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/)
-- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc).
-- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`.
-
-In the current directory:
-- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data.
-- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database.
-  For more fine-grained control:
-  - Run `pnpm planetscale:libquery` to test using `libquery`
-  - Run `pnpm planetscale:client` to test using `@prisma/client`
-
-### Neon
-
-If you don't have a connection string yet:
-
-- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech).
-- Paste the connection string to `JS_NEON_DATABASE_URL`.
-
-In the current directory:
-- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data.
-- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection.
- For more fine-grained control: - - Run `pnpm neon:ws:libquery` to test using `libquery` - - Run `pnpm neon:ws:client` to test using `@prisma/client` -- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. - For more fine-grained control: - - Run `pnpm neon:http:libquery` to test using `libquery` - - Run `pnpm neon:http:client` to test using `@prisma/client` - -### Pg - -Start database via `docker compose up postgres15` in `/docker`. - -In the current directory: -- Run `pnpm prisma:pg` to push the Prisma schema and insert the test data. -- Run `pnpm pg` to run smoke tests using `libquery` against the PostgreSQL database, using `pg` - For more fine-grained control: - - Run `pnpm pg:libquery` to test using `libquery` - - Run `pnpm pg:client` to test using `@prisma/client` - -### Libsql - -In the current directory: -- Run `pnpm prisma:libsql` to push the Prisma schema and insert the test data. -- Run `pnpm libsql` to run smoke tests using `libquery` against the SQLite database, using `libSQL` - For more fine-grained control: - - Run `pnpm libsql:libquery` to test using `libquery` - - Run `pnpm libsql:client` to test using `@prisma/client` \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json deleted file mode 100644 index 31362c1cc873..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "@prisma/driver-adapters-smoke-tests-js", - "private": true, - "type": "module", - "version": "5.4.0", - "description": "", - "scripts": { - "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", - "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", - "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", - "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", - "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", - "prisma:db:push:sqlite": "prisma db push --schema ./prisma/sqlite/schema.prisma --force-reset", - "prisma:db:execute:sqlite": "prisma db execute --schema ./prisma/sqlite/schema.prisma --file ./prisma/sqlite/commands/type_test/insert.sql", - "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", - "prisma:neon:ws": "pnpm prisma:neon", - "prisma:neon:http": "pnpm prisma:neon", - "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", - "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", - 
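Each of these scripts ultimately runs a `node:test` suite through the `tsx` loader with the relevant `DATABASE_URL` exported. As a rough sketch of the shape of such a test, using the pg adapter defined earlier in this patch (the schema and data come from the `prisma:pg` script above):

```typescript
import { describe, it } from 'node:test'
import assert from 'node:assert'
import pg from 'pg'
import { PrismaPg } from '@prisma/adapter-pg'

describe('pg smoke test', () => {
  it('runs a trivial query through the adapter', async () => {
    const pool = new pg.Pool({ connectionString: process.env.JS_PG_DATABASE_URL })
    const adapter = new PrismaPg(pool)

    const result = await adapter.queryRaw({ sql: 'SELECT 1 AS one', args: [] })
    assert(result.ok)
    assert.deepEqual(result.value.columnNames, ['one'])

    await pool.end()
  })
})
```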
"neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", - "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", - "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", - "pg": "pnpm pg:libquery && pnpm pg:client", - "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/errors.test.ts", - "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", - "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", - "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client", - "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"", - "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/libsql.test.ts", - "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/libsql.test.ts", - "libsql": "pnpm libsql:libquery && pnpm libsql:client" - }, - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": true, - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.2", - "@prisma/driver-adapter-utils": "workspace:*", - "pg": "^8.11.3", - "superjson": "^1.13.1", - "undici": "^5.26.2" - }, - "devDependencies": { - "@types/node": "^20.5.1", - "@types/pg": "^8.10.2", - "cross-env": "^7.0.3", - "prisma": "5.4.2", - "tsx": "^3.12.7" - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql deleted file mode 100644 index 6641eff216b2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql +++ /dev/null @@ -1,51 +0,0 @@ -INSERT INTO type_test ( - tinyint_column, - smallint_column, - mediumint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - bit_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - year_column, - datetime_column, - timestamp_column, - json_column, - enum_column, - binary_column, - varbinary_column, - blob_column, - set_column -) VALUES ( - 127, -- tinyint - 32767, -- smallint - 
8388607, -- mediumint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 1, -- bit - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - 2023, -- year - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3', -- enum - 0x4D7953514C, -- binary - 0x48656C6C6F20, -- varbinary - _binary 'binary', -- blob - 'option1,option3' -- set -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma deleted file mode 100644 index 59efb33a5594..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ /dev/null @@ -1,125 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - tinyint_column Int @db.TinyInt - tinyint_column_null Int? @db.TinyInt - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - mediumint_column Int @db.MediumInt - mediumint_column_null Int? @db.MediumInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Float - float_column_null Float? @db.Float - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - bit_column Boolean @db.Bit(1) - bit_column_null Boolean? @db.Bit(1) - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String @db.Text - text_column_null String? @db.Text - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - year_column Int @db.Year - year_column_null Int? @db.Year - datetime_column DateTime @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? - binary_column Bytes @db.Binary(64) - binary_column_null Bytes? @db.Binary(64) - varbinary_column Bytes @db.VarBinary(128) - varbinary_column_null Bytes? @db.VarBinary(128) - blob_column Bytes @db.Blob - blob_null Bytes? @db.Blob - set_column String - set_column_null String? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? 
- id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? -} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql deleted file mode 100644 index 170bafb9d810..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql +++ /dev/null @@ -1,35 +0,0 @@ -INSERT INTO type_test ( - smallint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - datetime_column, - timestamp_column, - json_column, - enum_column -) VALUES ( - 32767, -- smallint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3' -- enum -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma deleted file mode 100644 index 7cd31f406b9d..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ /dev/null @@ -1,117 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgres" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Real - float_column_null Float? @db.Real - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String - text_column_null String? - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - datetime_column DateTime @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? 
- id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? - users User[] -} - -model User { - id String @id @default(uuid()) - email String - favoriteProduct Product? @relation(fields: [productId], references: [id]) - productId String? -} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql deleted file mode 100644 index 014592d2fa2c..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql +++ /dev/null @@ -1,17 +0,0 @@ -INSERT INTO type_test ( - int_column, - bigint_column, - double_column, - decimal_column, - boolean_column, - text_column, - datetime_column -) VALUES ( - 2147483647, -- int - 9223372036854775807, -- bigint - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'This is a long text...', -- text - '2023-07-24 23:59:59.415' -- datetime -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql deleted file mode 100644 index 31c63d423e22..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql +++ /dev/null @@ -1,85 +0,0 @@ --- CreateTable -CREATE TABLE "type_test" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "int_column" INTEGER NOT NULL, - "int_column_null" INTEGER, - "bigint_column" BIGINT NOT NULL, - "bigint_column_null" BIGINT, - "double_column" REAL NOT NULL, - "double_column_null" REAL, - "decimal_column" DECIMAL NOT NULL, - "decimal_column_null" DECIMAL, - "boolean_column" BOOLEAN NOT NULL, - "boolean_column_null" BOOLEAN, - "text_column" TEXT NOT NULL, - "text_column_null" TEXT, - "datetime_column" DATETIME NOT NULL, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_2" ( - "id" TEXT NOT NULL PRIMARY KEY, - "datetime_column" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_3" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "bytes" BLOB NOT NULL -); - --- CreateTable -CREATE TABLE "Child" ( - "c" TEXT NOT NULL, - "c_1" TEXT NOT NULL, - "c_2" TEXT NOT NULL, - "parentId" TEXT, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "Parent" ( - "p" TEXT NOT NULL, - "p_1" TEXT NOT NULL, - "p_2" TEXT NOT NULL, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "authors" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "firstName" TEXT NOT NULL, - "lastName" TEXT NOT NULL, - "age" INTEGER NOT NULL -); - --- CreateTable -CREATE TABLE "Product" ( - "id" TEXT NOT NULL PRIMARY KEY, - "properties" TEXT NOT NULL, - "properties_null" TEXT -); - --- CreateTable -CREATE TABLE "Unique" ( - "email" TEXT NOT NULL PRIMARY KEY, -); - --- 
CreateIndex -CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_parentId_key" ON "Child"("parentId"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_1_c_2_key" ON "Child"("c_1", "c_2"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_key" ON "Parent"("p"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_1_p_2_key" ON "Parent"("p_1", "p_2"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml deleted file mode 100644 index e5e5c4705ab0..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml +++ /dev/null @@ -1,3 +0,0 @@ -# Please do not edit this file manually -# It should be added in your version-control system (i.e. Git) -provider = "sqlite" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma deleted file mode 100644 index bde23dee66ac..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma +++ /dev/null @@ -1,79 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - double_column Float - double_column_null Float? - decimal_column Decimal - decimal_column_null Decimal? - boolean_column Boolean - boolean_column_null Boolean? - text_column String - text_column_null String? - datetime_column DateTime - datetime_column_null DateTime? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) - datetime_column_null DateTime? -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties String - properties_null String? -} - -model Unique { - email String @id -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/setup.sh b/query-engine/driver-adapters/js/smoke-test-js/setup.sh deleted file mode 100644 index 7654679db14e..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/setup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -cd .. 
|| return -pnpm i && pnpm build -cargo build -p query-engine-node-api -cd smoke-test-js || exit -pnpm i \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts deleted file mode 100644 index b23cf2d97fb8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { describe, it } from 'node:test' -import path from 'node:path' -import assert from 'node:assert' -import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@prisma/driver-adapter-utils' -import { getLibQueryEnginePath } from '../libquery/util' - -export async function smokeTestClient(driverAdapter: DriverAdapter) { - const provider = driverAdapter.flavour - - const log = [ - { - emit: 'event', - level: 'query', - } as const, - ] - - const dirname = path.dirname(new URL(import.meta.url).pathname) - process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) - - // Run twice, once with adapter and once fully without - for (const adapter of [driverAdapter, null]) { - const isUsingDriverAdapters = adapter !== null - describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { - - it('expected error (on duplicate insert) as exception thrown / promise rejected', async () => { - const prisma = new PrismaClient({ adapter, log }) - - await assert.rejects( - async () => { - const result = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - const result2 = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - }, - (err) => { - assert.match(err.message, /unique/i); - return true; - }, - ); - - }) - - it('batch queries', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.$queryRawUnsafe('SELECT 1'), - prisma.$queryRawUnsafe('SELECT 2'), - prisma.$queryRawUnsafe('SELECT 3'), - ]) - - const defaultExpectedQueries = [ - 'BEGIN', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - const driverAdapterExpectedQueries = [ - '-- Implicit "BEGIN" query via underlying driver', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - '-- Implicit "COMMIT" query via underlying driver', - ] - - // TODO: sqlite should be here too but it's too flaky the way the test is currently written, - // only a subset of logs arrives on time (from 2 to 4 out of 5) - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, driverAdapterExpectedQueries) - } else { - assert.deepEqual(queries, defaultExpectedQueries) - } - } else if (['postgres'].includes(provider)) { - // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. 
- assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) - assert.deepEqual( - queries.filter((q) => q !== 'DEALLOCATE ALL'), - defaultExpectedQueries, - ) - } - }) - - if (provider !== 'sqlite') { - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }) - - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', '-- Implicit "BEGIN" query via underlying driver']) - } else { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) - } - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) - } else { - describe('isolation levels with sqlite', () => { - it('accepts Serializable as a no-op', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'Serializable', - }) - - console.log("queries", queries) - - if (isUsingDriverAdapters) { - assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') - assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') - } else { - assert.equal(queries.at(0), 'BEGIN') - assert.equal(queries.at(-1), 'COMMIT') - } - - assert(!queries.find((q) => q.includes('SET TRANSACTION ISOLATION LEVEL'))) - }) - - it('throws on unsupported isolation levels', async () => { - const prisma = new PrismaClient({ adapter }) - - assert.rejects( - prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }), - ) - }) - - }) - - } - - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) - - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) - - }) - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts deleted file mode 100644 index f216b2a02ac7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('libsql with @prisma/client', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - - if (syncUrl) { - await client.sync() - } - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts 
b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts deleted file mode 100644 index 53156ac56249..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { smokeTestClient } from './client' - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const connection = neon(connectionString) - const adapter = new PrismaNeonHTTP(connection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts deleted file mode 100644 index 37b0a9088bb7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@prisma/adapter-neon' -import { WebSocket } from 'undici' -import { smokeTestClient } from './client' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts deleted file mode 100644 index 99048ad3d95f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { smokeTestClient } from './client' - -describe('pg with @prisma/client', async () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts deleted file mode 100644 index 3c22b7aa3062..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('planetscale with @prisma/client', async () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? 
'' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts deleted file mode 100644 index bd491db289a3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts +++ /dev/null @@ -1,78 +0,0 @@ -import * as Transaction from './Transaction' - -export type JsonQuery = { - modelName?: string - action: JsonQueryAction - query: JsonFieldSelection -} - -export type JsonBatchQuery = { - batch: JsonQuery[] - transaction?: { isolationLevel?: Transaction.IsolationLevel } -} - -export type JsonQueryAction = - | 'findUnique' - | 'findUniqueOrThrow' - | 'findFirst' - | 'findFirstOrThrow' - | 'findMany' - | 'createOne' - | 'createMany' - | 'updateOne' - | 'updateMany' - | 'deleteOne' - | 'deleteMany' - | 'upsertOne' - | 'aggregate' - | 'groupBy' - | 'executeRaw' - | 'queryRaw' - | 'runCommandRaw' - | 'findRaw' - | 'aggregateRaw' - -export type JsonFieldSelection = { - arguments?: Record - selection: JsonSelectionSet -} - -export type JsonSelectionSet = { - $scalars?: boolean - $composites?: boolean -} & { - [fieldName: string]: boolean | JsonFieldSelection -} - -export type JsonArgumentValue = - | number - | string - | boolean - | null - | JsonTaggedValue - | JsonArgumentValue[] - | { [key: string]: JsonArgumentValue } - -export type DateTaggedValue = { $type: 'DateTime'; value: string } -export type DecimalTaggedValue = { $type: 'Decimal'; value: string } -export type BytesTaggedValue = { $type: 'Bytes'; value: string } -export type BigIntTaggedValue = { $type: 'BigInt'; value: string } -export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } } -export type EnumTaggedValue = { $type: 'Enum'; value: string } -export type JsonTaggedValue = { $type: 'Json'; value: string } - -export type JsonInputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | FieldRefTaggedValue - | JsonTaggedValue - | EnumTaggedValue - -export type JsonOutputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | JsonTaggedValue diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts deleted file mode 100644 index a25b3dd26728..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineConfig } from './QueryEngine' - -export type QueryEngineInstance = { - connect(headers: string): Promise - disconnect(headers: string): Promise - /** - * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest` - * @param headersStr JSON.stringified `QueryEngineRequestHeaders` - */ - query(requestStr: string, headersStr: string, transactionId?: string): Promise - sdlSchema(): Promise - dmmf(traceparent: string): Promise - startTransaction(options: string, traceHeaders: string): Promise - commitTransaction(id: string, traceHeaders: string): Promise - rollbackTransaction(id: string, traceHeaders: string): Promise - metrics(options: string): Promise -} - -export interface QueryEngineConstructor { - new( - config: QueryEngineConfig, - 
logger: (log: string) => void, - driverAdapter?: ErrorCapturingDriverAdapter, - ): QueryEngineInstance -} - -export interface LibraryLoader { - loadLibrary(): Promise -} - -// Main -export type Library = { - QueryEngine: QueryEngineConstructor - - version: () => { - // The commit hash of the engine - commit: string - // Currently 0.1.0 (Set in Cargo.toml) - version: string - } - /** - * This returns a string representation of `DMMF.Document` - */ - dmmf: (datamodel: string) => Promise -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts deleted file mode 100644 index 5bab74493dee..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { JsonBatchQuery, JsonQuery } from './JsonProtocol' -import * as Transaction from './Transaction' - -// Events -export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent - -export type QueryEngineLogEvent = { - level: string - module_path: string - message: string - span?: boolean -} - -export type QueryEngineQueryEvent = { - level: 'info' - module_path: string - query: string - item_type: 'query' - params: string - duration_ms: string - result: string -} - -export type QueryEnginePanicEvent = { - level: 'error' - module_path: string - message: 'PANIC' - reason: string - file: string - line: string - column: string -} - -// Configuration -export type QueryEngineLogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'off' - -export type QueryEngineTelemetry = { - enabled: Boolean - endpoint: string -} - -export type GraphQLQuery = { - query: string - variables: object -} - -export type EngineProtocol = 'graphql' | 'json' -export type EngineQuery = GraphQLQuery | JsonQuery - -export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] - -export type QueryEngineConfig = { - // TODO rename datamodel here and other places - datamodel: string - configDir: string - logQueries: boolean - ignoreEnvVarErrors: boolean - datasourceOverrides?: Record - env: Record - logLevel: QueryEngineLogLevel - telemetry?: QueryEngineTelemetry - engineProtocol: EngineProtocol -} - -// Errors -export type SyncRustError = { - is_panic: boolean - message: string - meta: { - full_error: string - } - error_code: string -} - -export type RustRequestError = { - is_panic: boolean - message: string - backtrace: string -} - -export type QueryEngineResult = { - data: T - elapsed: number -} - -export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery - -export type QueryEngineBatchGraphQLRequest = { - batch: QueryEngineRequest[] - transaction?: boolean - isolationLevel?: Transaction.IsolationLevel -} - -export type QueryEngineRequest = { - query: string - variables: Object -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts deleted file mode 100644 index 1c5786cc66da..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts +++ /dev/null @@ -1,35 +0,0 @@ -export enum IsolationLevel { - ReadUncommitted = 'ReadUncommitted', - ReadCommitted = 'ReadCommitted', - RepeatableRead = 'RepeatableRead', - Snapshot = 'Snapshot', - Serializable = 'Serializable', -} - -/** - * maxWait ?= 2000 - * timeout ?= 5000 - */ -export type Options = { - maxWait?: number - timeout?: 
number - isolationLevel?: IsolationLevel -} - -export type InteractiveTransactionInfo = { - /** - * Transaction ID returned by the query engine. - */ - id: string - - /** - * Arbitrary payload the meaning of which depends on the `Engine` implementation. - * For example, `DataProxyEngine` needs to associate different API endpoints with transactions. - * In `LibraryEngine` and `BinaryEngine` it is currently not used. - */ - payload: Payload -} - -export type TransactionHeaders = { - traceparent?: string -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts deleted file mode 100644 index 13ac5cd9ec81..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { bindAdapter } from '@prisma/driver-adapter-utils' -import test, { after, before, describe } from 'node:test' -import { createQueryFn, initQueryEngine, throwAdapterError } from './util' -import assert from 'node:assert' - -const fakeAdapter = bindAdapter({ - flavour: 'postgres', - startTransaction() { - throw new Error('Error in startTransaction') - }, - - queryRaw() { - throw new Error('Error in queryRaw') - }, - - executeRaw() { - throw new Error('Error in executeRaw') - }, - close() { - return Promise.resolve({ ok: true, value: undefined }) - }, -}) - -const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma') -const doQuery = createQueryFn(engine, fakeAdapter) - -const startTransaction = async () => { - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}')) - if (res['error_code']) { - throwAdapterError(res, fakeAdapter) - } -} - -describe('errors propagation', () => { - before(async () => { - await engine.connect('{}') - }) - after(async () => { - await engine.disconnect('{}') - }) - - test('works for queries', async () => { - await assert.rejects( - doQuery({ - modelName: 'Product', - action: 'findMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }), - /Error in queryRaw/, - ) - }) - - test('works for executeRaw', async () => { - await assert.rejects( - doQuery({ - action: 'executeRaw', - query: { - arguments: { - query: 'SELECT 1', - parameters: '[]', - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in executeRaw/, - ) - }) - - test('works with implicit transaction', async () => { - await assert.rejects( - doQuery({ - modelName: 'User', - action: 'createOne', - query: { - arguments: { - data: { - email: 'user@example.com', - favoriteProduct: { - create: { - properties: {}, - }, - }, - }, - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in startTransaction/, - ) - }) - - test('works with explicit transaction', async () => { - await assert.rejects(startTransaction(), /Error in startTransaction/) - }) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts deleted file mode 100644 index c50ad3e257ab..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ /dev/null @@ -1,722 +0,0 @@ -import { describe, it, before, after } from 'node:test' -import assert from 'node:assert' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineInstance } from '../engines/types/Library' 
-import { createQueryFn, initQueryEngine } from './util' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function smokeTestLibquery( - adapter: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, - supportsTransactions = true, -) { - const engine = initQueryEngine(adapter, prismaSchemaRelativePath) - const flavour = adapter.flavour - - const doQuery = createQueryFn(engine, adapter) - - describe('using libquery with Driver Adapters', () => { - before(async () => { - await engine.connect('trace') - }) - - after(async () => { - await engine.disconnect('trace') - await adapter.close() - }) - - it('create JSON values', async () => { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, - }) - - const created = await doQuery({ - action: 'createOne', - modelName: 'Product', - query: { - arguments: { - data: { - properties: json, - properties_null: null, - }, - }, - selection: { - properties: true, - }, - }, - }) - - if (flavour !== 'sqlite') { - assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') - } - - console.log('[nodejs] created', JSON.stringify(created, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'Product', - query: { - selection: { - id: true, - properties: true, - properties_null: true, - }, - }, - }) - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'Product', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create with autoincrement', async () => { - await doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - }) - - it('create non scalar types', async () => { - const create = await doQuery({ - action: 'createOne', - modelName: 'type_test_2', - query: { - arguments: { - data: {}, - }, - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - }, - }) - - console.log('[nodejs] create', JSON.stringify(create, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_2', - query: { - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - arguments: { - where: {}, - }, - }, - }) - - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'type_test_2', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create/delete parent and child', async () => { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create explicit transaction', async () => { - if (!supportsTransactions) return - - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') - const tx_id = JSON.parse(startResponse).id - console.log('[nodejs] transaction id', tx_id) - assert.notStrictEqual(tx_id, undefined) - - await doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - }) - - it('expected error (on duplicate insert) as json result (not throwing error)', async () => { - await doQuery({ - modelName: 'Unique', - action: 'deleteMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }) - - await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const promise = doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const result = await promise - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') - }) - - describe('read scalar and non scalar types', () => { - if (['mysql'].includes(flavour)) { - it('mysql', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['postgres'].includes(flavour)) { - it('postgres', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['sqlite'].includes(flavour)) { - it('sqlite', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - int_column: true, - bigint_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - text_column: true, - datetime_column: true, - }, - }, - }) - console.log('[nodejs] findMany 
resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else { - throw new Error(`Missing test for flavour ${flavour}`) - } - }) - - it('write and read back bytes', async () => { - const createResultSet = await doQuery({ - action: 'createOne', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - arguments: { - data: { - bytes: { - $type: 'Bytes', - value: 'AQID', - }, - }, - }, - }, - }) - console.log('[nodejs] createOne resultSet:') - console.dir(createResultSet, { depth: Infinity }) - - const findResultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet:') - console.dir(findResultSet, { depth: Infinity }) - }) - }) -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } - - async testFindManyTypeTest() { - await this.testFindManyTypeTestMySQL() - await this.testFindManyTypeTestPostgres() - } - - private async testFindManyTypeTestMySQL() { - if (this.flavour !== 'mysql') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - private async testFindManyTypeTestPostgres() { - if (this.flavour !== 'postgres') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - async createAutoIncrement() { - await this.doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await this.doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - } - - async testCreateAndDeleteChildParent() { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' - // ] - const resultDeleteMany = await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) - } - - async testTransaction() { - const startResponse = await this.engine.startTransaction( - JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), - 'trace', - ) - - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await this.doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - } - - private async doQuery(query: JsonQuery, tx_id?: string) { - const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } - } - return parsedResult - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts deleted file mode 100644 index 7f0a1038ec74..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('libsql', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - const driverAdapter = bindAdapter(adapter) - - if (syncUrl) { - await client.sync() - } - - smokeTestLibquery(driverAdapter, '../../prisma/sqlite/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts deleted file mode 100644 index 02872b885fe3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('neon (HTTP)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const neonConnection = neon(connectionString) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma', false) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts deleted file mode 100644 index 54765f5961ba..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { PrismaNeon } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon (WebSocket)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts deleted file mode 100644 index 9b79e7284be8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('pg', () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts deleted file mode 100644 index bb7c81805adc..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('planetscale', () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts deleted file mode 100644 index 783eb76759d2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ /dev/null @@ -1,71 +0,0 @@ -import path from 'node:path' -import os from 'node:os' -import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import { Library, QueryEngineInstance } from '../engines/types/Library' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function initQueryEngine( - driver: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, -): QueryEngineInstance { - const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = getLibQueryEnginePath(dirname) - - const schemaPath = path.join(dirname, prismaSchemaRelativePath) - - console.log('[nodejs] read Prisma schema from', schemaPath) - - const libqueryEngine = { exports: {} as unknown as Library } - // @ts-ignore - process.dlopen(libqueryEngine, libQueryEnginePath) - - const QueryEngine = libqueryEngine.exports.QueryEngine - - const queryEngineOptions = { - datamodel: fs.readFileSync(schemaPath, 'utf-8'), - configDir: '.', - engineProtocol: 'json' as const, - logLevel: 'info' as const, - logQueries: false, - env: process.env, - ignoreEnvVarErrors: false, - } - - const logCallback = (...args) => { - console.log(args) - } - - const engine = new QueryEngine(queryEngineOptions, 
logCallback, driver) - - return engine -} - -export function getLibQueryEnginePath(dirname: String) { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' - return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) -} - -export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { - return async function doQuery(query: JsonQuery, tx_id?: string) { - const result = await engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter) - } - return parsedResult - } -} - -export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) { - if (error.error_code === 'P2036') { - const jsError = adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json b/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/version.sh b/query-engine/driver-adapters/js/version.sh deleted file mode 100755 index 8f592c0e197c..000000000000 --- a/query-engine/driver-adapters/js/version.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Usage: `./version.sh x.y.z` will set the `x.y.z` to every package in the monorepo. - -target_version=$1 -package_dirs=$(pnpm -r list -r --depth -1 --json | jq -r '.[] | .path' | tail -n +2) - -# Iterate through each package directory -for package_dir in $package_dirs; do - # Check if the directory exists - if [ -d "$package_dir" ]; then - # Set the target version using pnpm - (cd "$package_dir" && pnpm version "$target_version" --no-git-tag-version --allow-same-version) - fi -done diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index c43f66a81e72..53133e037b6f 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -31,7 +31,6 @@ pub struct SqliteErrorDef { #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception GenericJs { @@ -64,7 +63,6 @@ impl From for QuaintError { } /// Wrapper for JS-side result type -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum JsResult where T: FromNapiValue, From a9694da0ea5535048ec82f30d4a7e393b77b935e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 15:43:24 +0200 Subject: [PATCH 34/67] ci: schema wasm, revert action to Node v14 for lack of other idea (#4393) --- .github/workflows/publish-prisma-schema-wasm.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 78d139f80772..684576065796 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -36,16 +36,16 @@ jobs: - uses: 
actions/setup-node@v3 with: - node-version: '20.x' - + node-version: '14.x' + # This is needed to be done manually because of `PACKAGE_DIR` used later - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Update version in package.json & Publish @prisma/prisma-schema-wasm - run: + run: # Update version in package.json and return directory for later usage - PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) + PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} env: # Required for publishing From 685d9bcef717766514f83f91d65c356fdc645a56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 15:58:33 +0200 Subject: [PATCH 35/67] ci: finally fix schema wasm publish (#4394) --- .github/workflows/publish-prisma-schema-wasm.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 684576065796..30ecd68a2152 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -36,20 +36,17 @@ jobs: - uses: actions/setup-node@v3 with: - node-version: '14.x' + node-version: '20.x' # This is needed to be done manually because of `PACKAGE_DIR` used later - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Update version in package.json & Publish @prisma/prisma-schema-wasm - run: + run: | # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - # Required for publishing - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # # Failure handlers # From 582b416e6b0ed6f95140cdea85b3a49e2dc3e52b Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 30 Oct 2023 10:50:38 +0100 Subject: [PATCH 36/67] feat(core): add fork of "cuid" with wasm32-unknown-unknown support (#4231) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miguel Fernández --- Cargo.lock | 14 +++++++------- .../connectors/mongodb-query-connector/Cargo.toml | 4 +--- .../connectors/sql-query-connector/Cargo.toml | 4 +--- query-engine/core/Cargo.toml | 2 +- query-engine/dmmf/Cargo.toml | 2 +- query-engine/prisma-models/Cargo.toml | 11 +++++++---- 6 files changed, 18 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8166394f8c89..35eff530999a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -870,29 +870,26 @@ dependencies = [ [[package]] name = "cuid" version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51294db11d38eb763c92936c5c88425d0090e27dce21dd15748134af9e53e739" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "base36", "cuid-util", "cuid2", - "hostname", "num", "once_cell", "rand 0.8.5", + "sha3", ] [[package]] name = "cuid-util" version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5ea2bfe0336ff1b7ca74819b2df8dfae9afea358aff6b1688baa5c181d8c3713" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" [[package]] name = "cuid2" version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47d99cacd52fd67db7490ad051c8c1973fb75520174d69aabbae08c534c9d0e8" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "cuid-util", "num", @@ -1557,8 +1554,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -3353,6 +3352,7 @@ dependencies = [ "bigdecimal", "chrono", "cuid", + "getrandom 0.2.10", "itertools", "nanoid", "prisma-value", diff --git a/query-engine/connectors/mongodb-query-connector/Cargo.toml b/query-engine/connectors/mongodb-query-connector/Cargo.toml index d41210342107..c4a02eaa8643 100644 --- a/query-engine/connectors/mongodb-query-connector/Cargo.toml +++ b/query-engine/connectors/mongodb-query-connector/Cargo.toml @@ -22,6 +22,7 @@ tracing-futures = "0.2" uuid.workspace = true indexmap = "1.7" query-engine-metrics = {path = "../../metrics"} +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } [dependencies.prisma-models] path = "../../prisma-models" @@ -46,9 +47,6 @@ workspace = true [dependencies.serde] workspace = true -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] workspace = true diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 5fe3052f2e8d..62d0be640761 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -25,6 +25,7 @@ uuid.workspace = true opentelemetry = { version = "0.17", features = ["tokio"] } tracing-opentelemetry = "0.17.3" quaint.workspace = true +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } [dependencies.connector-interface] package = "query-connector" @@ -44,9 +45,6 @@ version = "0.4" features = ["derive"] version = "1.0" -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] path = "../../../libs/user-facing-errors" diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index c9700bb85f19..caadf6cdba00 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -29,7 +29,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } tracing-opentelemetry = "0.17.4" user-facing-errors = { path = "../../libs/user-facing-errors" } uuid = "1" -cuid = "1.2" +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } schema = { path = "../schema" } lru = "0.7.7" enumflags2 = "0.7" diff --git a/query-engine/dmmf/Cargo.toml b/query-engine/dmmf/Cargo.toml index f4a8bfb6e6f2..cc92c914d4e6 100644 --- a/query-engine/dmmf/Cargo.toml +++ b/query-engine/dmmf/Cargo.toml @@ -10,7 +10,7 @@ serde.workspace = true serde_json.workspace = true schema = { path = "../schema" } indexmap = { version = "1.7", features = ["serde-1"] } -prisma-models = { path = "../prisma-models" } +prisma-models = { path = "../prisma-models", features = ["default_generators"] } [dev-dependencies] expect-test = "1.2.2" diff --git 
a/query-engine/prisma-models/Cargo.toml b/query-engine/prisma-models/Cargo.toml index c7e012afebfb..0becd1fdea70 100644 --- a/query-engine/prisma-models/Cargo.toml +++ b/query-engine/prisma-models/Cargo.toml @@ -10,13 +10,16 @@ prisma-value = { path = "../../libs/prisma-value" } bigdecimal = "0.3" thiserror = "1.0" +getrandom = { version = "0.2" } uuid = { workspace = true, optional = true } -cuid = { version = "1.2", optional = true } +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support", optional = true } nanoid = { version = "0.4.0", optional = true } chrono = { version = "0.4.6", features = ["serde"] } +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.2" +features = ["js"] + [features] -# Support for generating default UUID, CUID, nanoid and datetime values. This -# implies random number generation works, so it won't compile on targets like -# wasm32. +# Support for generating default UUID, CUID, nanoid and datetime values. default_generators = ["uuid/v4", "cuid", "nanoid"] From 79b5ee004f2141588596af36cf709e39094df7f1 Mon Sep 17 00:00:00 2001 From: Lucian Buzzo Date: Mon, 30 Oct 2023 11:19:28 +0000 Subject: [PATCH 37/67] fix: add missing periods in quaint README (#4399) [skip-ci] --- quaint/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/quaint/README.md b/quaint/README.md index e27c56972390..92033db269b1 100644 --- a/quaint/README.md +++ b/quaint/README.md @@ -41,7 +41,7 @@ choice. ```sh > cargo build --features all - ``` +``` ### Testing @@ -70,8 +70,8 @@ This requires the rust nightly channel: > cargo rustdoc --all-features ``` -Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html` +Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html`. ## Security -If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint) +If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint). 
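A note on patch 36 above, since the motivation is easy to miss in the lock-file noise: the stock `cuid` crate could not target `wasm32-unknown-unknown` because its ID generators need OS entropy (and, per the Cargo.lock hunk, a `hostname` fingerprint, which the fork replaces with `sha3`). The sketch below is an illustration, not code from this patch set; it assumes the cuid 1.x API, where `cuid::cuid()` returns `Result<String, CuidError>`.

    // Hypothetical caller of the forked crate (illustration only).
    // On native targets, getrandom reads OS entropy; on wasm32-unknown-unknown,
    // the "js" feature enabled in the prisma-models Cargo.toml hunk routes it
    // through the JS host's crypto.getRandomValues, so the same code compiles
    // and runs on both targets.
    fn new_default_id() -> String {
        cuid::cuid().expect("a random source is available on this target")
    }

The same reasoning covers the other generators behind the `default_generators` feature (`uuid/v4`, `nanoid`): anything that bottoms out in `getrandom` 0.2 needs the target-gated `features = ["js"]` dependency to link on wasm32.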
From 49b44c541132ae3c617fa9a61ad7c048cce871f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 30 Oct 2023 17:23:38 +0100 Subject: [PATCH 38/67] Fix driver adapters tests after migration of driver adapters to prisma (#4398) * Fix tests * Make * Remove symlinks and use the parent directory instead * DRIVER_ADAPTERS_BRANCH=chore/client/adapter-porting-nits add debug to workspace * Build packages in the old directory otherwise @esbuild-register is not found despite being defined as a devDependency within driver adapters @millsp --- .gitignore | 5 ++-- Makefile | 17 +++---------- .../connector-test-kit-executor/package.json | 16 ++++++------ .../pnpm-lock.yaml | 3 --- query-engine/driver-adapters/js/.nvmrc | 1 - query-engine/driver-adapters/package.json | 25 +++++++++++++++++++ .../driver-adapters/pnpm-workspace.yaml | 8 ++++++ 7 files changed, 46 insertions(+), 29 deletions(-) delete mode 100644 query-engine/driver-adapters/js/.nvmrc create mode 100644 query-engine/driver-adapters/package.json create mode 100644 query-engine/driver-adapters/pnpm-workspace.yaml diff --git a/.gitignore b/.gitignore index be185b0f7afc..75c06e9ce68b 100644 --- a/.gitignore +++ b/.gitignore @@ -47,5 +47,6 @@ graph.dot prisma-schema-wasm/nodejs -# This symlink looks orphan here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory -tsconfig.build.adapter.json +# Ignore pnpm-lock.yaml +query-engine/driver-adapters/pnpm-lock.yaml +package-lock.json diff --git a/Makefile b/Makefile index 541738c35d95..a30a32ca1871 100644 --- a/Makefile +++ b/Makefile @@ -285,25 +285,14 @@ test-driver-adapter-planetscale: test-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: build-driver-adapters symlink-driver-adapters - cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build +build-connector-kit-js: build-driver-adapters + cd query-engine/driver-adapters && pnpm i && pnpm build build-driver-adapters: ensure-prisma-present @echo "Building driver adapters..." - @cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build + @cd ../prisma && pnpm --filter "*adapter*" i @echo "Driver adapters build completed."; -symlink-driver-adapters: ensure-prisma-present @echo "Creating symbolic links for driver adapters..."
- @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ - if [ -d "$$dir" ]; then \ - dir_name=$$(basename "$$dir"); \ - ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ - echo "Created symbolic link for $$dir_name"; \ - fi; \ - done; - echo "Symbolic links creation completed."; - ensure-prisma-present: @if [ -d ../prisma ]; then \ cd "$(realpath ../prisma)" && git fetch origin main; \ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index b63694bb4459..153b833df1e1 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -10,8 +10,7 @@ "module": "dist/index.mjs", "private": true, "scripts": { - "build": "tsup ./src/index.ts --format esm --dts", - "lint": "tsc -p ./tsconfig.build.json" + "build": "tsup ./src/index.ts --format esm --dts" }, "keywords": [], "author": "", @@ -21,11 +20,11 @@ "@libsql/client": "0.3.5", "@neondatabase/serverless": "^0.6.0", "@planetscale/database": "1.11.0", - "@prisma/adapter-libsql": "../adapter-libsql", - "@prisma/adapter-neon": "../adapter-neon", - "@prisma/adapter-pg": "../adapter-pg", - "@prisma/adapter-planetscale": "../adapter-planetscale", - "@prisma/driver-adapter-utils": "../driver-adapter-utils", + "@prisma/adapter-libsql": "workspace:*", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", "@types/pg": "^8.10.2", "pg": "^8.11.3", "undici": "^5.26.5", @@ -34,7 +33,6 @@ "devDependencies": { "@types/node": "^20.5.1", "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" + "typescript": "5.2.2" } } \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index d140be7b516c..d4f9fa09277d 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -38,9 +38,6 @@ dependencies: undici: specifier: ^5.26.5 version: 5.26.5 - ws: - specifier: ^8.14.2 - version: 8.14.2 devDependencies: '@types/node': diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc deleted file mode 100644 index 6569dfa4f323..000000000000 --- a/query-engine/driver-adapters/js/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -20.8.1 diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json new file mode 100644 index 000000000000..1362da87700d --- /dev/null +++ b/query-engine/driver-adapters/package.json @@ -0,0 +1,25 @@ +{ + "private": true, + "name": "js", + "version": "0.0.2", + "description": "", + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "license": "Apache-2.0", + "scripts": { + "build": "pnpm -r run build", + "lint": "pnpm -r run lint", + "clean": "git clean -nXd -e !query-engine/driver-adapters" + }, + "keywords": [], + "author": "", + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "typescript": "5.2.2", + "esbuild": "0.19.5", + "esbuild-register": "3.5.0" + } +} diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml new file mode 100644 index 000000000000..d37910ea5ae6 --- /dev/null +++ 
b/query-engine/driver-adapters/pnpm-workspace.yaml @@ -0,0 +1,8 @@ +packages: + - '../../../prisma/packages/adapter-libsql' + - '../../../prisma/packages/adapter-neon' + - '../../../prisma/packages/adapter-pg' + - '../../../prisma/packages/adapter-planetscale' + - '../../../prisma/packages/driver-adapter-utils' + - '../../../prisma/packages/debug' + - './connector-test-kit-executor' \ No newline at end of file From 82dc77df83bf91471cbfcd4d418e06dd04567400 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 1 Nov 2023 10:24:45 +0100 Subject: [PATCH 39/67] driver-adapters: Rename ColumnType::Char to Character (#4402) * driver-adapters: Rename ColumnType::Char to Character To avoid confusion with SQL's CHAR type, which is a fixed-length string, not a single character. * Rename CharArray too --- query-engine/driver-adapters/src/proxy.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 14bfd46e62e0..da03336bdf53 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -117,9 +117,7 @@ pub enum ColumnType { /// - BOOLEAN (BOOLEAN) -> e.g. `1` Boolean = 5, - /// The following PlanetScale type IDs are mapped into Char: - /// - CHAR (CHAR) -> e.g. `"c"` (String-encoded) - Char = 6, + Character = 6, /// The following PlanetScale type IDs are mapped into Text: /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) @@ -184,7 +182,7 @@ pub enum ColumnType { BooleanArray = 69, /// Char array (CHAR_ARRAY in PostgreSQL) - CharArray = 70, + CharacterArray = 70, /// Text array (TEXT_ARRAY in PostgreSQL) TextArray = 71, @@ -346,7 +344,7 @@ fn js_value_to_quaint( "expected a boolean in column '{column_name}', found {mismatch}" )), }, - ColumnType::Char => match json_value { + ColumnType::Character => match json_value { serde_json::Value::String(s) => match s.chars().next() { Some(c) => Ok(QuaintValue::character(c)), None => Ok(QuaintValue::null_character()), @@ -452,7 +450,7 @@ fn js_value_to_quaint( ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name), ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name), ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name), - ColumnType::CharArray => js_array_to_quaint(ColumnType::Char, json_value, column_name), + ColumnType::CharacterArray => js_array_to_quaint(ColumnType::Character, json_value, column_name), ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name), ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name), ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name), @@ -790,7 +788,7 @@ mod proxy_test { #[test] fn js_value_char_to_quaint() { - let column_type = ColumnType::Char; + let column_type = ColumnType::Character; // null test_null(QuaintValue::null_character(), column_type); From 575e022f38b7d026927746aea5caf9949119149a Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Fri, 3 Nov 2023 09:31:30 +0100 Subject: [PATCH 40/67] driver-adapters: convert Decimals to Strings when calling JS (#4408) * driver-adapters: convert Decimals to Strings when calling JS When forwarding the input to the driver adapter, we used to convert decimal values to f64. On large numbers, that lost some precision. Changing this to convert them to strings instead.
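For illustration only (this sketch is not part of the patch, and the literal below is a made-up value): the loss is easy to reproduce in plain Rust, since a decimal with more significant digits than an f64 can hold is rounded on the way through, while forwarding the original string keeps every digit.

    // Standalone sketch of the precision loss described above.
    fn main() {
        let decimal = "1234567890.1234567891"; // hypothetical 20-digit decimal
        let via_f64 = decimal.parse::<f64>().unwrap().to_string();
        println!("as string: {decimal}"); // 1234567890.1234567891 (lossless)
        println!("via f64:   {via_f64}"); // 1234567890.1234568 (digits lost)
    }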
Fix prisma/team-orm#497. TODO: after this gets to the client, unskip the `decimal/presion` test there. * Add comment * Restore postgres conversion * Fix libsql --- .../writes/data_types/native_types/mysql.rs | 29 ++++++++ .../driver-adapters/src/conversion.rs | 55 +++++++-------- .../src/conversion/postgres.rs | 67 +++++++------------ .../driver-adapters/src/conversion/sqlite.rs | 23 +++++++ query-engine/driver-adapters/src/queryable.rs | 1 + 5 files changed, 104 insertions(+), 71 deletions(-) create mode 100644 query-engine/driver-adapters/src/conversion/sqlite.rs diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs index c29b12d5b73f..4d3c3137f4a2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/native_types/mysql.rs @@ -198,6 +198,35 @@ mod mysql { Ok(()) } + fn schema_decimal_vitess() -> String { + let schema = indoc! { + r#"model Model { + #id(id, String, @id, @default(cuid())) + decLarge Decimal @test.Decimal(20, 10) + }"# + }; + + schema.to_owned() + } + + #[connector_test(only(Vitess), schema(schema_decimal_vitess))] + async fn native_decimal_vitess_precision(runner: Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(&runner, r#"mutation { + createOneModel( + data: { + decLarge: "131603421.38724228" + } + ) { + decLarge + } + }"#), + @r###"{"data":{"createOneModel":{"decLarge":"131603421.38724228"}}}"### + ); + + Ok(()) + } + fn schema_string() -> String { let schema = indoc! { r#"model Model { diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index f65cc955fb21..c6ea87f1bfa2 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -1,4 +1,5 @@ pub(crate) mod postgres; +pub(crate) mod sqlite; use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::NapiValue; @@ -59,35 +60,31 @@ impl ToNapiValue for JSArg { } } -pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result<Vec<JSArg>> { - let mut args = Vec::with_capacity(values.len()); - - for qv in values { - let res = match &qv.typed { - quaint::ValueType::Json(s) => match s { - Some(ref s) => { - let json_str = serde_json::to_string(s)?; - JSArg::RawString(json_str) - } - None => JsonValue::Null.into(), - }, - quaint::ValueType::Bytes(bytes) => match bytes { - Some(bytes) => JSArg::Buffer(bytes.to_vec()), - None => JsonValue::Null.into(), - }, - quaint_value @ quaint::ValueType::Numeric(bd) => match bd { - Some(bd) => match bd.to_string().parse::<f64>() { - Ok(double) => JSArg::from(JsonValue::from(double)), - Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), - }, - None => JsonValue::Null.into(), - }, - quaint::ValueType::Array(Some(items)) => JSArg::Array(values_to_js_args(items)?), - quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), - }; +pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result<JSArg> { + let res = match &value.typed { + quaint::ValueType::Json(s) => match s { + Some(ref s) => { + let json_str = serde_json::to_string(s)?; + JSArg::RawString(json_str) + } + None => JsonValue::Null.into(), + }, + quaint::ValueType::Bytes(bytes) => match bytes { + Some(bytes) => JSArg::Buffer(bytes.to_vec()), + None => JsonValue::Null.into(), + },
quaint::ValueType::Numeric(bd) => match bd { + // converting decimal to string to preserve the precision + Some(bd) => JSArg::RawString(bd.to_string()), + None => JsonValue::Null.into(), + }, + quaint::ValueType::Array(Some(ref items)) => JSArg::Array(values_to_js_args(items)?), + quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), + }; - args.push(res); - } + Ok(res) +} - Ok(args) +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + values.iter().map(value_to_js_arg).collect() } diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 21b1ec6b2fb9..8c00d0aae59f 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -5,48 +5,31 @@ use serde_json::value::Value as JsonValue; static TIME_FMT: Lazy = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f")); -pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { - let mut args = Vec::with_capacity(values.len()); - - for qv in values { - let res = match (&qv.typed, qv.native_column_type_name()) { - (quaint::ValueType::DateTime(value), Some("DATE")) => match value { - Some(value) => JSArg::RawString(value.date_naive().to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::DateTime(value), Some("TIME")) => match value { - Some(value) => JSArg::RawString(value.time().to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::DateTime(value), Some("TIMETZ")) => match value { - Some(value) => JSArg::RawString(value.time().format_with_items(TIME_FMT.clone()).to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::DateTime(value), _) => match value { - Some(value) => JSArg::RawString(value.naive_utc().to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::Json(s), _) => match s { - Some(ref s) => { - let json_str = serde_json::to_string(s)?; - JSArg::RawString(json_str) - } - None => JsonValue::Null.into(), - }, - (quaint::ValueType::Bytes(bytes), _) => match bytes { - Some(bytes) => JSArg::Buffer(bytes.to_vec()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::Numeric(bd), _) => match bd { - Some(bd) => JSArg::RawString(bd.to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), - (quaint_value, _) => JSArg::from(JsonValue::from(quaint_value.clone())), - }; +pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { + let res = match (&value.typed, value.native_column_type_name()) { + (quaint::ValueType::DateTime(value), Some("DATE")) => match value { + Some(value) => JSArg::RawString(value.date_naive().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), Some("TIME")) => match value { + Some(value) => JSArg::RawString(value.time().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), Some("TIMETZ")) => match value { + Some(value) => JSArg::RawString(value.time().format_with_items(TIME_FMT.clone()).to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), _) => match value { + Some(value) => JSArg::RawString(value.naive_utc().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), + _ => super::value_to_js_arg(value)?, + }; - args.push(res); - } + Ok(res) +} - 
Ok(args) +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + values.iter().map(value_to_js_arg).collect() } diff --git a/query-engine/driver-adapters/src/conversion/sqlite.rs b/query-engine/driver-adapters/src/conversion/sqlite.rs new file mode 100644 index 000000000000..4e6e56cb274a --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/sqlite.rs @@ -0,0 +1,23 @@ +use crate::conversion::JSArg; +use serde_json::value::Value as JsonValue; + +pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { + let res = match &value.typed { + quaint::ValueType::Numeric(bd) => match bd { + // converting decimal to string to preserve the precision + Some(bd) => match bd.to_string().parse::() { + Ok(double) => JSArg::from(JsonValue::from(double)), + Err(_) => JSArg::from(JsonValue::from(value.clone())), + }, + None => JsonValue::Null.into(), + }, + quaint::ValueType::Array(Some(ref items)) => JSArg::Array(values_to_js_args(items)?), + _ => super::value_to_js_arg(value)?, + }; + + Ok(res) +} + +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + values.iter().map(value_to_js_arg).collect() +} diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index 864ba5042083..b9a8cfe6564d 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -51,6 +51,7 @@ impl JsBaseQueryable { let sql: String = sql.to_string(); let args = match self.flavour { Flavour::Postgres => conversion::postgres::values_to_js_args(values), + Flavour::Sqlite => conversion::sqlite::values_to_js_args(values), _ => conversion::values_to_js_args(values), }?; Ok(Query { sql, args }) From 0a4f17d75f9571ce96148c80783bf460f07795b2 Mon Sep 17 00:00:00 2001 From: XiaoZhang Date: Fri, 3 Nov 2023 17:31:37 +0800 Subject: [PATCH 41/67] Refine error message of invalid database url (#4340) Co-authored-by: Jan Piotrowski --- schema-engine/cli/tests/cli_tests.rs | 11 +++++++++++ schema-engine/core/src/lib.rs | 4 +--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/schema-engine/cli/tests/cli_tests.rs b/schema-engine/cli/tests/cli_tests.rs index 18866f9b1c0a..bec62a2d3a31 100644 --- a/schema-engine/cli/tests/cli_tests.rs +++ b/schema-engine/cli/tests/cli_tests.rs @@ -183,6 +183,17 @@ fn test_create_database_mssql(api: TestApi) { assert!(output.status.success()); } +#[test_connector(tags(Sqlite))] +fn test_sqlite_url(api: TestApi) { + let base_dir = tempfile::tempdir().unwrap(); + let sqlite_path = base_dir.path().join("test.db"); + let url = format!("{}", sqlite_path.to_string_lossy()); + let output = api.run(&["--datasource", &url, "can-connect-to-database"]); + assert!(!output.status.success()); + let message = String::from_utf8(output.stderr).unwrap(); + assert!(message.contains("The provided database string is invalid. The scheme is not recognized in database URL.")); +} + #[test_connector(tags(Sqlite))] fn test_create_sqlite_database(api: TestApi) { let base_dir = tempfile::tempdir().unwrap(); diff --git a/schema-engine/core/src/lib.rs b/schema-engine/core/src/lib.rs index f4288f4305bc..92329a429663 100644 --- a/schema-engine/core/src/lib.rs +++ b/schema-engine/core/src/lib.rs @@ -89,9 +89,7 @@ fn connector_for_connection_string( let connector = MongoDbSchemaConnector::new(params); Ok(Box::new(connector)) } - Some(other) => Err(CoreError::url_parse_error(format!( - "`{other}` is not a known connection URL scheme. 
Prisma cannot determine the connector." - ))), + Some(_other) => Err(CoreError::url_parse_error("The scheme is not recognized")), None => Err(CoreError::user_facing(InvalidConnectionString { details: String::new(), })), From 802471e912d34744d0ea204f16e64e777d20ba64 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 6 Nov 2023 10:21:37 +0100 Subject: [PATCH 42/67] ci(renovate): Group Driver Adapters + Pin dependencies (#4410) --- renovate.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/renovate.json b/renovate.json index 28cc31716a6f..ded9b14cd9a2 100644 --- a/renovate.json +++ b/renovate.json @@ -13,6 +13,7 @@ "before 5am every weekday", "every weekend" ], + "rangeStrategy": "pin", "separateMinorPatch": true, "packageRules": [ { @@ -24,6 +25,12 @@ "groupName": "Weekly vitess docker image version update", "packageNames": ["vitess/vttestserver"], "schedule": ["before 7am on Wednesday"] + }, + { + "groupName": ["Prisma Driver Adapters"], + "matchPackageNames": ["@prisma/driver-adapter-utils"], + "matchPackagePrefixes": ["@prisma/adapter"], + "schedule": ["at any time"] } ] } From 79843f74ded807061ba2123a5227a66dc5fb4167 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 10:23:26 +0100 Subject: [PATCH 43/67] fix(deps): pin dependencies (#4411) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/query-engine-wasm/package-lock.json | 4 ++-- query-engine/query-engine-wasm/package.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json index bc854644f6dd..d9ac3326333b 100644 --- a/query-engine/query-engine-wasm/package-lock.json +++ b/query-engine/query-engine-wasm/package-lock.json @@ -6,8 +6,8 @@ "": { "dependencies": { "@neondatabase/serverless": "^0.6.0", - "@prisma/adapter-neon": "^5.4.1", - "@prisma/driver-adapter-utils": "^5.4.1" + "@prisma/adapter-neon": "5.4.1", + "@prisma/driver-adapter-utils": "5.4.1" } }, "node_modules/@neondatabase/serverless": { diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json index 538080ec1b8c..f0cd0922f1e2 100644 --- a/query-engine/query-engine-wasm/package.json +++ b/query-engine/query-engine-wasm/package.json @@ -3,7 +3,7 @@ "main": "./example.js", "dependencies": { "@neondatabase/serverless": "^0.6.0", - "@prisma/adapter-neon": "^5.4.1", - "@prisma/driver-adapter-utils": "^5.4.1" + "@prisma/adapter-neon": "5.4.1", + "@prisma/driver-adapter-utils": "5.4.1" } } From 93ca82c505dd968185223ab2795fdf372a8d7aec Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 6 Nov 2023 11:05:45 +0100 Subject: [PATCH 44/67] ci(renovate): Disable updates to `engines` (#4418) --- renovate.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/renovate.json b/renovate.json index ded9b14cd9a2..6df4c6ff36c4 100644 --- a/renovate.json +++ b/renovate.json @@ -31,6 +31,10 @@ "matchPackageNames": ["@prisma/driver-adapter-utils"], "matchPackagePrefixes": ["@prisma/adapter"], "schedule": ["at any time"] + }, + { + "packageNames": ["node"], + "enabled": false } ] } From a6053f34fb1426dcbe1b1c08c4edf0e05ef78550 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 11:17:01 +0100 Subject: [PATCH 45/67] chore(deps): update actions/setup-node action to v4 (#4416) Co-authored-by: renovate[bot] 
<29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/publish-prisma-schema-wasm.yml | 2 +- .github/workflows/query-engine-driver-adapters.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 30ecd68a2152..3d4951da0427 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -34,7 +34,7 @@ jobs: - name: Build run: nix build .#prisma-schema-wasm - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: '20.x' diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index f3a3badfb804..b8434e2fa04c 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -53,7 +53,7 @@ jobs: ref: ${{ github.event.pull_request.head.sha }} - name: 'Setup Node.js' - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: ${{ matrix.node_version }} From 16679d0b2c1fc8d82d94418c0a84a4a62d8858a2 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 6 Nov 2023 12:25:49 +0100 Subject: [PATCH 46/67] fix(driver-adapters): Ensure transaction metrics do not get negative (#4417) * fix(driver-adapters): Ensure transaction metrics do not get negative The current dispatcher for a thread gets lost somewhere within napi-rs/napi `ThreadsafeFunction`, and therefore the gauge increment in `Transaction::new` is not registered. That in turn means that when the same gauge is decremented in `commit`/`rollback`, its value will go negative. Moving the increment to the proxy fixes the problem. Unblocks prisma/prisma-orm#21746 * clippy --- query-engine/driver-adapters/src/proxy.rs | 7 +++++++ query-engine/driver-adapters/src/transaction.rs | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index da03336bdf53..62086a245199 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -4,6 +4,7 @@ use std::str::FromStr; use crate::async_js_function::AsyncJsFunction; use crate::conversion::JSArg; use crate::transaction::JsTransaction; +use metrics::increment_gauge; use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::threadsafe_function::{ErrorStrategy, ThreadsafeFunction}; use napi::{JsObject, JsString}; @@ -555,6 +556,12 @@ impl DriverProxy { pub async fn start_transaction(&self) -> quaint::Result<Box<JsTransaction>> { let tx = self.start_transaction.call(()).await?; + + // Decrement for this gauge is done in JsTransaction::commit/JsTransaction::rollback + // Previously, it was done in JsTransaction::new, similar to the native Transaction. + // However, correct Dispatcher is lost there and increment does not register, so we moved + // it here instead.
+ increment_gauge!("prisma_client_queries_active", 1.0); Ok(Box::new(tx)) } } diff --git a/query-engine/driver-adapters/src/transaction.rs b/query-engine/driver-adapters/src/transaction.rs index 0d26c7f863aa..d35a9019c6bc 100644 --- a/query-engine/driver-adapters/src/transaction.rs +++ b/query-engine/driver-adapters/src/transaction.rs @@ -1,5 +1,5 @@ use async_trait::async_trait; -use metrics::{decrement_gauge, increment_gauge}; +use metrics::decrement_gauge; use napi::{bindgen_prelude::FromNapiValue, JsObject}; use quaint::{ connector::{IsolationLevel, Transaction as QuaintTransaction}, @@ -22,8 +22,6 @@ pub(crate) struct JsTransaction { impl JsTransaction { pub(crate) fn new(inner: JsBaseQueryable, tx_proxy: TransactionProxy) -> Self { - increment_gauge!("prisma_client_queries_active", 1.0); - Self { inner, tx_proxy } } @@ -40,6 +38,7 @@ impl JsTransaction { #[async_trait] impl QuaintTransaction for JsTransaction { async fn commit(&self) -> quaint::Result<()> { + // increment of this gauge is done in DriverProxy::startTransaction decrement_gauge!("prisma_client_queries_active", 1.0); let commit_stmt = "COMMIT"; @@ -55,6 +54,7 @@ impl QuaintTransaction for JsTransaction { } async fn rollback(&self) -> quaint::Result<()> { + // increment of this gauge is done in DriverProxy::startTransaction decrement_gauge!("prisma_client_queries_active", 1.0); let rollback_stmt = "ROLLBACK"; From 07951c1aa16fcf8454c50c3815f0323753bb8bc5 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 6 Nov 2023 13:24:55 +0100 Subject: [PATCH 47/67] ci(renovate): Also exclude `pnpm` in `engines` from renovate updates (#4419) [skip ci] --- renovate.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/renovate.json b/renovate.json index 6df4c6ff36c4..c25ec3daa6e8 100644 --- a/renovate.json +++ b/renovate.json @@ -33,7 +33,7 @@ "schedule": ["at any time"] }, { - "packageNames": ["node"], + "packageNames": ["node", "pnpm"], "enabled": false } ] From 55b13ceb72dcaecabdfb153e4f7e5000c82b21cf Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 13:40:58 +0100 Subject: [PATCH 48/67] fix(deps): pin dependencies (#4413) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .../connector-test-kit-executor/package.json | 14 +++++++------- query-engine/driver-adapters/package.json | 4 ++-- query-engine/query-engine-wasm/package-lock.json | 7 +------ query-engine/query-engine-wasm/package.json | 2 +- 4 files changed, 11 insertions(+), 16 deletions(-) diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index 153b833df1e1..4648887f5063 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -18,21 +18,21 @@ "license": "Apache-2.0", "dependencies": { "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", + "@neondatabase/serverless": "0.6.0", "@planetscale/database": "1.11.0", "@prisma/adapter-libsql": "workspace:*", "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", "@prisma/driver-adapter-utils": "workspace:*", - "@types/pg": "^8.10.2", - "pg": "^8.11.3", - "undici": "^5.26.5", - "ws": "^8.14.2" + "@types/pg": "8.10.2", + "pg": "8.11.3", + "undici": "5.26.5", + "ws": "8.14.2" }, "devDependencies": { - "@types/node": "^20.5.1", - 
"tsup": "^7.2.0", + "@types/node": "20.5.1", + "tsup": "7.2.0", "typescript": "5.2.2" } } \ No newline at end of file diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index 1362da87700d..e137d6a524b2 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -16,8 +16,8 @@ "keywords": [], "author": "", "devDependencies": { - "@types/node": "^20.5.1", - "tsup": "^7.2.0", + "@types/node": "20.8.10", + "tsup": "7.2.0", "typescript": "5.2.2", "esbuild": "0.19.5", "esbuild-register": "3.5.0" diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json index d9ac3326333b..1c66eec352d2 100644 --- a/query-engine/query-engine-wasm/package-lock.json +++ b/query-engine/query-engine-wasm/package-lock.json @@ -5,7 +5,7 @@ "packages": { "": { "dependencies": { - "@neondatabase/serverless": "^0.6.0", + "@neondatabase/serverless": "0.6.0", "@prisma/adapter-neon": "5.4.1", "@prisma/driver-adapter-utils": "5.4.1" } @@ -37,11 +37,6 @@ "debug": "^4.3.4" } }, - "node_modules/@types/node": { - "version": "20.8.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz", - "integrity": "sha512-Vvycsc9FQdwhxE3y3DzeIxuEJbWGDsnrxvMADzTDF/lcdR9/K+AQIeAghTQsHtotg/q0j3WEOYS/jQgSdWue3w==" - }, "node_modules/@types/pg": { "version": "8.6.6", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json index f0cd0922f1e2..b4447ffcfb71 100644 --- a/query-engine/query-engine-wasm/package.json +++ b/query-engine/query-engine-wasm/package.json @@ -2,7 +2,7 @@ "type": "module", "main": "./example.js", "dependencies": { - "@neondatabase/serverless": "^0.6.0", + "@neondatabase/serverless": "0.6.0", "@prisma/adapter-neon": "5.4.1", "@prisma/driver-adapter-utils": "5.4.1" } From dea13481eb98a77159d85b9b4e8f526348f4e7a0 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 6 Nov 2023 13:58:11 +0100 Subject: [PATCH 49/67] ci(renovate): Enable config migration (#4420) --- renovate.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/renovate.json b/renovate.json index c25ec3daa6e8..7476dbb3f3ea 100644 --- a/renovate.json +++ b/renovate.json @@ -1,4 +1,5 @@ { + "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ "config:base" ], @@ -15,6 +16,7 @@ ], "rangeStrategy": "pin", "separateMinorPatch": true, + "configMigration": true, "packageRules": [ { "matchFiles": ["docker-compose.yml"], From b5d5d3fae9b3c7b222faf0fd0d7ee83237a2fb94 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 14:02:09 +0100 Subject: [PATCH 50/67] chore(config): migrate renovate config (#4421) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- renovate.json | 38 ++++++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/renovate.json b/renovate.json index 7476dbb3f3ea..4d8e7d2511d0 100644 --- a/renovate.json +++ b/renovate.json @@ -1,7 +1,7 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "config:base" + "config:recommended" ], "cargo": { "enabled": false @@ -19,23 +19,41 @@ "configMigration": true, "packageRules": [ { - "matchFiles": ["docker-compose.yml"], - "matchUpdateTypes": ["minor", "major"], + "matchFileNames": [ + "docker-compose.yml" + ], + "matchUpdateTypes": [ + "minor", + 
"major" + ], "enabled": false }, { "groupName": "Weekly vitess docker image version update", - "packageNames": ["vitess/vttestserver"], - "schedule": ["before 7am on Wednesday"] + "matchPackageNames": [ + "vitess/vttestserver" + ], + "schedule": [ + "before 7am on Wednesday" + ] }, { - "groupName": ["Prisma Driver Adapters"], - "matchPackageNames": ["@prisma/driver-adapter-utils"], - "matchPackagePrefixes": ["@prisma/adapter"], - "schedule": ["at any time"] + "groupName": "Prisma Driver Adapters", + "matchPackageNames": [ + "@prisma/driver-adapter-utils" + ], + "matchPackagePrefixes": [ + "@prisma/adapter" + ], + "schedule": [ + "at any time" + ] }, { - "packageNames": ["node", "pnpm"], + "matchPackageNames": [ + "node", + "pnpm" + ], "enabled": false } ] From b855cf231f31b5a2fdd81b40a70f2d1186f12a83 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 17:56:58 +0100 Subject: [PATCH 51/67] fix(deps): update prisma driver adapters to v5.5.2 (minor) (#4415) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .../query-engine-wasm/package-lock.json | 40 ++++++++++++++----- query-engine/query-engine-wasm/package.json | 4 +- 2 files changed, 33 insertions(+), 11 deletions(-) diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json index 1c66eec352d2..c2d5a7a1162e 100644 --- a/query-engine/query-engine-wasm/package-lock.json +++ b/query-engine/query-engine-wasm/package-lock.json @@ -6,8 +6,8 @@ "": { "dependencies": { "@neondatabase/serverless": "0.6.0", - "@prisma/adapter-neon": "5.4.1", - "@prisma/driver-adapter-utils": "5.4.1" + "@prisma/adapter-neon": "5.5.2", + "@prisma/driver-adapter-utils": "5.5.2" } }, "node_modules/@neondatabase/serverless": { @@ -19,24 +19,41 @@ } }, "node_modules/@prisma/adapter-neon": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.4.1.tgz", - "integrity": "sha512-mIwLmwyAwDV9HXar9lSyM2uVm9H+X8noG4reKLnC3NjFsBxBfSUgW9vS8dPGqGW/rJWX3hg4pIffjEjmX4TDqg==", + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.5.2.tgz", + "integrity": "sha512-XcpJ/fgh/sP7mlBFkqjIzEcU/kWnNyiZf19MBP366HF7vXg2UQTbGxmbbeFiohXSJ/rwyu1Qmos7IrKK+QJOgg==", "dependencies": { - "@prisma/driver-adapter-utils": "5.4.1" + "@prisma/driver-adapter-utils": "5.5.2", + "postgres-array": "^3.0.2" }, "peerDependencies": { "@neondatabase/serverless": "^0.6.0" } }, + "node_modules/@prisma/adapter-neon/node_modules/postgres-array": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.2.tgz", + "integrity": "sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==", + "engines": { + "node": ">=12" + } + }, "node_modules/@prisma/driver-adapter-utils": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.4.1.tgz", - "integrity": "sha512-muYjkzf6qdxz4uGBi7nKyPaGRGLnSgiRautqAhZiMwbTOr9hMgyNI+aCJTCaKfYfNWjYCx2r5J6R1mJtPhzFhQ==", + "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.5.2.tgz", + "integrity": "sha512-lRkxjboGcIl2VkJNomZQ9b6vc2qGFnVwjaR/o3cTPGmmSxETx71cYRYcG/NHKrhvKxI6oKNZ/xzyuzPpg1+kJQ==", "dependencies": { "debug": "^4.3.4" } }, + "node_modules/@types/node": { + "version": "20.8.10", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.8.10.tgz", + "integrity": "sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, "node_modules/@types/pg": { "version": "8.6.6", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", @@ -131,6 +148,11 @@ "node": ">=0.10.0" } }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json index b4447ffcfb71..102db2ce14b5 100644 --- a/query-engine/query-engine-wasm/package.json +++ b/query-engine/query-engine-wasm/package.json @@ -3,7 +3,7 @@ "main": "./example.js", "dependencies": { "@neondatabase/serverless": "0.6.0", - "@prisma/adapter-neon": "5.4.1", - "@prisma/driver-adapter-utils": "5.4.1" + "@prisma/adapter-neon": "5.5.2", + "@prisma/driver-adapter-utils": "5.5.2" } } From e62f312231dc614321fc97dca4911c1bfcb5415d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Tue, 7 Nov 2023 18:12:46 +0100 Subject: [PATCH 52/67] Fix DATE / TIME argument parsing for planetscale (Rust side) (#4422) * Fix serialization of date arguments for Planetscale * Accelerate builds temporarily * Unit tests for conversions * Remove RawString variant on arguments * Plain old string objects for json, without requiring RawString * Revert "Accelerate builds temporarily" This reverts commit 6c17fc7464137051517afacfde597dfb285c8b6c. 
--- .../driver-adapters/src/conversion.rs | 34 +---- .../driver-adapters/src/conversion/mysql.rs | 107 +++++++++++++++ .../src/conversion/postgres.rs | 124 +++++++++++++++--- .../driver-adapters/src/conversion/sqlite.rs | 107 +++++++++++++-- query-engine/driver-adapters/src/queryable.rs | 18 ++- 5 files changed, 322 insertions(+), 68 deletions(-) create mode 100644 query-engine/driver-adapters/src/conversion/mysql.rs diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index c6ea87f1bfa2..a26afcf07122 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -1,3 +1,4 @@ +pub(crate) mod mysql; pub(crate) mod postgres; pub(crate) mod sqlite; @@ -6,10 +7,9 @@ use napi::NapiValue; use serde::Serialize; use serde_json::value::Value as JsonValue; -#[derive(Debug, Serialize)] +#[derive(Debug, PartialEq, Serialize)] #[serde(untagged)] pub enum JSArg { - RawString(String), Value(serde_json::Value), Buffer(Vec), Array(Vec), @@ -34,7 +34,6 @@ impl FromNapiValue for JSArg { impl ToNapiValue for JSArg { unsafe fn to_napi_value(env: napi::sys::napi_env, value: Self) -> napi::Result { match value { - JSArg::RawString(s) => ToNapiValue::to_napi_value(env, s), JSArg::Value(v) => ToNapiValue::to_napi_value(env, v), JSArg::Buffer(bytes) => { ToNapiValue::to_napi_value(env, napi::Env::from_raw(env).create_buffer_with_data(bytes)?.into_raw()) @@ -59,32 +58,3 @@ impl ToNapiValue for JSArg { } } } - -pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { - let res = match &value.typed { - quaint::ValueType::Json(s) => match s { - Some(ref s) => { - let json_str = serde_json::to_string(s)?; - JSArg::RawString(json_str) - } - None => JsonValue::Null.into(), - }, - quaint::ValueType::Bytes(bytes) => match bytes { - Some(bytes) => JSArg::Buffer(bytes.to_vec()), - None => JsonValue::Null.into(), - }, - quaint::ValueType::Numeric(bd) => match bd { - // converting decimal to string to preserve the precision - Some(bd) => JSArg::RawString(bd.to_string()), - None => JsonValue::Null.into(), - }, - quaint::ValueType::Array(Some(ref items)) => JSArg::Array(values_to_js_args(items)?), - quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), - }; - - Ok(res) -} - -pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { - values.iter().map(value_to_js_arg).collect() -} diff --git a/query-engine/driver-adapters/src/conversion/mysql.rs b/query-engine/driver-adapters/src/conversion/mysql.rs new file mode 100644 index 000000000000..ed596364a36b --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/mysql.rs @@ -0,0 +1,107 @@ +use crate::conversion::JSArg; +use serde_json::value::Value as JsonValue; + +const DATETIME_FORMAT: &str = "%Y-%m-%d %H:%M:%S"; +const DATE_FORMAT: &str = "%Y-%m-%d"; +const TIME_FORMAT: &str = "%H:%M:%S"; + +#[rustfmt::skip] +pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { + let res = match &value.typed { + quaint::ValueType::Numeric(Some(bd)) => JSArg::Value(JsonValue::String(bd.to_string())), + quaint::ValueType::Json(Some(s)) => JSArg::Value(JsonValue::String(serde_json::to_string(s)?)), + quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), + quaint::ValueType::Date(Some(d)) => JSArg::Value(JsonValue::String(d.format(DATE_FORMAT).to_string())), + quaint::ValueType::DateTime(Some(dt)) => JSArg::Value(JsonValue::String(dt.format(DATETIME_FORMAT).to_string())), + 
quaint::ValueType::Time(Some(t)) => JSArg::Value(JsonValue::String(t.format(TIME_FORMAT).to_string())), + quaint::ValueType::Array(Some(ref items)) => JSArg::Array( + items + .iter() + .map(value_to_js_arg) + .collect::>>()?, + ), + quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), + }; + Ok(res) +} + +#[cfg(test)] +mod test { + use super::*; + use bigdecimal::BigDecimal; + use chrono::*; + use quaint::ValueType; + use std::str::FromStr; + + #[test] + #[rustfmt::skip] + fn test_value_to_js_arg() { + let test_cases = vec![ + ( + ValueType::Numeric(Some(1.into())), + JSArg::Value(JsonValue::String("1".to_string())) + ), + ( + ValueType::Numeric(Some(BigDecimal::from_str("-1.1").unwrap())), + JSArg::Value(JsonValue::String("-1.1".to_string())) + ), + ( + ValueType::Numeric(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Json(Some(serde_json::json!({"a": 1}))), + JSArg::Value(JsonValue::String("{\"a\":1}".to_string())) + ), + ( + ValueType::Json(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Date(Some(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap())), + JSArg::Value(JsonValue::String("2020-02-29".to_string())) + ), + ( + ValueType::Date(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())), + JSArg::Value(JsonValue::String("2020-01-01 23:13:01".to_string())) + ), + ( + ValueType::DateTime(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::Time(None), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Array(Some(vec!( + ValueType::Numeric(Some(1.into())).into_value(), + ValueType::Numeric(None).into_value(), + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + ))), + JSArg::Array(vec!( + JSArg::Value(JsonValue::String("1".to_string())), + JSArg::Value(JsonValue::Null), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + )) + ), + ]; + + let mut errors: Vec = vec![]; + for (val, expected) in test_cases { + let actual = value_to_js_arg(&val.clone().into_value()).unwrap(); + if actual != expected { + errors.push(format!("transforming: {:?}, expected: {:?}, actual: {:?}", &val, expected, actual)); + } + } + assert_eq!(errors.len(), 0, "{}", errors.join("\n")); + } +} diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 8c00d0aae59f..113be5170a84 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -5,31 +5,115 @@ use serde_json::value::Value as JsonValue; static TIME_FMT: Lazy = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f")); +#[rustfmt::skip] pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { let res = match (&value.typed, value.native_column_type_name()) { - (quaint::ValueType::DateTime(value), Some("DATE")) => match value { - Some(value) => JSArg::RawString(value.date_naive().to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::DateTime(value), Some("TIME")) => match value { - Some(value) => JSArg::RawString(value.time().to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::DateTime(value), Some("TIMETZ")) => match value { - Some(value) => JSArg::RawString(value.time().format_with_items(TIME_FMT.clone()).to_string()), - None => JsonValue::Null.into(), - }, - 
(quaint::ValueType::DateTime(value), _) => match value { - Some(value) => JSArg::RawString(value.naive_utc().to_string()), - None => JsonValue::Null.into(), - }, - (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), - _ => super::value_to_js_arg(value)?, + (quaint::ValueType::DateTime(Some(dt)), Some("DATE")) => JSArg::Value(JsonValue::String(dt.date_naive().to_string())), + (quaint::ValueType::DateTime(Some(dt)), Some("TIME")) => JSArg::Value(JsonValue::String(dt.time().to_string())), + (quaint::ValueType::DateTime(Some(dt)), Some("TIMETZ")) => JSArg::Value(JsonValue::String(dt.time().format_with_items(TIME_FMT.clone()).to_string())), + (quaint::ValueType::DateTime(Some(dt)), _) => JSArg::Value(JsonValue::String(dt.naive_utc().to_string())), + (quaint::ValueType::Json(Some(s)), _) => JSArg::Value(JsonValue::String(serde_json::to_string(s)?)), + (quaint::ValueType::Bytes(Some(bytes)), _) => JSArg::Buffer(bytes.to_vec()), + (quaint::ValueType::Numeric(Some(bd)), _) => JSArg::Value(JsonValue::String(bd.to_string())), + (quaint::ValueType::Array(Some(items)), _) => JSArg::Array( + items + .iter() + .map(value_to_js_arg) + .collect::>>()?, + ), + (quaint_value, _) => JSArg::from(JsonValue::from(quaint_value.clone())), }; Ok(res) } -pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { - values.iter().map(value_to_js_arg).collect() +#[cfg(test)] +mod test { + use super::*; + use bigdecimal::BigDecimal; + use chrono::*; + use quaint::ValueType; + use std::str::FromStr; + + #[test] + #[rustfmt::skip] + fn test_value_to_js_arg() { + let test_cases: Vec<(quaint::Value, JSArg)> = vec![ + ( + ValueType::Numeric(Some(1.into())).into_value(), + JSArg::Value(JsonValue::String("1".to_string())) + ), + ( + ValueType::Numeric(Some(BigDecimal::from_str("-1.1").unwrap())).into_value(), + JSArg::Value(JsonValue::String("-1.1".to_string())) + ), + ( + ValueType::Numeric(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Json(Some(serde_json::json!({"a": 1}))).into_value(), + JSArg::Value(JsonValue::String("{\"a\":1}".to_string())) + ), + ( + ValueType::Json(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Date(Some(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap())).into_value(), + JSArg::Value(JsonValue::String("2020-02-29".to_string())) + ), + ( + ValueType::Date(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())).into_value().with_native_column_type(Some("DATE")), + JSArg::Value(JsonValue::String("2020-01-01".to_string())) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())).into_value().with_native_column_type(Some("TIME")), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())).into_value().with_native_column_type(Some("TIMETZ")), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::DateTime(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + JSArg::Value(JsonValue::String("23:13:01".to_string())) + ), + ( + ValueType::Time(None).into_value(), + JSArg::Value(JsonValue::Null) + ), + ( + ValueType::Array(Some(vec!( + ValueType::Numeric(Some(1.into())).into_value(), + ValueType::Numeric(None).into_value(), + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 
1).unwrap())).into_value(), + ValueType::Time(None).into_value(), + ))).into_value(), + JSArg::Array(vec!( + JSArg::Value(JsonValue::String("1".to_string())), + JSArg::Value(JsonValue::Null), + JSArg::Value(JsonValue::String("23:13:01".to_string())), + JSArg::Value(JsonValue::Null), + )) + ), + ]; + + let mut errors: Vec = vec![]; + for (val, expected) in test_cases { + let actual = value_to_js_arg(&val).unwrap(); + if actual != expected { + errors.push(format!("transforming: {:?}, expected: {:?}, actual: {:?}", &val, expected, actual)); + } + } + assert_eq!(errors.len(), 0, "{}", errors.join("\n")); + } } diff --git a/query-engine/driver-adapters/src/conversion/sqlite.rs b/query-engine/driver-adapters/src/conversion/sqlite.rs index 4e6e56cb274a..032c16923256 100644 --- a/query-engine/driver-adapters/src/conversion/sqlite.rs +++ b/query-engine/driver-adapters/src/conversion/sqlite.rs @@ -3,21 +3,106 @@ use serde_json::value::Value as JsonValue; pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { let res = match &value.typed { - quaint::ValueType::Numeric(bd) => match bd { - // converting decimal to string to preserve the precision - Some(bd) => match bd.to_string().parse::() { - Ok(double) => JSArg::from(JsonValue::from(double)), - Err(_) => JSArg::from(JsonValue::from(value.clone())), - }, - None => JsonValue::Null.into(), + quaint::ValueType::Numeric(Some(bd)) => match bd.to_string().parse::() { + Ok(double) => JSArg::from(JsonValue::from(double)), + Err(_) => JSArg::from(JsonValue::from(value.clone())), }, - quaint::ValueType::Array(Some(ref items)) => JSArg::Array(values_to_js_args(items)?), - _ => super::value_to_js_arg(value)?, + quaint::ValueType::Json(Some(s)) => JSArg::Value(s.to_owned()), + quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), + quaint::ValueType::Array(Some(ref items)) => JSArg::Array( + items + .iter() + .map(value_to_js_arg) + .collect::>>()?, + ), + quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), }; Ok(res) } -pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { - values.iter().map(value_to_js_arg).collect() +// unit tests for value_to_js_arg +#[cfg(test)] +mod test { + use super::*; + use bigdecimal::BigDecimal; + use chrono::*; + use quaint::ValueType; + use serde_json::Value; + use std::str::FromStr; + + #[test] + #[rustfmt::skip] + fn test_value_to_js_arg() { + let test_cases = vec![ + ( + // This is different than how mysql or postgres processes integral BigInt values. 
+ ValueType::Numeric(Some(1.into())), + JSArg::Value(Value::Number("1.0".parse().unwrap())) + ), + ( + ValueType::Numeric(Some(BigDecimal::from_str("-1.1").unwrap())), + JSArg::Value(Value::Number("-1.1".parse().unwrap())), + ), + ( + ValueType::Numeric(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Json(Some(serde_json::json!({"a": 1}))), + JSArg::Value(serde_json::json!({"a": 1})), + ), + ( + ValueType::Json(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Date(Some(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap())), + JSArg::Value(Value::String("2020-02-29".to_string())), + ), + ( + ValueType::Date(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())), + JSArg::Value(Value::String("2020-01-01T23:13:01+00:00".to_string())), + ), + ( + ValueType::DateTime(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())), + JSArg::Value(Value::String("23:13:01".to_string())), + ), + ( + ValueType::Time(None), + JSArg::Value(Value::Null) + ), + ( + ValueType::Array(Some(vec!( + ValueType::Numeric(Some(1.into())).into_value(), + ValueType::Numeric(None).into_value(), + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())).into_value(), + ValueType::Time(None).into_value(), + ))), + JSArg::Array(vec!( + JSArg::Value(Value::Number("1.0".parse().unwrap())), + JSArg::Value(Value::Null), + JSArg::Value(Value::String("23:13:01".to_string())), + JSArg::Value(Value::Null), + )) + ), + ]; + + let mut errors: Vec = vec![]; + for (val, expected) in test_cases { + let actual = value_to_js_arg(&val.clone().into_value()).unwrap(); + if actual != expected { + errors.push(format!("transforming: {:?}, expected: {:?}, actual: {:?}", &val, expected, actual)); + } + } + assert_eq!(errors.len(), 0, "{}", errors.join("\n")); + } } diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index b9a8cfe6564d..ab154eccc139 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -49,11 +49,19 @@ impl JsBaseQueryable { async fn build_query(&self, sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result { let sql: String = sql.to_string(); - let args = match self.flavour { - Flavour::Postgres => conversion::postgres::values_to_js_args(values), - Flavour::Sqlite => conversion::sqlite::values_to_js_args(values), - _ => conversion::values_to_js_args(values), - }?; + + let converter = match self.flavour { + Flavour::Postgres => conversion::postgres::value_to_js_arg, + Flavour::Sqlite => conversion::sqlite::value_to_js_arg, + Flavour::Mysql => conversion::mysql::value_to_js_arg, + _ => unreachable!("Unsupported flavour for JS connector {:?}", self.flavour), + }; + + let args = values + .iter() + .map(converter) + .collect::>>()?; + Ok(Query { sql, args }) } } From 296391964949386b93591f4ef436aa255e952130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Wed, 8 Nov 2023 13:50:44 +0100 Subject: [PATCH 53/67] fix: Use fractionals in PS Datetimes (#4426) * Use fractionals in PS Datetimes * Update mysql.rs --- query-engine/driver-adapters/src/conversion/mysql.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/query-engine/driver-adapters/src/conversion/mysql.rs b/query-engine/driver-adapters/src/conversion/mysql.rs index ed596364a36b..aab33213431a 100644 --- a/query-engine/driver-adapters/src/conversion/mysql.rs +++ 
b/query-engine/driver-adapters/src/conversion/mysql.rs @@ -1,9 +1,9 @@ use crate::conversion::JSArg; use serde_json::value::Value as JsonValue; -const DATETIME_FORMAT: &str = "%Y-%m-%d %H:%M:%S"; +const DATETIME_FORMAT: &str = "%Y-%m-%d %H:%M:%S%.f"; const DATE_FORMAT: &str = "%Y-%m-%d"; -const TIME_FORMAT: &str = "%H:%M:%S"; +const TIME_FORMAT: &str = "%H:%M:%S%.f"; #[rustfmt::skip] pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { @@ -66,16 +66,16 @@ mod test { JSArg::Value(JsonValue::Null) ), ( - ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap())), - JSArg::Value(JsonValue::String("2020-01-01 23:13:01".to_string())) + ValueType::DateTime(Some(Utc.with_ymd_and_hms(2020, 1, 1, 23, 13, 1).unwrap().with_nanosecond(100).unwrap())), + JSArg::Value(JsonValue::String("2020-01-01 23:13:01.000000100".to_string())) ), ( ValueType::DateTime(None), JSArg::Value(JsonValue::Null) ), ( - ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap())), - JSArg::Value(JsonValue::String("23:13:01".to_string())) + ValueType::Time(Some(NaiveTime::from_hms_opt(23, 13, 1).unwrap().with_nanosecond(1200).unwrap())), + JSArg::Value(JsonValue::String("23:13:01.000001200".to_string())) ), ( ValueType::Time(None), From f9e46ef5680842bd262eb7e64a022577767f48d3 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Thu, 9 Nov 2023 19:17:54 +0100 Subject: [PATCH 54/67] fix(docker): fix MySQL 5.7 definition in docker-compose (#4430) --- docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index c0d4f179e0a4..a8b48748abc4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -187,7 +187,6 @@ services: restart: unless-stopped platform: linux/x86_64 environment: - MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: From d5d315cf933eb01ec43ab0272722100550db7345 Mon Sep 17 00:00:00 2001 From: Sophie <29753584+Druue@users.noreply.github.com> Date: Thu, 9 Nov 2023 23:46:08 +0100 Subject: [PATCH 55/67] fix(psl): composite type validation in indices (#4401) fixed https://github.com/prisma/prisma/issues/21441 * Updated composite type validation in indexes - only check for composites in _compound unique_ indexes - clarified validation message - Added link for tracking issue to add support for composite types in compound indexes --- .../validation_pipeline/validations.rs | 2 +- .../validations/indexes.rs | 33 ++-- .../prisma-models/src/field/scalar.rs | 2 +- .../tests/datamodel_converter_tests.rs | 160 ++++++++++++++++-- 4 files changed, 165 insertions(+), 32 deletions(-) diff --git a/psl/psl-core/src/validate/validation_pipeline/validations.rs b/psl/psl-core/src/validate/validation_pipeline/validations.rs index 4040844bb767..90f8ec9fe79e 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations.rs @@ -123,7 +123,7 @@ pub(super) fn validate(ctx: &mut Context<'_>) { indexes::supports_clustering_setting(index, ctx); indexes::clustering_can_be_defined_only_once(index, ctx); indexes::opclasses_are_not_allowed_with_other_than_normal_indices(index, ctx); - indexes::composite_types_are_not_allowed_in_index(index, ctx); + indexes::composite_type_in_compound_unique_index(index, ctx); for field_attribute in index.scalar_field_attributes() { let span = index.ast_attribute().span; diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs 
index 5f3288264016..7a7d0e1d105e 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs @@ -386,20 +386,25 @@ pub(crate) fn opclasses_are_not_allowed_with_other_than_normal_indices(index: In } } -pub(crate) fn composite_types_are_not_allowed_in_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) { - for field in index.fields() { - if field.scalar_field_type().as_composite_type().is_some() { - let message = format!( - "Indexes can only contain scalar attributes. Please remove {:?} from the argument list of the indexes.", - field.name() - ); - ctx.push_error(DatamodelError::new_attribute_validation_error( - &message, - index.attribute_name(), - index.ast_attribute().span, - )); - return; - } +pub(crate) fn composite_type_in_compound_unique_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) { + if !index.is_unique() { + return; + } + + let composite_type = index + .fields() + .find(|f| f.scalar_field_type().as_composite_type().is_some()); + + if index.fields().len() > 1 && composite_type.is_some() { + let message = format!( + "Prisma does not currently support composite types in compound unique indices, please remove {:?} from the index. See https://pris.ly/d/mongodb-composite-compound-indices for more details", + composite_type.unwrap().name() + ); + ctx.push_error(DatamodelError::new_attribute_validation_error( + &message, + index.attribute_name(), + index.ast_attribute().span, + )); } } diff --git a/query-engine/prisma-models/src/field/scalar.rs b/query-engine/prisma-models/src/field/scalar.rs index 92039da53663..b8ef8ab204e2 100644 --- a/query-engine/prisma-models/src/field/scalar.rs +++ b/query-engine/prisma-models/src/field/scalar.rs @@ -91,7 +91,7 @@ impl ScalarField { match scalar_field_type { ScalarFieldType::CompositeType(_) => { - unreachable!("Cannot convert a composite type to a type identifier. 
This error is typically caused by mistakenly using a composite type within a composite index.",) + unreachable!("This shouldn't be reached; composite types are not supported in compound unique indices.",) } ScalarFieldType::Enum(x) => TypeIdentifier::Enum(x), ScalarFieldType::BuiltInScalar(scalar) => scalar.into(), diff --git a/query-engine/prisma-models/tests/datamodel_converter_tests.rs b/query-engine/prisma-models/tests/datamodel_converter_tests.rs index 0a45c80ed163..a2ee28ca6c0d 100644 --- a/query-engine/prisma-models/tests/datamodel_converter_tests.rs +++ b/query-engine/prisma-models/tests/datamodel_converter_tests.rs @@ -38,31 +38,159 @@ fn converting_enums() { } } +// region: composite #[test] -fn converting_composite_types() { +fn converting_composite_types_compound() { let res = psl::parse_schema( r#" - datasource db { - provider = "mongodb" - url = "mongodb://localhost:27017/hello" - } + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } - model MyModel { - id String @id @default(auto()) @map("_id") @db.ObjectId - attribute Attribute + model Post { + id String @id @default(auto()) @map("_id") @db.ObjectId + author User @relation(fields: [authorId], references: [id]) + authorId String @db.ObjectId + attributes Attribute[] + + @@index([authorId, attributes]) + } + + type Attribute { + name String + value String + group String + } + + model User { + id String @id @default(auto()) @map("_id") @db.ObjectId + Post Post[] + } + "#, + ); - @@unique([attribute], name: "composite_index") - } + assert!(res.is_ok()); +} - type Attribute { - name String - value String - group String - } +#[test] +fn converting_composite_types_compound_unique() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + model Post { + id String @id @default(auto()) @map("_id") @db.ObjectId + author User @relation(fields: [authorId], references: [id]) + authorId String @db.ObjectId + attributes Attribute[] + + @@unique([authorId, attributes]) + // ^^^^^^^^^^^^^^^^^^^^^^ + // Prisma does not currently support composite types in compound unique indices... + } + + type Attribute { + name String + value String + group String + } + + model User { + id String @id @default(auto()) @map("_id") @db.ObjectId + Post Post[] + } "#, ); - assert!(res.unwrap_err().contains("Indexes can only contain scalar attributes. Please remove \"attribute\" from the argument list of the indexes.")); + + assert!(res + .unwrap_err() + .contains(r#"Prisma does not currently support composite types in compound unique indices, please remove "attributes" from the index. See https://pris.ly/d/mongodb-composite-compound-indices for more details"#)); +} + +#[test] +fn converting_composite_types_nested() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + type TheatersLocation { + address TheatersLocationAddress + geo TheatersLocationGeo + } + + type TheatersLocationAddress { + city String + state String + street1 String + street2 String? 
+ zipcode String + } + + type TheatersLocationGeo { + coordinates Float[] + type String + } + + model theaters { + id String @id @default(auto()) @map("_id") @db.ObjectId + location TheatersLocation + theaterId Int + + @@index([location.geo], map: "geo index") + } + "#, + ); + + assert!(res.is_ok()); +} + +#[test] +fn converting_composite_types_nested_scalar() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + type TheatersLocation { + address TheatersLocationAddress + geo TheatersLocationGeo + } + + type TheatersLocationAddress { + city String + state String + street1 String + street2 String? + zipcode String + } + + type TheatersLocationGeo { + coordinates Float[] + type String + } + + model theaters { + id String @id @default(auto()) @map("_id") @db.ObjectId + location TheatersLocation + theaterId Int + + @@index([location.geo.type], map: "geo index") + } + "#, + ); + + assert!(res.is_ok()); } +// endregion #[test] fn models_with_only_scalar_fields() { From 0a383f5645e8d5567f55109a60ec45c0c2c8e719 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Fri, 10 Nov 2023 14:57:04 +0100 Subject: [PATCH 56/67] driver-adapters: Use new planetscale init (#4432) See prisma/prisma#21883 DRIVER_ADAPTERS_BRANCH=fix/planetscale-client --- .../connector-test-kit-executor/src/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index b89348fb3e77..2318c0525760 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -18,7 +18,7 @@ import { createClient } from '@libsql/client' import { PrismaLibSQL } from '@prisma/adapter-libsql' // planetscale dependencies -import { connect as planetscaleConnect } from '@planetscale/database' +import { Client as PlanetscaleClient } from '@planetscale/database' import { PrismaPlanetScale } from '@prisma/adapter-planetscale' @@ -276,12 +276,12 @@ async function planetscaleAdapter(url: string): Promise { throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); } - const connection = planetscaleConnect({ + const client = new PlanetscaleClient({ url: proxyURL, fetch, }) - return new PrismaPlanetScale(connection) + return new PrismaPlanetScale(client) } main().catch(err) From b23895f3dcb32bcc0639539c07d23641b7caf01a Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Fri, 10 Nov 2023 15:38:16 +0100 Subject: [PATCH 57/67] chore: Change team name in CODEOWNERS (#4433) --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index c1a996de1f21..cb8fc144133d 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @prisma/team-orm-rust +* @prisma/ORM-Rust From 77110460bc4e1c1ec2c720d23cbb9229351ba3c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 13 Nov 2023 10:34:21 +0100 Subject: [PATCH 58/67] Unskip tests for driver adapters core features, skip tests for low fidelity cases (#4400) --- .../query-engine-driver-adapters.yml | 2 - Makefile | 8 +-- .../tests/new/assertion_violation_error.rs | 4 +- .../tests/new/interactive_tx.rs | 4 +- .../new/ref_actions/on_delete/set_default.rs | 8 +-- .../new/ref_actions/on_update/set_default.rs | 8 +-- .../tests/new/regressions/max_integer.rs | 53 
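The relaxed validation above is easiest to read as a predicate: an index is rejected only when it is unique, spans more than one field, and references at least one composite type. A self-contained model of that rule (the `Index`/`Field` structs are hypothetical stand-ins for psl-core's `IndexWalker` API):

```rust
// Stand-in types; the real check walks psl-core's IndexWalker.
struct Field {
    name: &'static str,
    is_composite: bool,
}

struct Index {
    is_unique: bool,
    fields: Vec<Field>,
}

// Mirrors composite_type_in_compound_unique_index: only compound (len > 1)
// *unique* indexes containing a composite type produce an error.
fn composite_in_compound_unique(index: &Index) -> Option<String> {
    if !index.is_unique {
        return None; // a plain @@index may still reference composites
    }

    let composite = index.fields.iter().find(|f| f.is_composite)?;

    if index.fields.len() > 1 {
        Some(format!(
            "Prisma does not currently support composite types in compound unique indices, please remove {:?} from the index.",
            composite.name
        ))
    } else {
        None
    }
}

fn main() {
    // @@unique([authorId, attributes]) -- rejected, as in
    // converting_composite_types_compound_unique above.
    let compound_unique = Index {
        is_unique: true,
        fields: vec![
            Field { name: "authorId", is_composite: false },
            Field { name: "attributes", is_composite: true },
        ],
    };
    assert!(composite_in_compound_unique(&compound_unique).is_some());

    // @@index([authorId, attributes]) -- a non-unique compound index passes.
    let compound_index = Index { is_unique: false, ..compound_unique };
    assert!(composite_in_compound_unique(&compound_index).is_none());
}
```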
++++++++++++++++++- .../tests/new/regressions/prisma_15204.rs | 36 +++++++++++-- .../tests/queries/filters/json.rs | 4 +- .../query-engine-tests/tests/raw/sql/casts.rs | 15 +++++- .../tests/raw/sql/errors.rs | 19 ++++++- .../src/connector_tag/mod.rs | 28 +++++++--- .../driver-adapters/src/conversion.rs | 2 +- query-engine/driver-adapters/src/error.rs | 2 +- query-engine/driver-adapters/src/result.rs | 13 ++++- 15 files changed, 167 insertions(+), 39 deletions(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index b8434e2fa04c..3de0238aa0e7 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -31,8 +31,6 @@ jobs: setup_task: 'dev-neon-ws-postgres13' - name: 'libsql' setup_task: 'dev-libsql-sqlite' - - name: 'planetscale' - setup_task: 'dev-planetscale-vitess8' node_version: ['18'] env: LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/Makefile b/Makefile index a30a32ca1871..e00c122e2713 100644 --- a/Makefile +++ b/Makefile @@ -130,10 +130,10 @@ test-pg-postgres13: dev-pg-postgres13 test-qe-st test-driver-adapter-pg: test-pg-postgres13 -start-neon-postgres13: build-qe-napi build-connector-kit-js +start-neon-postgres13: docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 -dev-neon-ws-postgres13: start-neon-postgres13 +dev-neon-ws-postgres13: start-neon-postgres13 build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st @@ -268,10 +268,10 @@ start-vitess_8_0: dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) -start-planetscale-vitess8: build-qe-napi build-connector-kit-js +start-planetscale-vitess8: docker compose -f docker-compose.yml up -d --remove-orphans planetscale-vitess8 -dev-planetscale-vitess8: start-planetscale-vitess8 +dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs index 62c4e3005f71..a3e45b0a05b5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs @@ -1,8 +1,8 @@ use query_engine_tests::*; -#[test_suite(schema(generic), only(Postgres))] +#[test_suite(schema(generic))] mod raw_params { - #[connector_test] + #[connector_test(only(Postgres), exclude(JS))] async fn value_too_many_bind_variables(runner: Runner) -> TestResult<()> { let n = 32768; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 9aa34a943560..e45cef8ac306 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -213,7 +213,7 @@ mod interactive_tx { Ok(()) } - #[connector_test(exclude(JS))] + #[connector_test] async fn batch_queries_failure(mut runner: Runner) -> TestResult<()> { // Tx expires after five second. 
let tx_id = runner.start_tx(5000, 5000, None).await?; @@ -256,7 +256,7 @@ mod interactive_tx { Ok(()) } - #[connector_test(exclude(JS))] + #[connector_test] async fn tx_expiration_failure_cycle(mut runner: Runner) -> TestResult<()> { // Tx expires after one seconds. let tx_id = runner.start_tx(5000, 1000, None).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index 8ea08acc85da..393581b8ad91 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -66,7 +66,7 @@ mod one2one_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -167,7 +167,7 @@ mod one2one_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -270,7 +270,7 @@ mod one2many_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -371,7 +371,7 @@ mod one2many_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs index b0e566ffcb55..974c165ed942 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -68,7 +68,7 @@ mod one2one_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -171,7 +171,7 @@ mod one2one_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -276,7 +276,7 @@ mod one2many_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -379,7 +379,7 @@ mod one2many_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs index 581bc21bebe8..7b25cfff279e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs @@ -187,8 +187,8 @@ mod max_integer { schema.to_owned() } - #[connector_test(schema(overflow_pg), only(Postgres))] - async fn unfitted_int_should_fail_pg(runner: Runner) -> TestResult<()> { + #[connector_test(schema(overflow_pg), only(Postgres), exclude(JS))] + async fn unfitted_int_should_fail_pg_quaint(runner: Runner) -> TestResult<()> { // int assert_error!( runner, @@ -234,6 +234,55 @@ mod max_integer { Ok(()) } + // The driver adapter for neon provides different error messages on overflow + #[connector_test(schema(overflow_pg), only(JS, Postgres))] + async fn unfitted_int_should_fail_pg_js(runner: Runner) -> TestResult<()> { + // int + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MAX} }}) {{ id }} }}"), + None, + "value \\\"2147483648\\\" is out of range for type integer" + ); + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MIN} }}) {{ id }} }}"), + None, + "value \\\"-2147483649\\\" is out of range for type integer" + ); + + // smallint + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MAX} }}) {{ id }} }}"), + None, + "value \\\"32768\\\" is out of 
range for type smallint" + ); + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MIN} }}) {{ id }} }}"), + None, + "value \\\"-32769\\\" is out of range for type smallint" + ); + + //oid + assert_error!( + runner, + format!("mutation {{ createOneTest(data: {{ oid: {U32_OVERFLOW_MAX} }}) {{ id }} }}"), + None, + "value \\\"4294967296\\\" is out of range for type oid" + ); + + // The underlying driver swallows a negative id by interpreting it as unsigned. + // {"data":{"createOneTest":{"id":1,"oid":4294967295}}} + run_query!( + runner, + format!("mutation {{ createOneTest(data: {{ oid: {OVERFLOW_MIN} }}) {{ id, oid }} }}") + ); + + Ok(()) + } + #[connector_test(schema(overflow_pg), only(Postgres))] async fn fitted_int_should_work_pg(runner: Runner) -> TestResult<()> { // int diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs index c1df015c577b..ccf04dd2f4af 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15204.rs @@ -24,8 +24,8 @@ mod conversion_error { schema.to_owned() } - #[connector_test(schema(schema_int))] - async fn convert_to_int(runner: Runner) -> TestResult<()> { + #[connector_test(schema(schema_int), only(Sqlite), exclude(JS))] + async fn convert_to_int_sqlite_quaint(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; assert_error!( @@ -38,8 +38,22 @@ mod conversion_error { Ok(()) } - #[connector_test(schema(schema_bigint))] - async fn convert_to_bigint(runner: Runner) -> TestResult<()> { + #[connector_test(schema(schema_int), only(Sqlite, JS))] + async fn convert_to_int_sqlite_js(runner: Runner) -> TestResult<()> { + create_test_data(&runner).await?; + + assert_error!( + runner, + r#"query { findManyTestModel { field } }"#, + 2023, + "Inconsistent column data: Conversion failed: number must be an integer in column 'field'" + ); + + Ok(()) + } + + #[connector_test(schema(schema_bigint), only(Sqlite), exclude(JS))] + async fn convert_to_bigint_sqlite_quaint(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; assert_error!( @@ -52,6 +66,20 @@ mod conversion_error { Ok(()) } + #[connector_test(schema(schema_bigint), only(Sqlite, JS))] + async fn convert_to_bigint_sqlite_js(runner: Runner) -> TestResult<()> { + create_test_data(&runner).await?; + + assert_error!( + runner, + r#"query { findManyTestModel { field } }"#, + 2023, + "Inconsistent column data: Conversion failed: number must be an i64 in column 'field'" + ); + + Ok(()) + } + async fn create_test_data(runner: &Runner) -> TestResult<()> { run_query!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs index 2fe8af850120..5440ff8218f8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs @@ -207,7 +207,9 @@ mod json { Ok(()) } - #[connector_test(schema(json_opt))] + // The external runner for driver adapters, in spite of the protocol being used in the test matrix + // uses the JSON representation of queries, so this test should not apply to driver adapters (exclude(JS)) + 
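As a reference point for the overflow tests above: the `*_OVERFLOW_*` constants are the column type's two's-complement bounds shifted by one, i.e. values that fit in an `i64` but not in the narrower column type, so both the quaint path and the driver-adapter path must fail, merely with different wording. A sketch (the constant definitions are assumptions inferred from the asserted error strings, which they reproduce):

```rust
fn main() {
    const I16_OVERFLOW_MAX: i64 = i16::MAX as i64 + 1; //  32768
    const I16_OVERFLOW_MIN: i64 = i16::MIN as i64 - 1; // -32769
    const I32_OVERFLOW_MAX: i64 = i32::MAX as i64 + 1; //  2147483648
    const I32_OVERFLOW_MIN: i64 = i32::MIN as i64 - 1; // -2147483649
    const U32_OVERFLOW_MAX: i64 = u32::MAX as i64 + 1; //  4294967296, the oid bound

    // Each value is representable as i64 but rejected by the narrower type,
    // which is exactly what the int/smallint/oid assertions exercise.
    assert!(i32::try_from(I32_OVERFLOW_MAX).is_err());
    assert!(i32::try_from(I32_OVERFLOW_MIN).is_err());
    assert!(i16::try_from(I16_OVERFLOW_MAX).is_err());
    assert!(i16::try_from(I16_OVERFLOW_MIN).is_err());
    assert!(u32::try_from(U32_OVERFLOW_MAX).is_err());
}
```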
#[connector_test(schema(json_opt), exclude(JS))] async fn nested_not_shorthand(runner: Runner) -> TestResult<()> { // Those tests pass with the JSON protocol because the entire object is parsed as JSON. // They remain useful to ensure we don't ever allow a full JSON filter input object type at the schema level. diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs index 0039b924108c..635726c71380 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/casts.rs @@ -5,7 +5,20 @@ use query_engine_tests::*; mod casts { use query_engine_tests::{fmt_query_raw, run_query, RawParam}; - #[connector_test] + // The following tests are excluded for driver adapters. The underlying + // driver rejects queries where the values of the positional arguments do + // not match the expected types. As an example, the following query to the + // driver + // + // ```json + // { + // sql: 'SELECT $1::int4 AS decimal_to_i4; ', + // args: [ 42.51 ] + // } + // + // Bails with: ERROR: invalid input syntax for type integer: "42.51" + // + #[connector_test(only(Postgres), exclude(JS))] async fn query_numeric_casts(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query_pretty!(&runner, fmt_query_raw(r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs index 88409d8d17f6..43417cb352e9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/errors.rs @@ -34,8 +34,8 @@ mod raw_errors { Ok(()) } - #[connector_test(schema(common_nullable_types))] - async fn list_param_for_scalar_column_should_not_panic(runner: Runner) -> TestResult<()> { + #[connector_test(schema(common_nullable_types), only(Postgres), exclude(JS))] + async fn list_param_for_scalar_column_should_not_panic_quaint(runner: Runner) -> TestResult<()> { assert_error!( runner, fmt_execute_raw( @@ -48,4 +48,19 @@ mod raw_errors { Ok(()) } + + #[connector_test(schema(common_nullable_types), only(JS, Postgres))] + async fn list_param_for_scalar_column_should_not_panic_pg_js(runner: Runner) -> TestResult<()> { + assert_error!( + runner, + fmt_execute_raw( + r#"INSERT INTO "TestModel" ("id") VALUES ($1);"#, + vec![RawParam::array(vec![1])], + ), + 2010, + r#"invalid input syntax for type integer"# + ); + + Ok(()) + } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index d92bb5e96314..8c21dd93f903 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -296,19 +296,33 @@ pub(crate) fn should_run( return false; } - if !only.is_empty() { - return only - .iter() - .any(|only| ConnectorVersion::try_from(*only).unwrap().matches_pattern(&version)); - } - + // We skip tests that exclude JS driver adapters when an external test executor is configured. 
+ // A test that you only want to run with rust drivers can be annotated with exclude(JS) if CONFIG.external_test_executor().is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { println!("Excluded test execution for JS driver adapters. Skipping test"); return false; }; + // we consume the JS token to prevent it from being used in the following checks + let exclude: Vec<_> = exclude.iter().filter(|excl| excl.0.to_uppercase() != "JS").collect(); + + // We only run tests that include JS driver adapters when an external test executor is configured. + // A test that you only want to run with js driver adapters can be annotated with only(JS) + if CONFIG.external_test_executor().is_none() && only.iter().any(|incl| incl.0.to_uppercase() == "JS") { + println!("Excluded test execution for rust driver adapters. Skipping test"); + return false; + } + // we consume the JS token to prevent it from being used in the following checks + let only: Vec<_> = only.iter().filter(|incl| incl.0.to_uppercase() != "JS").collect(); + + if !only.is_empty() { + return only + .iter() + .any(|incl| ConnectorVersion::try_from(**incl).unwrap().matches_pattern(&version)); + } if exclude.iter().any(|excl| { - ConnectorVersion::try_from(*excl).map_or(false, |connector_version| connector_version.matches_pattern(&version)) + ConnectorVersion::try_from(**excl) + .map_or(false, |connector_version| connector_version.matches_pattern(&version)) }) { println!("Connector excluded. Skipping test."); return false; diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index a26afcf07122..00061d72de44 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -49,7 +49,7 @@ impl ToNapiValue for JSArg { for (index, item) in items.into_iter().enumerate() { let js_value = ToNapiValue::to_napi_value(env.raw(), item)?; // TODO: NapiRaw could be implemented for sys::napi_value directly, there should - // be no need for re-wrapping; submit a patch to napi-rs and simplify here. + // be no need for re-wrapping; submit a patch to napi-rs and simplify here. array.set(index as u32, napi::JsUnknown::from_raw_unchecked(env.raw(), js_value))?; } diff --git a/query-engine/driver-adapters/src/error.rs b/query-engine/driver-adapters/src/error.rs index f2fbb7dd9caf..4f4128088f49 100644 --- a/query-engine/driver-adapters/src/error.rs +++ b/query-engine/driver-adapters/src/error.rs @@ -12,7 +12,7 @@ pub(crate) fn into_quaint_error(napi_err: NapiError) -> QuaintError { QuaintError::raw_connector_error(status, reason) } -/// catches a panic thrown during the executuin of an asynchronous closure and transforms it into +/// catches a panic thrown during the execution of an asynchronous closure and transforms it into /// the Error variant of a napi::Result. 
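The `should_run` changes above reduce to two new rules: `exclude(JS)` skips a test whenever an external (driver-adapter) executor is configured, `only(JS)` skips it whenever one is not, and the JS token is stripped before ordinary connector matching runs. A condensed model (the stand-in signature replaces the real `ConnectorVersion`/`CONFIG` machinery with plain booleans and string slices):

```rust
fn should_run(external_executor: bool, only: &[&str], exclude: &[&str]) -> bool {
    // exclude(JS): skip whenever a driver-adapter (external) executor runs the suite.
    if external_executor && exclude.iter().any(|t| t.eq_ignore_ascii_case("js")) {
        return false;
    }
    // only(JS): skip whenever the suite runs against the native Rust drivers.
    if !external_executor && only.iter().any(|t| t.eq_ignore_ascii_case("js")) {
        return false;
    }
    // The JS token is consumed here so that only(JS, Postgres) still matches
    // the Postgres connector version afterwards.
    let _remaining: Vec<_> = only.iter().filter(|t| !t.eq_ignore_ascii_case("js")).collect();

    true // connector/version matching elided
}

fn main() {
    // only(JS, Postgres): runs under a driver adapter ...
    assert!(should_run(true, &["JS", "Postgres"], &[]));
    // ... and is skipped on the native drivers.
    assert!(!should_run(false, &["JS", "Postgres"], &[]));
    // exclude(JS) is the mirror image.
    assert!(!should_run(true, &[], &["JS"]));
    assert!(should_run(false, &[], &["JS"]));
}
```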
pub(crate) async fn async_unwinding_panic(fut: F) -> napi::Result where diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index 53133e037b6f..ad4ce7cbb546 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,5 +1,5 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, MysqlError, PostgresError, SqliteError}; +use quaint::error::{Error as QuaintError, ErrorKind, MysqlError, PostgresError, SqliteError}; use serde::Deserialize; #[derive(Deserialize)] @@ -36,7 +36,10 @@ pub(crate) enum DriverAdapterError { GenericJs { id: i32, }, - + UnsupportedNativeDataType { + #[serde(rename = "type")] + native_type: String, + }, Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError), @@ -53,6 +56,12 @@ impl FromNapiValue for DriverAdapterError { impl From for QuaintError { fn from(value: DriverAdapterError) -> Self { match value { + DriverAdapterError::UnsupportedNativeDataType { native_type } => { + QuaintError::builder(ErrorKind::UnsupportedColumnType { + column_type: native_type, + }) + .build() + } DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), DriverAdapterError::Postgres(e) => e.into(), DriverAdapterError::Mysql(e) => e.into(), From 29b65349a29d19e47b46f5e04b201ff515721521 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 13 Nov 2023 11:12:16 +0100 Subject: [PATCH 59/67] docs: "Manual integration releases from this repository to npm" and "Create local branches for fork branches from PRs" (#4406) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Joël Galeran --- README.md | 48 +++++++++++++++++++++++++++++++++++++----------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 49c7c1a8ab39..c28a53a6d657 100644 --- a/README.md +++ b/README.md @@ -259,6 +259,29 @@ GitHub actions will then pick up the branch name and use it to clone that branch When it's time to merge the sibling PRs, you'll need to merge the prisma/prisma PR first, so when merging the engines PR you have the code of the adapters ready in prisma/prisma `main` branch. +### Testing engines in `prisma/prisma` + +You can trigger releases from this repository to npm that can be used for testing the engines in `prisma/prisma` either automatically or manually: + +#### Automated integration releases from this repository to npm + +(Since July 2022). Any branch name starting with `integration/` will, first, run the full test suite in Buildkite `[Test] Prisma Engines` and, second, if passing, run the publish pipeline (build and upload engines to S3 & R2) + +The journey through the pipeline is the same as a commit on the `main` branch. +- It will trigger [`prisma/engines-wrapper`](https://github.com/prisma/engines-wrapper) and publish a new [`@prisma/engines-version`](https://www.npmjs.com/package/@prisma/engines-version) npm package but on the `integration` tag. +- Which triggers [`prisma/prisma`](https://github.com/prisma/prisma) to create a `chore(Automated Integration PR): [...]` PR with a branch name also starting with `integration/` +- Since in `prisma/prisma` we also trigger the publish pipeline when a branch name starts with `integration/`, this will publish all `prisma/prisma` monorepo packages to npm on the `integration` tag. 
+- Our [ecosystem-tests](https://github.com/prisma/ecosystem-tests/) tests will automatically pick up this new version and run tests, results will show in [GitHub Actions](https://github.com/prisma/ecosystem-tests/actions?query=branch%3Aintegration) + +This end to end will take minimum ~1h20 to complete, but is completely automated :robot: + +Notes: +- in `prisma/prisma` repository, we do not run tests for `integration/` branches, it is much faster and also means that there is no risk of tests failing (e.g. flaky tests, snapshots) that would stop the publishing process. +- in `prisma/prisma-engines` the Buildkite test pipeline must first pass, then the engines will be built and uploaded to our storage via the Buildkite release pipeline. These 2 pipelines can fail for different reasons, it's recommended to keep an eye on them (check notifications in Slack) and restart jobs as needed. Finally, it will trigger [`prisma/engines-wrapper`](https://github.com/prisma/engines-wrapper). + +#### Manual integration releases from this repository to npm + +Additionally to the automated integration release for `integration/` branches, you can also trigger a publish **manually** in the Buildkite `[Test] Prisma Engines` job if that succeeds for _any_ branch name. Click "🚀 Publish binaries" at the bottom of the test list to unlock the publishing step. When all the jobs in `[Release] Prisma Engines` succeed, you also have to unlock the next step by clicking "🚀 Publish client". This will then trigger the same journey as described above. ## Parallel rust-analyzer builds @@ -269,22 +292,25 @@ rust-analyzer. To avoid this. Open VSCode settings and search for `Check on Save --target-dir:/tmp/rust-analyzer-check ``` -### Automated integration releases from this repository to npm -(Since July 2022). Any branch name starting with `integration/` will, first, run the full test suite and, second, if passing, run the publish pipeline (build and upload engines to S3) +## Community PRs: create a local branch for a branch coming from a fork -The journey through the pipeline is the same as a commit on the `main` branch. -- It will trigger [prisma/engines-wrapper](https://github.com/prisma/engines-wrapper) and publish a new [`@prisma/engines-version`](https://www.npmjs.com/package/@prisma/engines-version) npm package but on the `integration` tag. -- Which triggers [prisma/prisma](https://github.com/prisma/prisma) to create a `chore(Automated Integration PR): [...]` PR with a branch name also starting with `integration/` -- Since in prisma/prisma we also trigger the publish pipeline when a branch name starts with `integration/`, this will publish all prisma/prisma monorepo packages to npm on the `integration` tag. -- Our [ecosystem-tests](https://github.com/prisma/ecosystem-tests/) tests will automatically pick up this new version and run tests, results will show in [GitHub Actions](https://github.com/prisma/ecosystem-tests/actions?query=branch%3Aintegration) +To trigger an [Automated integration releases from this repository to npm](#automated-integration-releases-from-this-repository-to-npm) or [Manual integration releases from this repository to npm](#manual-integration-releases-from-this-repository-to-npm) branches of forks need to be pulled into this repository so the Buildkite job is triggered. 
You can use these GitHub and git CLI commands to achieve that easily: -This end to end will take minimum ~1h20 to complete, but is completely automated :robot: +``` +gh pr checkout 4375 +git checkout -b integration/sql-nested-transactions +git push --set-upstream origin integration/sql-nested-transactions +``` -Notes: -- in prisma/prisma repository, we do not run tests for `integration/` branches, it is much faster and also means that there is no risk of test failing (e.g. flaky tests, snapshots) that would stop the publishing process. -- in prisma/prisma-engines tests must first pass, before publishing starts. So better keep an eye on them and restart them as needed. +If there is a need to re-create this branch because it has been updated, deleting it and re-creating will make sure the content is identical and avoid any conflicts. +``` +git branch --delete integration/sql-nested-transactions +gh pr checkout 4375 +git checkout -b integration/sql-nested-transactions +git push --set-upstream origin integration/sql-nested-transactions --force +``` ## Security From 10b1ce5f50d2647b7b06e2011289cc7652fb8d08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 13 Nov 2023 11:45:06 +0100 Subject: [PATCH 60/67] test(qe): exclude json nested shorthand in MySQL 5.6 (#4438) --- .../query-engine-tests/tests/queries/filters/json.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs index 5440ff8218f8..2b4b880b4975 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs @@ -209,7 +209,7 @@ mod json { // The external runner for driver adapters, in spite of the protocol being used in the test matrix // uses the JSON representation of queries, so this test should not apply to driver adapters (exclude(JS)) - #[connector_test(schema(json_opt), exclude(JS))] + #[connector_test(schema(json_opt), exclude(JS, MySQL(5.6)))] async fn nested_not_shorthand(runner: Runner) -> TestResult<()> { // Those tests pass with the JSON protocol because the entire object is parsed as JSON. // They remain useful to ensure we don't ever allow a full JSON filter input object type at the schema level. From 174e7d3c3b4b73e62e2363855259481f1065565b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Mon, 13 Nov 2023 14:29:20 +0100 Subject: [PATCH 61/67] ci: update engineer version for .finish detection logic (#4434) --- .buildkite/engineer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 5de99cea5390..880db1967231 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -54,7 +54,7 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). 
- curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.60/latest/$OS/engineer.gz" --output engineer.gz + curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.62/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer From e0f0b06624240a0fe4fffa1dffbf3a0204cb7a8a Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 13 Nov 2023 15:14:59 +0100 Subject: [PATCH 62/67] chore: unify error message for i32/i64 parsing (#4429) * chore: unify error message for i32/i64 parsing * chore: extract common logic to "parse_number_as_i64" --- query-engine/driver-adapters/src/proxy.rs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 62086a245199..a708d75c0e32 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -249,6 +249,12 @@ fn js_value_to_quaint( column_type: ColumnType, column_name: &str, ) -> quaint::Result> { + let parse_number_as_i64 = |n: &serde_json::Number| { + n.as_i64().ok_or(conversion_error!( + "number must be an integer in column '{column_name}', got '{n}'" + )) + }; + // Note for the future: it may be worth revisiting how much bloat so many panics with different static // strings add to the compiled artefact, and in case we should come up with a restricted set of panic // messages, or even find a way of removing them altogether. @@ -256,8 +262,7 @@ fn js_value_to_quaint( ColumnType::Int32 => match json_value { serde_json::Value::Number(n) => { // n.as_i32() is not implemented, so we need to downcast from i64 instead - n.as_i64() - .ok_or(conversion_error!("number must be an integer in column '{column_name}'")) + parse_number_as_i64(&n) .and_then(|n| -> quaint::Result { n.try_into() .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}")) @@ -273,9 +278,7 @@ fn js_value_to_quaint( )), }, ColumnType::Int64 => match json_value { - serde_json::Value::Number(n) => n.as_i64().map(QuaintValue::int64).ok_or(conversion_error!( - "number must be an i64 in column '{column_name}', got {n}" - )), + serde_json::Value::Number(n) => parse_number_as_i64(&n).map(QuaintValue::int64), serde_json::Value::String(s) => s.parse::().map(QuaintValue::int64).map_err(|e| { conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}") }), From e95e739751f42d8ca026f6b910f5a2dc5adeaeee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 13 Nov 2023 18:48:33 +0100 Subject: [PATCH 63/67] chore(qe): update mobc to include importcjj/mobc#86 (#4379) * chore(qe) update mobc after to include importcjj/mobc#86 Fixes https://github.com/prisma/prisma/issues/21221 * Add unit test --------- Co-authored-by: Jan Piotrowski --- Cargo.lock | 5 ++-- query-engine/black-box-tests/Cargo.toml | 1 + .../tests/metrics/smoke_tests.rs | 25 ++++++++++++++++++- 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 35eff530999a..573e31eababd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -326,6 +326,7 @@ dependencies = [ "query-engine-metrics", "query-engine-tests", "query-tests-setup", + "regex", "reqwest", "serde_json", "tokio", @@ -2396,9 +2397,9 @@ dependencies = [ [[package]] name = "mobc" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bdeff49b387edef305eccfe166af3e1483bb57902dbf369dddc42dc824df23b" 
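The `parse_number_as_i64` helper introduced in proxy.rs above can be exercised in isolation; a sketch of the unified parsing, assuming serde_json and substituting a plain `String` for the crate's `conversion_error!` macro:

```rust
fn main() {
    let column_name = "field";

    // The shared closure: Int32 and Int64 columns now fail with the same wording.
    let parse_number_as_i64 = |n: &serde_json::Number| {
        n.as_i64().ok_or(format!(
            "number must be an integer in column '{column_name}', got '{n}'"
        ))
    };

    // Int64 path: the parsed value is used directly.
    let big = serde_json::Number::from(9_007_199_254_740_993i64);
    assert_eq!(parse_number_as_i64(&big), Ok(9_007_199_254_740_993));

    // Int32 path: the i64 is downcast afterwards, adding its own error on overflow.
    let n = serde_json::Number::from(i64::from(i32::MAX) + 1);
    let as_i32: Result<i32, String> = parse_number_as_i64(&n).and_then(|v| {
        v.try_into()
            .map_err(|e| format!("cannot convert {v} to i32 in column '{column_name}': {e}"))
    });
    assert!(as_i32.is_err());

    // A float is rejected up front with the unified message.
    let f = serde_json::Number::from_f64(1.5).unwrap();
    assert!(parse_number_as_i64(&f).is_err());
}
```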
+checksum = "90eb49dc5d193287ff80e72a86f34cfb27aae562299d22fea215e06ea1059dd3" dependencies = [ "async-trait", "futures-channel", diff --git a/query-engine/black-box-tests/Cargo.toml b/query-engine/black-box-tests/Cargo.toml index 056ee2bcdb43..cc9e99b8ca3c 100644 --- a/query-engine/black-box-tests/Cargo.toml +++ b/query-engine/black-box-tests/Cargo.toml @@ -15,3 +15,4 @@ user-facing-errors.workspace = true insta = "1.7.1" enumflags2 = "0.7" query-engine-metrics = {path = "../metrics"} +regex = "1.9.3" diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index 3397de75af99..5ff7ec8ad9ba 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -4,6 +4,7 @@ use query_engine_tests::*; /// Asserts common basics for composite type writes. #[test_suite(schema(schema))] mod smoke_tests { + use regex::Regex; fn schema() -> String { let schema = indoc! { r#"model Person { @@ -14,6 +15,24 @@ mod smoke_tests { schema.to_owned() } + fn assert_value_in_range(metrics: &str, metric: &str, low: f64, high: f64) { + let regex = Regex::new(format!(r"{metric}\s+([+-]?\d+(\.\d+)?)").as_str()).unwrap(); + match regex.captures(&metrics) { + Some(capture) => { + let value = capture.get(1).unwrap().as_str().parse::().unwrap(); + assert!( + value >= low && value <= high, + "expected {} value of {} to be between {} and {}", + metric, + value, + low, + high + ); + } + None => panic!("Metric {} not found in metrics text", metric), + } + } + #[connector_test] #[rustfmt::skip] async fn expected_metrics_rendered(r: Runner) -> TestResult<()> { @@ -62,6 +81,8 @@ mod smoke_tests { // counters assert_eq!(metrics.matches("# HELP prisma_client_queries_total The total number of Prisma Client queries executed").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_total counter").count(), 1); + assert_eq!(metrics.matches("prisma_client_queries_total 1").count(), 1); + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_total The total number of datasource queries executed").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_total counter").count(), 1); @@ -81,13 +102,15 @@ mod smoke_tests { assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_pool_connections_busy gauge").count(), 1); + assert_value_in_range(&metrics, "prisma_pool_connections_busy", 0f64, 1f64); assert_eq!(metrics.matches("# HELP prisma_pool_connections_idle The number of pool connections that are not busy running a query").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_pool_connections_idle gauge").count(), 1); assert_eq!(metrics.matches("# HELP prisma_pool_connections_open The number of pool connections currently open").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_pool_connections_open gauge").count(), 1); - + assert_value_in_range(&metrics, "prisma_pool_connections_open", 0f64, 1f64); + // histograms assert_eq!(metrics.matches("# HELP prisma_client_queries_duration_histogram_ms The distribution of the time Prisma Client queries took to run end to end").count(), 1); assert_eq!(metrics.matches("# TYPE prisma_client_queries_duration_histogram_ms histogram").count(), 1); From 4d2e0085c118702f881d6e8b0ec0b9a5498fa6e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Wed, 15 Nov 2023 
17:37:36 +0100 Subject: [PATCH 64/67] Use revert-esm prisma/prisma branch by default (#4452) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index e00c122e2713..614f72e5bd23 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma -DRIVER_ADAPTERS_BRANCH ?= main +DRIVER_ADAPTERS_BRANCH ?= revert-esm LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ From 9f206d0af8e47db4c015ffde4969b6c7f8667da6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 16 Nov 2023 14:01:04 +0100 Subject: [PATCH 65/67] chore: manual update of engineer to 1.63 (#4446) --- .buildkite/engineer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 880db1967231..71c1211a83b4 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -54,7 +54,7 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). - curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.62/latest/$OS/engineer.gz" --output engineer.gz + curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.63/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer From 2ef5b74aa83695618276f241a13850fa2063b9da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 16 Nov 2023 14:25:08 +0100 Subject: [PATCH 66/67] Revert "Use revert-esm prisma/prisma branch by default (#4452)" (#4457) This reverts commit 4d2e0085c118702f881d6e8b0ec0b9a5498fa6e3. 
--- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 614f72e5bd23..e00c122e2713 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma -DRIVER_ADAPTERS_BRANCH ?= revert-esm +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ From 31d753f18172e13e8282d616b2bdb5ed9db14ff9 Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Thu, 16 Nov 2023 15:07:34 +0100 Subject: [PATCH 67/67] chore: rename prisma-models to query-structure --- Cargo.lock | 52 +++++++-------- Cargo.toml | 2 +- .../query-tests-setup/Cargo.toml | 2 +- .../src/runner/json_adapter/request.rs | 2 +- .../src/runner/json_adapter/response.rs | 2 +- .../mongodb-query-connector/Cargo.toml | 4 +- .../mongodb-query-connector/src/cursor.rs | 2 +- .../mongodb-query-connector/src/error.rs | 2 +- .../mongodb-query-connector/src/filter.rs | 65 ++++++++----------- .../src/interface/connection.rs | 10 +-- .../src/interface/mod.rs | 2 +- .../src/interface/transaction.rs | 10 +-- .../mongodb-query-connector/src/join.rs | 2 +- .../mongodb-query-connector/src/orderby.rs | 14 ++-- .../src/output_meta.rs | 2 +- .../mongodb-query-connector/src/projection.rs | 6 +- .../src/query_builder/group_by_builder.rs | 14 ++-- .../src/query_builder/read_query_builder.rs | 4 +- .../src/root_queries/aggregate.rs | 2 +- .../src/root_queries/mod.rs | 2 +- .../src/root_queries/raw.rs | 2 +- .../src/root_queries/read.rs | 4 +- .../src/root_queries/update/into_operation.rs | 2 +- .../src/root_queries/update/operation.rs | 3 +- .../src/root_queries/write.rs | 2 +- .../mongodb-query-connector/src/value.rs | 4 +- .../connectors/query-connector/Cargo.toml | 2 +- .../connectors/query-connector/src/coerce.rs | 2 +- .../connectors/query-connector/src/error.rs | 4 +- .../query-connector/src/interface.rs | 4 +- .../connectors/query-connector/src/lib.rs | 6 -- .../connectors/query-connector/src/upsert.rs | 4 +- .../query-connector/src/write_args.rs | 10 ++- .../connectors/sql-query-connector/Cargo.toml | 4 +- .../src/column_metadata.rs | 2 +- .../src/cursor_condition.rs | 3 +- .../src/database/connection.rs | 6 +- .../src/database/operations/read.rs | 2 +- .../src/database/operations/update.rs | 2 +- .../src/database/operations/upsert.rs | 2 +- .../src/database/operations/write.rs | 2 +- .../src/database/transaction.rs | 6 +- .../sql-query-connector/src/error.rs | 8 +-- .../sql-query-connector/src/filter/alias.rs | 2 +- .../sql-query-connector/src/filter/mod.rs | 2 +- .../sql-query-connector/src/filter/visitor.rs | 3 +- .../sql-query-connector/src/join_utils.rs | 3 +- .../src/model_extensions/column.rs | 2 +- .../src/model_extensions/record.rs | 2 +- .../src/model_extensions/relation.rs | 2 +- .../src/model_extensions/scalar_field.rs | 2 +- .../src/model_extensions/selection_result.rs | 2 +- .../src/model_extensions/table.rs | 4 +- .../sql-query-connector/src/ordering.rs | 3 +- .../src/query_arguments_ext.rs | 2 +- .../src/query_builder/mod.rs | 2 +- .../src/query_builder/read.rs | 4 +- .../src/query_builder/write.rs | 2 +- .../sql-query-connector/src/query_ext.rs | 4 +- .../connectors/sql-query-connector/src/row.rs | 2 +- .../sql-query-connector/src/value.rs | 2 +- query-engine/core/Cargo.toml | 2 +- query-engine/core/src/constants.rs | 2 +- query-engine/core/src/error.rs | 2 +- 
.../core/src/executor/request_context.rs | 2 +- query-engine/core/src/interpreter/error.rs | 2 +- .../core/src/interpreter/interpreter_impl.rs | 2 +- .../inmemory_record_processor.rs | 3 +- .../query_interpreters/nested_read.rs | 7 +- .../interpreter/query_interpreters/read.rs | 2 +- query-engine/core/src/query_ast/mod.rs | 3 +- query-engine/core/src/query_ast/read.rs | 4 +- query-engine/core/src/query_ast/write.rs | 4 +- .../core/src/query_document/argument_value.rs | 2 +- query-engine/core/src/query_document/mod.rs | 2 +- .../core/src/query_document/parse_ast.rs | 2 +- .../core/src/query_document/parser.rs | 26 ++++---- .../core/src/query_document/transformers.rs | 2 +- query-engine/core/src/query_graph/mod.rs | 3 +- .../core/src/query_graph_builder/error.rs | 2 +- .../extractors/filters/composite.rs | 3 +- .../extractors/filters/filter_fold.rs | 2 +- .../extractors/filters/mod.rs | 7 +- .../extractors/filters/relation.rs | 3 +- .../extractors/filters/scalar.rs | 6 +- .../extractors/query_arguments.rs | 3 +- .../query_graph_builder/extractors/utils.rs | 2 +- .../read/aggregations/aggregate.rs | 2 +- .../read/aggregations/group_by.rs | 3 +- .../read/aggregations/mod.rs | 2 +- .../src/query_graph_builder/read/first.rs | 2 +- .../core/src/query_graph_builder/read/many.rs | 2 +- .../core/src/query_graph_builder/read/one.rs | 2 +- .../src/query_graph_builder/read/related.rs | 2 +- .../src/query_graph_builder/read/utils.rs | 2 +- .../src/query_graph_builder/write/connect.rs | 2 +- .../src/query_graph_builder/write/create.rs | 3 +- .../src/query_graph_builder/write/delete.rs | 3 +- .../query_graph_builder/write/disconnect.rs | 2 +- .../write/nested/connect_nested.rs | 3 +- .../write/nested/connect_or_create_nested.rs | 3 +- .../write/nested/create_nested.rs | 3 +- .../write/nested/delete_nested.rs | 4 +- .../write/nested/disconnect_nested.rs | 3 +- .../query_graph_builder/write/nested/mod.rs | 2 +- .../write/nested/set_nested.rs | 3 +- .../write/nested/update_nested.rs | 3 +- .../write/nested/upsert_nested.rs | 3 +- .../core/src/query_graph_builder/write/raw.rs | 2 +- .../src/query_graph_builder/write/update.rs | 3 +- .../src/query_graph_builder/write/upsert.rs | 3 +- .../src/query_graph_builder/write/utils.rs | 4 +- .../write/write_args_parser.rs | 2 +- query-engine/core/src/response_ir/internal.rs | 2 +- .../core/src/response_ir/ir_serializer.rs | 2 +- query-engine/core/src/response_ir/mod.rs | 2 +- query-engine/core/src/result_ast/mod.rs | 2 +- query-engine/dmmf/Cargo.toml | 2 +- .../src/ast_builders/datamodel_ast_builder.rs | 2 +- query-engine/query-engine-node-api/Cargo.toml | 2 +- query-engine/query-engine-wasm/Cargo.toml | 2 +- .../Cargo.toml | 2 +- .../src/composite_type.rs | 0 .../src/convert.rs | 0 .../src/default_value.rs | 0 .../src/error.rs | 0 .../src/field/composite.rs | 0 .../src/field/mod.rs | 0 .../src/field/relation.rs | 0 .../src/field/scalar.rs | 0 .../src/field_selection.rs | 0 .../src/fields.rs | 0 .../src/filter}/compare.rs | 2 +- .../src/filter/composite.rs | 6 +- .../src/filter/into_filter.rs | 3 +- .../src/filter/json.rs | 3 +- .../src/filter/list.rs | 3 +- .../src/filter/mod.rs | 5 +- .../src/filter/relation.rs | 4 +- .../src/filter/scalar/compare.rs | 1 - .../src/filter/scalar/condition/mod.rs | 1 - .../src/filter/scalar/condition/value.rs | 3 +- .../src/filter/scalar/mod.rs | 2 +- .../src/filter/scalar/projection.rs | 2 +- .../src/internal_data_model.rs | 0 .../src/internal_enum.rs | 0 .../src/lib.rs | 4 ++ .../src/model.rs | 0 .../src/native_type_instance.rs | 0 
.../src/order_by.rs | 0 .../src/parent_container.rs | 0 .../src/prelude.rs | 0 .../src/prisma_value_ext.rs | 0 .../src/projections/mod.rs | 0 .../src/projections/model_projection.rs | 0 .../src/query_arguments.rs | 3 +- .../src/record.rs | 0 .../src/relation.rs | 0 .../src/selection_result.rs | 0 .../src/zipper.rs | 0 .../tests/datamodel_converter_tests.rs | 4 +- query-engine/request-handlers/Cargo.toml | 2 +- query-engine/request-handlers/src/handler.rs | 2 +- .../src/protocols/json/protocol_adapter.rs | 2 +- query-engine/schema/Cargo.toml | 2 +- query-engine/schema/README.md | 2 +- query-engine/schema/src/build.rs | 4 +- query-engine/schema/src/build/enum_types.rs | 2 +- .../src/build/input_types/fields/arguments.rs | 2 +- .../fields/data_input_mapper/create.rs | 2 +- .../fields/data_input_mapper/mod.rs | 2 +- .../fields/data_input_mapper/update.rs | 2 +- .../input_types/fields/field_filter_types.rs | 2 +- .../schema/src/build/input_types/mod.rs | 2 +- .../input_types/objects/filter_objects.rs | 2 +- .../input_types/objects/order_by_objects.rs | 2 +- .../schema/src/build/mutations/create_many.rs | 2 +- .../schema/src/build/mutations/create_one.rs | 2 +- .../src/build/output_types/aggregation/mod.rs | 2 +- .../schema/src/build/output_types/field.rs | 2 +- .../src/build/output_types/mutation_type.rs | 2 +- .../build/output_types/objects/composite.rs | 2 +- query-engine/schema/src/build/utils.rs | 2 +- query-engine/schema/src/enum_type.rs | 2 +- query-engine/schema/src/identifier_type.rs | 2 +- query-engine/schema/src/input_types.rs | 2 +- query-engine/schema/src/output_types.rs | 2 +- query-engine/schema/src/query_schema.rs | 2 +- 188 files changed, 286 insertions(+), 334 deletions(-) rename query-engine/{prisma-models => query-structure}/Cargo.toml (96%) rename query-engine/{prisma-models => query-structure}/src/composite_type.rs (100%) rename query-engine/{prisma-models => query-structure}/src/convert.rs (100%) rename query-engine/{prisma-models => query-structure}/src/default_value.rs (100%) rename query-engine/{prisma-models => query-structure}/src/error.rs (100%) rename query-engine/{prisma-models => query-structure}/src/field/composite.rs (100%) rename query-engine/{prisma-models => query-structure}/src/field/mod.rs (100%) rename query-engine/{prisma-models => query-structure}/src/field/relation.rs (100%) rename query-engine/{prisma-models => query-structure}/src/field/scalar.rs (100%) rename query-engine/{prisma-models => query-structure}/src/field_selection.rs (100%) rename query-engine/{prisma-models => query-structure}/src/fields.rs (100%) rename query-engine/{connectors/query-connector/src => query-structure/src/filter}/compare.rs (99%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/composite.rs (95%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/into_filter.rs (93%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/json.rs (98%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/list.rs (96%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/mod.rs (99%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/relation.rs (97%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/scalar/compare.rs (99%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/scalar/condition/mod.rs (99%) rename query-engine/{connectors/query-connector => 
query-structure}/src/filter/scalar/condition/value.rs (97%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/scalar/mod.rs (99%) rename query-engine/{connectors/query-connector => query-structure}/src/filter/scalar/projection.rs (97%) rename query-engine/{prisma-models => query-structure}/src/internal_data_model.rs (100%) rename query-engine/{prisma-models => query-structure}/src/internal_enum.rs (100%) rename query-engine/{prisma-models => query-structure}/src/lib.rs (91%) rename query-engine/{prisma-models => query-structure}/src/model.rs (100%) rename query-engine/{prisma-models => query-structure}/src/native_type_instance.rs (100%) rename query-engine/{prisma-models => query-structure}/src/order_by.rs (100%) rename query-engine/{prisma-models => query-structure}/src/parent_container.rs (100%) rename query-engine/{prisma-models => query-structure}/src/prelude.rs (100%) rename query-engine/{prisma-models => query-structure}/src/prisma_value_ext.rs (100%) rename query-engine/{prisma-models => query-structure}/src/projections/mod.rs (100%) rename query-engine/{prisma-models => query-structure}/src/projections/model_projection.rs (100%) rename query-engine/{connectors/query-connector => query-structure}/src/query_arguments.rs (99%) rename query-engine/{prisma-models => query-structure}/src/record.rs (100%) rename query-engine/{prisma-models => query-structure}/src/relation.rs (100%) rename query-engine/{prisma-models => query-structure}/src/selection_result.rs (100%) rename query-engine/{prisma-models => query-structure}/src/zipper.rs (100%) rename query-engine/{prisma-models => query-structure}/tests/datamodel_converter_tests.rs (99%) diff --git a/Cargo.lock b/Cargo.lock index 573e31eababd..167ae495efbb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1082,8 +1082,8 @@ dependencies = [ "indoc 2.0.3", "itertools", "pretty_assertions", - "prisma-models", "psl", + "query-structure", "schema", "serde", "serde_json", @@ -2486,11 +2486,11 @@ dependencies = [ "itertools", "mongodb", "mongodb-client", - "prisma-models", "prisma-value", "psl", "query-connector", "query-engine-metrics", + "query-structure", "rand 0.7.3", "regex", "serde", @@ -3346,22 +3346,6 @@ dependencies = [ "structopt", ] -[[package]] -name = "prisma-models" -version = "0.0.0" -dependencies = [ - "bigdecimal", - "chrono", - "cuid", - "getrandom 0.2.10", - "itertools", - "nanoid", - "prisma-value", - "psl", - "thiserror", - "uuid", -] - [[package]] name = "prisma-schema-build" version = "0.1.0" @@ -3652,8 +3636,8 @@ dependencies = [ "futures", "indexmap 1.9.3", "itertools", - "prisma-models", "prisma-value", + "query-structure", "serde", "serde_json", "thiserror", @@ -3679,10 +3663,10 @@ dependencies = [ "once_cell", "opentelemetry", "petgraph 0.4.13", - "prisma-models", "psl", "query-connector", "query-engine-metrics", + "query-structure", "schema", "serde", "serde_json", @@ -3762,12 +3746,12 @@ dependencies = [ "napi-build", "napi-derive", "opentelemetry", - "prisma-models", "psl", "quaint", "query-connector", "query-core", "query-engine-metrics", + "query-structure", "request-handlers", "serde", "serde_json", @@ -3819,8 +3803,8 @@ dependencies = [ "futures", "js-sys", "log", - "prisma-models", "psl", + "query-structure", "serde", "serde-wasm-bindgen", "serde_json", @@ -3837,6 +3821,22 @@ dependencies = [ "wasm-logger", ] +[[package]] +name = "query-structure" +version = "0.0.0" +dependencies = [ + "bigdecimal", + "chrono", + "cuid", + "getrandom 0.2.10", + "itertools", + "nanoid", + "prisma-value", + 
"psl", + "thiserror", + "uuid", +] + [[package]] name = "query-test-macros" version = "0.1.0" @@ -3862,13 +3862,13 @@ dependencies = [ "nom", "once_cell", "parse-hyperlinks", - "prisma-models", "psl", "qe-setup", "quaint", "query-core", "query-engine", "query-engine-metrics", + "query-structure", "regex", "request-handlers", "serde", @@ -4150,10 +4150,10 @@ dependencies = [ "itertools", "mongodb-query-connector", "once_cell", - "prisma-models", "psl", "quaint", "query-core", + "query-structure", "schema", "serde", "serde_json", @@ -4421,8 +4421,8 @@ version = "0.1.0" dependencies = [ "codspeed-criterion-compat", "once_cell", - "prisma-models", "psl", + "query-structure", "rustc-hash", ] @@ -4934,11 +4934,11 @@ dependencies = [ "itertools", "once_cell", "opentelemetry", - "prisma-models", "prisma-value", "psl", "quaint", "query-connector", + "query-structure", "rand 0.7.3", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index 4a3cd1450caf..4892eba3e497 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ members = [ "query-engine/dmmf", "query-engine/driver-adapters", "query-engine/metrics", - "query-engine/prisma-models", + "query-engine/query-structure", "query-engine/query-engine", "query-engine/query-engine-node-api", "query-engine/query-engine-wasm", diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml index 088a0d4b2d34..095c9cd02f60 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] serde_json.workspace = true -prisma-models = { path = "../../prisma-models" } +query-structure = { path = "../../query-structure" } once_cell = "1" qe-setup = { path = "../qe-setup" } request-handlers = { path = "../../request-handlers" } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs index 0eee2d9e6cb6..b9354056b692 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs @@ -1,7 +1,6 @@ use crate::{TestError, TestResult}; use indexmap::IndexMap; use itertools::Itertools; -use prisma_models::PrismaValue; use query_core::{ constants::custom_types, schema::{ @@ -10,6 +9,7 @@ use query_core::{ }, ArgumentValue, ArgumentValueObject, Selection, }; +use query_structure::PrismaValue; use request_handlers::{Action, FieldQuery, GraphQLProtocolAdapter, JsonSingleQuery, SelectionSet, SelectionSetValue}; use serde_json::{json, Value as JsonValue}; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs index 29029e3cf81c..a366fb6bdc1b 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/response.rs @@ -1,8 +1,8 @@ -use prisma_models::PrismaValue; use query_core::{ constants::custom_types, response_ir::{Item, ItemRef, Map}, }; +use query_structure::PrismaValue; use request_handlers::{GQLBatchResponse, GQLResponse, PrismaResponse}; pub struct JsonResponse; diff --git a/query-engine/connectors/mongodb-query-connector/Cargo.toml 
b/query-engine/connectors/mongodb-query-connector/Cargo.toml index c4a02eaa8643..b451e17f6e6f 100644 --- a/query-engine/connectors/mongodb-query-connector/Cargo.toml +++ b/query-engine/connectors/mongodb-query-connector/Cargo.toml @@ -24,8 +24,8 @@ indexmap = "1.7" query-engine-metrics = {path = "../../metrics"} cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } -[dependencies.prisma-models] -path = "../../prisma-models" +[dependencies.query-structure] +path = "../../query-structure" [dependencies.mongodb-client] path = "../../../libs/mongodb-client" diff --git a/query-engine/connectors/mongodb-query-connector/src/cursor.rs b/query-engine/connectors/mongodb-query-connector/src/cursor.rs index 9adbf8c1966b..1aaa22ef6b19 100644 --- a/query-engine/connectors/mongodb-query-connector/src/cursor.rs +++ b/query-engine/connectors/mongodb-query-connector/src/cursor.rs @@ -1,6 +1,6 @@ use crate::{orderby::OrderByData, IntoBson}; use mongodb::bson::{doc, Document}; -use prisma_models::{OrderBy, SelectionResult, SortOrder}; +use query_structure::{OrderBy, SelectionResult, SortOrder}; #[derive(Debug, Clone)] pub(crate) struct CursorData { diff --git a/query-engine/connectors/mongodb-query-connector/src/error.rs b/query-engine/connectors/mongodb-query-connector/src/error.rs index d71f39fb54e5..f32ff78e29c9 100644 --- a/query-engine/connectors/mongodb-query-connector/src/error.rs +++ b/query-engine/connectors/mongodb-query-connector/src/error.rs @@ -4,7 +4,7 @@ use mongodb::{ bson::{self, extjson}, error::{CommandError, Error as DriverError, TRANSIENT_TRANSACTION_ERROR}, }; -use prisma_models::{CompositeFieldRef, Field, ScalarFieldRef, SelectedField}; +use query_structure::{CompositeFieldRef, Field, ScalarFieldRef, SelectedField}; use regex::Regex; use thiserror::Error; use user_facing_errors::query_engine::DatabaseConstraint; diff --git a/query-engine/connectors/mongodb-query-connector/src/filter.rs b/query-engine/connectors/mongodb-query-connector/src/filter.rs index 44ca06cf875b..64bdadafd6a9 100644 --- a/query-engine/connectors/mongodb-query-connector/src/filter.rs +++ b/query-engine/connectors/mongodb-query-connector/src/filter.rs @@ -1,11 +1,6 @@ use crate::{constants::group_by, error::MongoError, join::JoinStage, query_builder::AggregationType, IntoBson}; -use connector_interface::{ - AggregationFilter, CompositeCondition, CompositeFilter, ConditionListValue, ConditionValue, Filter, - OneRelationIsNullFilter, QueryMode, RelationFilter, ScalarCompare, ScalarCondition, ScalarFilter, ScalarListFilter, - ScalarProjection, -}; use mongodb::bson::{doc, Bson, Document}; -use prisma_models::{CompositeFieldRef, PrismaValue, ScalarFieldRef, TypeIdentifier}; +use query_structure::*; #[derive(Debug, Clone)] pub(crate) enum MongoFilter { @@ -132,9 +127,9 @@ impl MongoFilterVisitor { fn visit_scalar_filter(&self, filter: ScalarFilter) -> crate::Result<MongoFilter> { let field = match filter.projection { - connector_interface::ScalarProjection::Single(sf) => sf, - connector_interface::ScalarProjection::Compound(mut c) if c.len() == 1 => c.pop().unwrap(), - connector_interface::ScalarProjection::Compound(_) => { + ScalarProjection::Single(sf) => sf, + ScalarProjection::Compound(mut c) if c.len() == 1 => c.pop().unwrap(), + ScalarProjection::Compound(_) => { unreachable!( "Multi-field compound filter case hit when it should have been folded into normal filters previously."
) @@ -417,7 +412,7 @@ impl MongoFilterVisitor { let field_ref = filter.as_field_ref().cloned(); let filter_doc = match filter.condition { - connector_interface::ScalarListCondition::Contains(val) => { + ScalarListCondition::Contains(val) => { let bson = match val { ConditionValue::Value(value) => (field, value).into_bson()?, ConditionValue::FieldRef(field_ref) => self.prefixed_field_ref(&field_ref)?, @@ -426,11 +421,11 @@ impl MongoFilterVisitor { doc! { "$in": [bson, coerce_as_array(&field_name)] } } - connector_interface::ScalarListCondition::ContainsEvery(vals) if vals.is_empty() => { + ScalarListCondition::ContainsEvery(vals) if vals.is_empty() => { // Empty hasEvery: Return all records. render_stub_condition(true) } - connector_interface::ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { + ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { let ins = vals .into_iter() .map(|val| { @@ -442,20 +437,18 @@ impl MongoFilterVisitor { doc! { "$and": ins } } - connector_interface::ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => { - render_every( - &field_name, - "elem", - doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] }, - true, - ) - } + ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => render_every( + &field_name, + "elem", + doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] }, + true, + ), - connector_interface::ScalarListCondition::ContainsSome(vals) if vals.is_empty() => { + ScalarListCondition::ContainsSome(vals) if vals.is_empty() => { // Empty hasSome: Return no records. render_stub_condition(false) } - connector_interface::ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { + ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { let ins = vals .into_iter() .map(|val| { @@ -467,19 +460,17 @@ impl MongoFilterVisitor { doc! { "$or": ins } } - connector_interface::ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { - render_some( - &field_name, - "elem", - doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] }, - true, - ) - } + ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => render_some( + &field_name, + "elem", + doc! { "$in": ["$$elem", coerce_as_array((self.prefix(), &field_ref).into_bson()?)] }, + true, + ), - connector_interface::ScalarListCondition::IsEmpty(true) => { + ScalarListCondition::IsEmpty(true) => { doc! { "$eq": [render_size(&field_name, true), 0] } } - connector_interface::ScalarListCondition::IsEmpty(false) => { + ScalarListCondition::IsEmpty(false) => { doc! 
{ "$gt": [render_size(&field_name, true), 0] } } }; @@ -653,21 +644,21 @@ impl MongoFilterVisitor { let mut join_stage = JoinStage::new(from_field); let filter_doc = match filter.condition { - connector_interface::RelationCondition::EveryRelatedRecord => { + RelationCondition::EveryRelatedRecord => { let (every, nested_joins) = render_every_from_filter(&field_name, nested_filter, false, false)?; join_stage.extend_nested(nested_joins); every } - connector_interface::RelationCondition::AtLeastOneRelatedRecord => { + RelationCondition::AtLeastOneRelatedRecord => { let (some, nested_joins) = render_some_from_filter(&field_name, nested_filter, false, false)?; join_stage.extend_nested(nested_joins); some } - connector_interface::RelationCondition::NoRelatedRecord if is_to_one => { + RelationCondition::NoRelatedRecord if is_to_one => { if is_empty_filter { // Doesn't need coercing the array since joins always return arrays doc! { "$eq": [render_size(&field_name, false), 0] } @@ -688,7 +679,7 @@ impl MongoFilterVisitor { } } } - connector_interface::RelationCondition::NoRelatedRecord => { + RelationCondition::NoRelatedRecord => { if is_empty_filter { // Doesn't need coercing the array since joins always return arrays doc! { "$eq": [render_size(&field_name, false), 0] } @@ -700,7 +691,7 @@ impl MongoFilterVisitor { none } } - connector_interface::RelationCondition::ToOneRelatedRecord => { + RelationCondition::ToOneRelatedRecord => { // To-ones are coerced to single-element arrays via the join. // We render an "every" expression on that array to ensure that the predicate is matched. let (every, nested_joins) = render_every_from_filter(&field_name, nested_filter, false, false)?; diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs index fc2c241e3573..e10c0e1f5b3b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/connection.rs @@ -10,7 +10,7 @@ use connector_interface::{ WriteOperations, }; use mongodb::{ClientSession, Database}; -use prisma_models::{prelude::*, SelectionResult}; +use query_structure::{prelude::*, SelectionResult}; use std::collections::HashMap; pub struct MongoDbConnection { @@ -187,7 +187,7 @@ impl ReadOperations for MongoDbConnection { async fn get_single_record( &mut self, model: &Model, - filter: &connector_interface::Filter, + filter: &query_structure::Filter, selected_fields: &FieldSelection, aggr_selections: &[RelAggregationSelection], _trace_id: Option, @@ -209,7 +209,7 @@ impl ReadOperations for MongoDbConnection { async fn get_many_records( &mut self, model: &Model, - query_arguments: connector_interface::QueryArguments, + query_arguments: query_structure::QueryArguments, selected_fields: &FieldSelection, aggregation_selections: &[RelAggregationSelection], _trace_id: Option, @@ -243,10 +243,10 @@ impl ReadOperations for MongoDbConnection { async fn aggregate_records( &mut self, model: &Model, - query_arguments: connector_interface::QueryArguments, + query_arguments: query_structure::QueryArguments, selections: Vec, group_by: Vec, - having: Option, + having: Option, _trace_id: Option, ) -> connector_interface::Result> { catch(async move { diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs b/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs index 5b5821410c97..620d7628182f 100644 --- 
a/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/mod.rs @@ -12,8 +12,8 @@ use connector_interface::{ }; use futures::Future; use mongodb::Client; -use prisma_models::prelude::*; use psl::Datasource; +use query_structure::prelude::*; use crate::error::MongoError; diff --git a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs index 5f7c143c442b..1de0bb8c750e 100644 --- a/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs +++ b/query-engine/connectors/mongodb-query-connector/src/interface/transaction.rs @@ -7,8 +7,8 @@ use connector_interface::{ ConnectionLike, ReadOperations, RelAggregationSelection, Transaction, UpdateType, WriteOperations, }; use mongodb::options::{Acknowledgment, ReadConcern, TransactionOptions, WriteConcern}; -use prisma_models::SelectionResult; use query_engine_metrics::{decrement_gauge, increment_gauge, metrics, PRISMA_CLIENT_QUERIES_ACTIVE}; +use query_structure::SelectionResult; use std::collections::HashMap; pub struct MongoDbTransaction<'conn> { @@ -252,7 +252,7 @@ impl<'conn> ReadOperations for MongoDbTransaction<'conn> { async fn get_single_record( &mut self, model: &Model, - filter: &connector_interface::Filter, + filter: &query_structure::Filter, selected_fields: &FieldSelection, aggr_selections: &[RelAggregationSelection], _trace_id: Option<String>, @@ -274,7 +274,7 @@ impl<'conn> ReadOperations for MongoDbTransaction<'conn> { async fn get_many_records( &mut self, model: &Model, - query_arguments: connector_interface::QueryArguments, + query_arguments: query_structure::QueryArguments, selected_fields: &FieldSelection, aggregation_selections: &[RelAggregationSelection], _trace_id: Option<String>, @@ -314,10 +314,10 @@ impl<'conn> ReadOperations for MongoDbTransaction<'conn> { async fn aggregate_records( &mut self, model: &Model, - query_arguments: connector_interface::QueryArguments, + query_arguments: query_structure::QueryArguments, selections: Vec<AggregationSelection>, group_by: Vec<ScalarFieldRef>, - having: Option<connector_interface::Filter>, + having: Option<query_structure::Filter>, _trace_id: Option<String>, ) -> connector_interface::Result<Vec<AggregationRow>> { catch(async move { diff --git a/query-engine/connectors/mongodb-query-connector/src/join.rs b/query-engine/connectors/mongodb-query-connector/src/join.rs index fb5c46d6858a..24c8abe2fba9 100644 --- a/query-engine/connectors/mongodb-query-connector/src/join.rs +++ b/query-engine/connectors/mongodb-query-connector/src/join.rs @@ -1,6 +1,6 @@ use crate::filter::MongoFilter; use mongodb::bson::{doc, Document}; -use prisma_models::{walkers, RelationFieldRef, ScalarFieldRef}; +use query_structure::{walkers, RelationFieldRef, ScalarFieldRef}; /// A join stage describes a tree of joins and nested joins to be performed on a collection.
/// Every document of the `source` side will be joined with the collection documents diff --git a/query-engine/connectors/mongodb-query-connector/src/orderby.rs b/query-engine/connectors/mongodb-query-connector/src/orderby.rs index 2e89d4399b47..15b37691ed27 100644 --- a/query-engine/connectors/mongodb-query-connector/src/orderby.rs +++ b/query-engine/connectors/mongodb-query-connector/src/orderby.rs @@ -1,7 +1,7 @@ use crate::join::JoinStage; use itertools::Itertools; use mongodb::bson::{doc, Document}; -use prisma_models::{OrderBy, OrderByHop, OrderByToManyAggregation, SortOrder}; +use query_structure::{OrderBy, OrderByHop, OrderByToManyAggregation, SortOrder}; use std::iter; #[derive(Debug)] @@ -230,11 +230,11 @@ impl OrderByBuilder { // Can only be scalar aggregations for groupBy, ToMany aggregations are not supported yet. if let OrderBy::ScalarAggregation(order_by_aggr) = &data.order_by { let prefix = match order_by_aggr.sort_aggregation { - prisma_models::SortAggregation::Count => "count", - prisma_models::SortAggregation::Avg => "avg", - prisma_models::SortAggregation::Sum => "sum", - prisma_models::SortAggregation::Min => "min", - prisma_models::SortAggregation::Max => "max", + query_structure::SortAggregation::Count => "count", + query_structure::SortAggregation::Avg => "avg", + query_structure::SortAggregation::Sum => "sum", + query_structure::SortAggregation::Min => "min", + query_structure::SortAggregation::Max => "max", }; format!("{}_{}", prefix, data.scalar_field_name()) @@ -258,7 +258,7 @@ impl OrderByBuilder { if let OrderBy::ToManyAggregation(order_by_aggregate) = &data.order_by { if !order_by_aggregate.path.is_empty() { match order_by_aggregate.sort_aggregation { - prisma_models::SortAggregation::Count => { + query_structure::SortAggregation::Count => { if let Some(clone_to) = data.prefix.as_ref().and_then(|x| x.clone_to.clone()) { order_aggregate_proj_doc.push(doc! { "$addFields": { clone_to.clone(): { "$size": { "$ifNull": [format!("${}", data.full_reference_path(false)), []] } } } }); field_name = clone_to; // Todo: Just a hack right now, this whole function needs love. diff --git a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs index 3e8474776772..081672f9d6e6 100644 --- a/query-engine/connectors/mongodb-query-connector/src/output_meta.rs +++ b/query-engine/connectors/mongodb-query-connector/src/output_meta.rs @@ -1,6 +1,6 @@ use connector_interface::{AggregationSelection, RelAggregationSelection}; use indexmap::IndexMap; -use prisma_models::{ +use query_structure::{ ast::FieldArity, DefaultKind, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField, TypeIdentifier, }; diff --git a/query-engine/connectors/mongodb-query-connector/src/projection.rs b/query-engine/connectors/mongodb-query-connector/src/projection.rs index 4ed83cd14522..80a6a3e792e7 100644 --- a/query-engine/connectors/mongodb-query-connector/src/projection.rs +++ b/query-engine/connectors/mongodb-query-connector/src/projection.rs @@ -1,6 +1,6 @@ use crate::IntoBson; use mongodb::bson::{Bson, Document}; -use prisma_models::{FieldSelection, SelectedField}; +use query_structure::{FieldSelection, SelectedField}; /// Used as projection document for Mongo queries. 
impl IntoBson for FieldSelection { @@ -15,13 +15,13 @@ impl IntoBson for FieldSelection { fn path_prefixed_selection(doc: &mut Document, parent_paths: Vec<String>, selections: Vec<SelectedField>) { for field in selections { match field { - prisma_models::SelectedField::Scalar(sf) => { + query_structure::SelectedField::Scalar(sf) => { let mut parent_paths = parent_paths.clone(); parent_paths.push(sf.db_name().to_owned()); doc.insert(parent_paths.join("."), Bson::Int32(1)); } - prisma_models::SelectedField::Composite(cs) => { + query_structure::SelectedField::Composite(cs) => { let mut parent_paths = parent_paths.clone(); parent_paths.push(cs.field.db_name().to_owned()); path_prefixed_selection(doc, parent_paths, cs.selections); diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs index 4ea3d4590446..f5ac3659f1b5 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs @@ -1,8 +1,8 @@ use crate::constants::*; -use connector_interface::{AggregationSelection, Filter}; +use connector_interface::AggregationSelection; use mongodb::bson::{doc, Bson, Document}; -use prisma_models::ScalarFieldRef; +use query_structure::{AggregationFilter, Filter, ScalarFieldRef}; use std::collections::HashSet; /// Represents a `$group` aggregation stage. @@ -161,19 +161,19 @@ impl GroupByBuilder { unfold_filters(filters); } Filter::Aggregation(aggregation) => match aggregation { - connector_interface::AggregationFilter::Count(filter) => { + AggregationFilter::Count(filter) => { self.insert_from_filter(filter.as_ref(), AggregationType::Count); } - connector_interface::AggregationFilter::Average(filter) => { + AggregationFilter::Average(filter) => { self.insert_from_filter(filter.as_ref(), AggregationType::Average); } - connector_interface::AggregationFilter::Sum(filter) => { + AggregationFilter::Sum(filter) => { self.insert_from_filter(filter.as_ref(), AggregationType::Sum); } - connector_interface::AggregationFilter::Min(filter) => { + AggregationFilter::Min(filter) => { self.insert_from_filter(filter.as_ref(), AggregationType::Min); } - connector_interface::AggregationFilter::Max(filter) => { + AggregationFilter::Max(filter) => { self.insert_from_filter(filter.as_ref(), AggregationType::Max); } }, diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs index e6ca54929989..fcf749fc2d35 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/read_query_builder.rs @@ -10,14 +10,14 @@ use crate::{ root_queries::observing, vacuum_cursor, BsonTransform, IntoBson, }; -use connector_interface::{AggregationSelection, Filter, QueryArguments, RelAggregationSelection}; +use connector_interface::{AggregationSelection, RelAggregationSelection}; use itertools::Itertools; use mongodb::{ bson::{doc, Document}, options::AggregateOptions, ClientSession, Collection, }; -use prisma_models::{FieldSelection, Model, ScalarFieldRef}; +use query_structure::{FieldSelection, Filter, Model, QueryArguments, ScalarFieldRef}; use std::convert::TryFrom; // Mongo Driver broke usage of the simple API, can't be used by us anymore.
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs index 65ee8a71ae30..05ff57053e95 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/aggregate.rs @@ -2,7 +2,7 @@ use crate::{constants::*, output_meta, query_builder::MongoReadQueryBuilder, val use connector_interface::*; use mongodb::{bson::Document, ClientSession, Database}; -use prisma_models::prelude::*; +use query_structure::{prelude::*, Filter, QueryArguments}; pub async fn aggregate<'conn>( database: &Database, diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs index ba1257270da2..f66adbac3e3b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs @@ -13,11 +13,11 @@ use crate::{ use futures::Future; use mongodb::bson::Bson; use mongodb::bson::Document; -use prisma_models::*; use query_engine_metrics::{ histogram, increment_counter, metrics, PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, PRISMA_DATASOURCE_QUERIES_TOTAL, }; +use query_structure::*; use std::time::Instant; use tracing::debug; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs index 6876ab683333..f674eafe9fd4 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs @@ -4,7 +4,7 @@ use mongodb::{ bson::{from_bson, Bson, Document}, options::*, }; -use prisma_models::{Model, PrismaValue}; +use query_structure::{Model, PrismaValue}; use std::collections::HashMap; #[allow(clippy::large_enum_variant)] diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs index 58bf634c4c0d..0d9ac09ae26a 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/read.rs @@ -3,9 +3,9 @@ use crate::{ error::DecorateErrorWithFieldInformationExtension, output_meta, query_builder::MongoReadQueryBuilder, vacuum_cursor, IntoBson, }; -use connector_interface::{Filter, QueryArguments, RelAggregationSelection}; +use connector_interface::RelAggregationSelection; use mongodb::{bson::doc, options::FindOptions, ClientSession, Database}; -use prisma_models::*; +use query_structure::*; use tracing::{info_span, Instrument}; /// Finds a single record. Joins are not required at the moment because the selector is always a unique one. 
diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs index 3f18d8351930..01ff5abcbd13 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/into_operation.rs @@ -3,7 +3,7 @@ use crate::*; use connector_interface::{CompositeWriteOperation, FieldPath, ScalarWriteOperation, WriteOperation}; use mongodb::bson::doc; -use prisma_models::{Field, PrismaValue}; +use query_structure::{Field, PrismaValue}; pub(crate) trait IntoUpdateOperation { fn into_update_operations(self, field: &Field, path: FieldPath) -> crate::Result<Vec<UpdateOperation>>; diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs index cdb7be2c601c..0fa814d81af7 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/update/operation.rs @@ -1,6 +1,7 @@ use super::{expression, into_expression::IntoUpdateExpression}; -use connector_interface::{FieldPath, Filter}; +use connector_interface::FieldPath; use mongodb::bson::{doc, Document}; +use query_structure::Filter; /// `UpdateOperation` is an intermediary AST used to perform preliminary transformations from a `WriteOperation`. /// It is meant to be transformed into an `UpdateExpression`. diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs index f5aa24c6b5d6..c0c43f108aec 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/write.rs @@ -15,7 +15,7 @@ use mongodb::{ options::InsertManyOptions, ClientSession, Collection, Database, }; -use prisma_models::{Model, PrismaValue, SelectionResult}; +use query_structure::{Model, PrismaValue, SelectionResult}; use std::{collections::HashMap, convert::TryInto}; use tracing::{info_span, Instrument}; use update::IntoUpdateDocumentExtension; diff --git a/query-engine/connectors/mongodb-query-connector/src/value.rs b/query-engine/connectors/mongodb-query-connector/src/value.rs index cf984ad76830..cf6812d59b6d 100644 --- a/query-engine/connectors/mongodb-query-connector/src/value.rs +++ b/query-engine/connectors/mongodb-query-connector/src/value.rs @@ -7,10 +7,10 @@ use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{TimeZone, Utc}; use itertools::Itertools; use mongodb::bson::{oid::ObjectId, spec::BinarySubtype, Binary, Bson, Document, Timestamp}; -use prisma_models::{ +use psl::builtin_connectors::MongoDbType; +use query_structure::{ CompositeFieldRef, Field, PrismaValue, RelationFieldRef, ScalarFieldRef, SelectedField, TypeIdentifier, }; -use psl::builtin_connectors::MongoDbType; use serde_json::Value; use std::{convert::TryFrom, fmt::Display}; diff --git a/query-engine/connectors/query-connector/Cargo.toml b/query-engine/connectors/query-connector/Cargo.toml index d16771aa3daf..7fcc749b3714 100644 --- a/query-engine/connectors/query-connector/Cargo.toml +++ b/query-engine/connectors/query-connector/Cargo.toml @@ -9,7 +9,7 @@ async-trait = "0.1.31" chrono = {version = "0.4", features = ["serde"]} futures = "0.3" itertools = "0.10"
-prisma-models = {path = "../../prisma-models"} +query-structure = {path = "../../query-structure"} prisma-value = {path = "../../../libs/prisma-value"} serde.workspace = true serde_json.workspace = true diff --git a/query-engine/connectors/query-connector/src/coerce.rs b/query-engine/connectors/query-connector/src/coerce.rs index 87f04eed4f24..9c09ea5235ec 100644 --- a/query-engine/connectors/query-connector/src/coerce.rs +++ b/query-engine/connectors/query-connector/src/coerce.rs @@ -1,4 +1,4 @@ -use prisma_models::PrismaValue; +use query_structure::PrismaValue; pub fn coerce_null_to_zero_value(value: PrismaValue) -> PrismaValue { if let PrismaValue::Null = value { diff --git a/query-engine/connectors/query-connector/src/error.rs b/query-engine/connectors/query-connector/src/error.rs index 96d8d9dcbacb..e34b7668a7dc 100644 --- a/query-engine/connectors/query-connector/src/error.rs +++ b/query-engine/connectors/query-connector/src/error.rs @@ -1,6 +1,6 @@ -use crate::filter::Filter; use itertools::Itertools; -use prisma_models::prelude::DomainError; +use query_structure::prelude::DomainError; +use query_structure::Filter; use std::fmt::Display; use thiserror::Error; use user_facing_errors::{query_engine::DatabaseConstraint, KnownError}; diff --git a/query-engine/connectors/query-connector/src/interface.rs b/query-engine/connectors/query-connector/src/interface.rs index 80e01578bd01..942edd1868fc 100644 --- a/query-engine/connectors/query-connector/src/interface.rs +++ b/query-engine/connectors/query-connector/src/interface.rs @@ -1,7 +1,7 @@ -use crate::{coerce_null_to_zero_value, Filter, NativeUpsert, QueryArguments, WriteArgs}; +use crate::{coerce_null_to_zero_value, NativeUpsert, WriteArgs}; use async_trait::async_trait; -use prisma_models::{ast::FieldArity, *}; use prisma_value::PrismaValue; +use query_structure::{ast::FieldArity, *}; use std::collections::HashMap; #[async_trait] diff --git a/query-engine/connectors/query-connector/src/lib.rs b/query-engine/connectors/query-connector/src/lib.rs index b60554c54b50..5488dfaef494 100644 --- a/query-engine/connectors/query-connector/src/lib.rs +++ b/query-engine/connectors/query-connector/src/lib.rs @@ -1,20 +1,14 @@ #![allow(clippy::derive_partial_eq_without_eq)] pub mod error; -pub mod filter; mod coerce; -mod compare; mod interface; -mod query_arguments; mod upsert; mod write_args; pub use coerce::*; -pub use compare::*; -pub use filter::*; pub use interface::*; -pub use query_arguments::*; pub use upsert::*; pub use write_args::*; diff --git a/query-engine/connectors/query-connector/src/upsert.rs b/query-engine/connectors/query-connector/src/upsert.rs index 87421511de1e..9455fbc30c49 100644 --- a/query-engine/connectors/query-connector/src/upsert.rs +++ b/query-engine/connectors/query-connector/src/upsert.rs @@ -1,5 +1,5 @@ -use crate::{Filter, RecordFilter, WriteArgs}; -use prisma_models::{FieldSelection, Model, ScalarFieldRef}; +use crate::{RecordFilter, WriteArgs}; +use query_structure::{FieldSelection, Filter, Model, ScalarFieldRef}; #[derive(Debug, Clone)] pub struct NativeUpsert { diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/connectors/query-connector/src/write_args.rs index e75ca288ac0c..e0b030975042 100644 --- a/query-engine/connectors/query-connector/src/write_args.rs +++ b/query-engine/connectors/query-connector/src/write_args.rs @@ -1,10 +1,8 @@ -use crate::{ - error::{ConnectorError, ErrorKind}, - Filter, -}; +use crate::error::{ConnectorError, ErrorKind}; use indexmap::{map::Keys, 
IndexMap}; -use prisma_models::{ - CompositeFieldRef, Field, Model, ModelProjection, PrismaValue, ScalarFieldRef, SelectedField, SelectionResult, +use query_structure::{ + CompositeFieldRef, Field, Filter, Model, ModelProjection, PrismaValue, ScalarFieldRef, SelectedField, + SelectionResult, }; use std::{borrow::Borrow, convert::TryInto, ops::Deref}; diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 62d0be640761..ba2ff436823f 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -31,8 +31,8 @@ cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" package = "query-connector" path = "../query-connector" -[dependencies.prisma-models] -path = "../../prisma-models" +[dependencies.query-structure] +path = "../../query-structure" [dependencies.prisma-value] path = "../../../libs/prisma-value" diff --git a/query-engine/connectors/sql-query-connector/src/column_metadata.rs b/query-engine/connectors/sql-query-connector/src/column_metadata.rs index 7555bbf3331b..c64871b7eb22 100644 --- a/query-engine/connectors/sql-query-connector/src/column_metadata.rs +++ b/query-engine/connectors/sql-query-connector/src/column_metadata.rs @@ -1,4 +1,4 @@ -use prisma_models::{FieldArity, TypeIdentifier}; +use query_structure::{FieldArity, TypeIdentifier}; /// Helps dealing with column value conversion and possible error resolution. #[derive(Clone, Debug, Copy)] diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs index 34373eaf3d5b..d34fd49b2a3d 100644 --- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs +++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs @@ -5,10 +5,9 @@ use crate::{ query_arguments_ext::QueryArgumentsExt, Context, }; -use connector_interface::QueryArguments; use itertools::Itertools; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; #[derive(Debug)] struct CursorOrderDefinition { diff --git a/query-engine/connectors/sql-query-connector/src/database/connection.rs b/query-engine/connectors/sql-query-connector/src/database/connection.rs index 0247e8c4b601..381d3bd17b23 100644 --- a/query-engine/connectors/sql-query-connector/src/database/connection.rs +++ b/query-engine/connectors/sql-query-connector/src/database/connection.rs @@ -3,15 +3,15 @@ use crate::{database::operations::*, Context, SqlError}; use async_trait::async_trait; use connector::{ConnectionLike, RelAggregationSelection}; use connector_interface::{ - self as connector, filter::Filter, AggregationRow, AggregationSelection, Connection, QueryArguments, - ReadOperations, RecordFilter, Transaction, WriteArgs, WriteOperations, + self as connector, AggregationRow, AggregationSelection, Connection, ReadOperations, RecordFilter, Transaction, + WriteArgs, WriteOperations, }; -use prisma_models::{prelude::*, SelectionResult}; use prisma_value::PrismaValue; use quaint::{ connector::{IsolationLevel, TransactionCapable}, prelude::{ConnectionInfo, Queryable}, }; +use query_structure::{prelude::*, Filter, QueryArguments, SelectionResult}; use std::{collections::HashMap, str::FromStr}; pub(crate) struct SqlConnection { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 
470628de1132..d512325d2cd5 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -7,8 +7,8 @@ use crate::{ }; use connector_interface::*; use futures::stream::{FuturesUnordered, StreamExt}; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; pub(crate) async fn get_single_record( conn: &dyn Queryable, diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs index 2270d6c6fefa..617e02455abd 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs @@ -8,7 +8,7 @@ use crate::{Context, QueryExt, Queryable}; use connector_interface::*; use itertools::Itertools; -use prisma_models::*; +use query_structure::*; use std::usize; /// Performs an update with an explicit selection set. diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs index cfd473923ffc..f086e4c60798 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs @@ -7,8 +7,8 @@ use crate::{ Context, Queryable, }; use connector_interface::NativeUpsert; -use prisma_models::{ModelProjection, Record, SingleRecord}; use quaint::prelude::{OnConflict, Query}; +use query_structure::{ModelProjection, Record, SingleRecord}; pub(crate) async fn native_upsert( conn: &dyn Queryable, diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index 425f4ac1d4b3..8503e1434001 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -8,11 +8,11 @@ use crate::{ }; use connector_interface::*; use itertools::Itertools; -use prisma_models::*; use quaint::{ error::ErrorKind, prelude::{native_uuid, uuid_to_bin, uuid_to_bin_swapped, Aliasable, Select, SqlFamily}, }; +use query_structure::*; use std::{ collections::{HashMap, HashSet}, ops::Deref, diff --git a/query-engine/connectors/sql-query-connector/src/database/transaction.rs b/query-engine/connectors/sql-query-connector/src/database/transaction.rs index 517c293457f1..7fa9aaf3b5bc 100644 --- a/query-engine/connectors/sql-query-connector/src/database/transaction.rs +++ b/query-engine/connectors/sql-query-connector/src/database/transaction.rs @@ -3,12 +3,12 @@ use crate::{database::operations::*, Context, SqlError}; use async_trait::async_trait; use connector::{ConnectionLike, RelAggregationSelection}; use connector_interface::{ - self as connector, filter::Filter, AggregationRow, AggregationSelection, QueryArguments, ReadOperations, - RecordFilter, Transaction, WriteArgs, WriteOperations, + self as connector, AggregationRow, AggregationSelection, ReadOperations, RecordFilter, Transaction, WriteArgs, + WriteOperations, }; -use prisma_models::{prelude::*, SelectionResult}; use prisma_value::PrismaValue; use quaint::prelude::ConnectionInfo; +use query_structure::{prelude::*, Filter, QueryArguments, SelectionResult}; use std::collections::HashMap; pub struct SqlConnectorTransaction<'tx> { diff 
--git a/query-engine/connectors/sql-query-connector/src/error.rs b/query-engine/connectors/sql-query-connector/src/error.rs index 9744aa0d5ef8..a7770879c510 100644 --- a/query-engine/connectors/sql-query-connector/src/error.rs +++ b/query-engine/connectors/sql-query-connector/src/error.rs @@ -1,6 +1,6 @@ -use connector_interface::{error::*, Filter}; -use prisma_models::prelude::DomainError; +use connector_interface::error::*; use quaint::error::ErrorKind as QuaintKind; +use query_structure::{prelude::DomainError, Filter}; use std::{any::Any, string::FromUtf8Error}; use thiserror::Error; use user_facing_errors::query_engine::DatabaseConstraint; @@ -267,8 +267,8 @@ impl SqlError { } } -impl From<prisma_models::ConversionFailure> for SqlError { - fn from(e: prisma_models::ConversionFailure) -> Self { +impl From<query_structure::ConversionFailure> for SqlError { + fn from(e: query_structure::ConversionFailure) -> Self { Self::ConversionError(e.into()) } } diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/connectors/sql-query-connector/src/filter/alias.rs index 61686929d400..c7a62bba02ab 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/alias.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/alias.rs @@ -1,7 +1,7 @@ use crate::{model_extensions::AsColumn, *}; -use prisma_models::ScalarField; use quaint::prelude::Column; +use query_structure::ScalarField; #[derive(Clone, Copy, Debug)] /// A distinction in aliasing to separate the parent table and the joined data diff --git a/query-engine/connectors/sql-query-connector/src/filter/mod.rs b/query-engine/connectors/sql-query-connector/src/filter/mod.rs index bbf3557b16b7..b9ae856ef655 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/mod.rs @@ -1,8 +1,8 @@ mod alias; mod visitor; -use connector_interface::Filter; use quaint::prelude::*; +use query_structure::Filter; use visitor::*; use crate::{context::Context, join_utils::AliasedJoin}; diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 6ab32f89735f..1a71cdd824a8 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -2,11 +2,10 @@ use super::alias::*; use crate::join_utils::{compute_one2m_join, AliasedJoin}; use crate::{model_extensions::*, Context}; -use connector_interface::filter::*; -use prisma_models::prelude::*; use psl::datamodel_connector::ConnectorCapability; use quaint::ast::concat; use quaint::ast::*; +use query_structure::{filter::*, prelude::*}; use std::convert::TryInto; pub(crate) trait FilterVisitorExt { diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/connectors/sql-query-connector/src/join_utils.rs index 4b4d2fc8aa24..cedc264f94b2 100644 --- a/query-engine/connectors/sql-query-connector/src/join_utils.rs +++ b/query-engine/connectors/sql-query-connector/src/join_utils.rs @@ -1,7 +1,6 @@ use crate::{filter::FilterBuilder, model_extensions::*, Context}; -use connector_interface::Filter; -use prisma_models::*; use quaint::prelude::*; +use query_structure::*; #[derive(Debug, Clone)] pub(crate) struct AliasedJoin { diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs index 445bada9c45c..045587df049b 100644 ---
a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs @@ -1,7 +1,7 @@ use crate::{model_extensions::ScalarFieldExt, Context}; use itertools::Itertools; -use prisma_models::{Field, ModelProjection, RelationField, ScalarField}; use quaint::ast::{Column, Row}; +use query_structure::{Field, ModelProjection, RelationField, ScalarField}; pub struct ColumnIterator { inner: Box<dyn Iterator<Item = Column<'static>> + 'static>, diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs index 0204046315ad..e764aa8e58ba 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/record.rs @@ -1,6 +1,6 @@ use crate::{value::to_prisma_value, SqlError}; -use prisma_models::{DomainError, ModelProjection, SelectionResult}; use quaint::connector::ResultSet; +use query_structure::{DomainError, ModelProjection, SelectionResult}; pub fn try_convert(model_projection: &ModelProjection, result_set: ResultSet) -> crate::Result<SelectionResult> { let columns: Vec<String> = result_set.columns().iter().map(|c| c.to_string()).collect(); diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs index 6941546c51e1..981390536807 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs @@ -2,8 +2,8 @@ use crate::{ model_extensions::{AsColumns, AsTable, ColumnIterator}, Context, }; -use prisma_models::{walkers, ModelProjection, Relation, RelationField}; use quaint::{ast::Table, prelude::Column}; +use query_structure::{walkers, ModelProjection, Relation, RelationField}; pub(crate) trait RelationFieldExt { fn m2m_columns(&self, ctx: &Context<'_>) -> Vec<Column<'static>>; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index 7eb414dd92a8..21612e1a6392 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -1,11 +1,11 @@ use crate::context::Context; use chrono::Utc; -use prisma_models::{ScalarField, TypeIdentifier}; use prisma_value::PrismaValue; use quaint::{ ast::{EnumName, Value, ValueType}, prelude::{EnumVariant, TypeDataLength, TypeFamily}, }; +use query_structure::{ScalarField, TypeIdentifier}; pub(crate) trait ScalarFieldExt { fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a>; diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs index 25d994b1d64d..51eb7768d068 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs @@ -1,7 +1,7 @@ use super::ScalarFieldExt; use crate::context::Context; -use prisma_models::{PrismaValue, SelectedField, SelectionResult}; use quaint::Value; +use query_structure::{PrismaValue, SelectedField, SelectionResult}; pub(crate) trait SelectionResultExt { fn misses_autogen_value(&self) -> bool; diff --git
a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs index 8217b5e12044..ead15c34658e 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/table.rs @@ -1,6 +1,6 @@ use crate::{model_extensions::AsColumns, Context}; -use prisma_models::Model; use quaint::ast::{Column, Table}; +use query_structure::Model; pub(crate) fn db_name_with_schema(model: &Model, ctx: &Context<'_>) -> Table<'static> { let schema_prefix = model @@ -32,7 +32,7 @@ impl AsTable for Model { self.unique_indexes().fold(table, |table, index| { let fields: Vec<_> = index .fields() - .map(|f| prisma_models::ScalarFieldRef::from((self.dm.clone(), f))) + .map(|f| query_structure::ScalarFieldRef::from((self.dm.clone(), f))) .collect(); let index: Vec<Column<'static>> = fields.as_columns(ctx).collect(); table.add_unique_index(index) diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/connectors/sql-query-connector/src/ordering.rs index cf49698405ef..5f61d0c3a907 100644 --- a/query-engine/connectors/sql-query-connector/src/ordering.rs +++ b/query-engine/connectors/sql-query-connector/src/ordering.rs @@ -1,8 +1,7 @@ use crate::{join_utils::*, model_extensions::*, query_arguments_ext::QueryArgumentsExt, Context}; -use connector_interface::QueryArguments; use itertools::Itertools; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; static ORDER_JOIN_PREFIX: &str = "orderby_"; static ORDER_AGGREGATOR_ALIAS: &str = "orderby_aggregator"; diff --git a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs b/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs index b0319b28b22c..414ab7247c67 100644 --- a/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_arguments_ext.rs @@ -1,4 +1,4 @@ -use connector_interface::QueryArguments; +use query_structure::QueryArguments; pub(crate) trait QueryArgumentsExt { /// If we need to take rows before a cursor position, then we need to reverse the order in SQL.
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs index f9a3d43905e3..b605d076eed4 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs @@ -3,8 +3,8 @@ pub(crate) mod write; use crate::context::Context; use crate::model_extensions::SelectionResultExt; -use prisma_models::SelectionResult; use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values}; +use query_structure::SelectionResult; const PARAMETER_LIMIT: usize = 2000; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs index a5385f1dd56a..a720f4720b56 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs @@ -2,10 +2,10 @@ use crate::{ cursor_condition, filter::FilterBuilder, model_extensions::*, nested_aggregations, ordering::OrderByBuilder, sql_trace::SqlTraceComment, Context, }; -use connector_interface::{filter::Filter, AggregationSelection, QueryArguments, RelAggregationSelection}; +use connector_interface::{AggregationSelection, RelAggregationSelection}; use itertools::Itertools; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; use tracing::Span; pub(crate) trait SelectDefinition { diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index b9356842b285..c5bb3e24ddb6 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -1,7 +1,7 @@ use crate::{model_extensions::*, sql_trace::SqlTraceComment, Context}; use connector_interface::{DatasourceFieldName, ScalarWriteOperation, WriteArgs}; -use prisma_models::*; use quaint::ast::*; +use query_structure::*; use std::{collections::HashSet, convert::TryInto}; use tracing::Span; diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index 2dba40dcb7fc..a78145773478 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -4,13 +4,13 @@ use crate::{ value_ext::IntoTypedJsonExtension, ColumnMetadata, Context, SqlRow, ToSqlRow, }; use async_trait::async_trait; -use connector_interface::{filter::Filter, RecordFilter}; +use connector_interface::RecordFilter; use futures::future::FutureExt; use itertools::Itertools; use opentelemetry::trace::TraceContextExt; use opentelemetry::trace::TraceFlags; -use prisma_models::*; use quaint::{ast::*, connector::Queryable}; +use query_structure::*; use serde_json::{Map, Value}; use std::{collections::HashMap, panic::AssertUnwindSafe}; use tracing::{info_span, Span}; diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 250ee7d9420f..6f154b1f77dc 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -2,8 +2,8 @@ use crate::{column_metadata::ColumnMetadata, error::SqlError, value::to_prisma_v use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; use 
connector_interface::{coerce_null_to_zero_value, AggregationResult, AggregationSelection}; -use prisma_models::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; use quaint::{connector::ResultRow, Value, ValueType}; +use query_structure::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; use std::{io, str::FromStr}; use uuid::Uuid; diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs index 4c31fc9eedb9..0929003955f7 100644 --- a/query-engine/connectors/sql-query-connector/src/value.rs +++ b/query-engine/connectors/sql-query-connector/src/value.rs @@ -1,8 +1,8 @@ use crate::row::{sanitize_f32, sanitize_f64}; use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; -use prisma_models::PrismaValue; use quaint::ValueType; +use query_structure::PrismaValue; pub fn to_prisma_value<'a, T: Into<ValueType<'a>>>(qv: T) -> crate::Result<PrismaValue> { let val = match qv.into() { diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index caadf6cdba00..86f3825cadc3 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -16,7 +16,7 @@ indexmap = { version = "1.7", features = ["serde-1"] } itertools = "0.10" once_cell = "1" petgraph = "0.4" -prisma-models = { path = "../prisma-models", features = ["default_generators"] } +query-structure = { path = "../query-structure", features = ["default_generators"] } opentelemetry = { version = "0.17.0", features = ["rt-tokio", "serialize"] } query-engine-metrics = {path = "../metrics"} serde.workspace = true diff --git a/query-engine/core/src/constants.rs b/query-engine/core/src/constants.rs index 6d185a1c6866..abf320a2969c 100644 --- a/query-engine/core/src/constants.rs +++ b/query-engine/core/src/constants.rs @@ -1,5 +1,5 @@ pub mod custom_types { - use prisma_models::PrismaValue; + use query_structure::PrismaValue; pub const TYPE: &str = "$type"; pub const VALUE: &str = "value"; diff --git a/query-engine/core/src/error.rs b/query-engine/core/src/error.rs index 6ca41f1a9d7b..3a3803bf0d67 100644 --- a/query-engine/core/src/error.rs +++ b/query-engine/core/src/error.rs @@ -1,6 +1,6 @@ use crate::{InterpreterError, QueryGraphBuilderError, RelationViolation, TransactionError}; use connector::error::ConnectorError; -use prisma_models::DomainError; +use query_structure::DomainError; use thiserror::Error; use user_facing_errors::UnknownError; diff --git a/query-engine/core/src/executor/request_context.rs b/query-engine/core/src/executor/request_context.rs index 13d2e7245178..e4f0c7122ee9 100644 --- a/query-engine/core/src/executor/request_context.rs +++ b/query-engine/core/src/executor/request_context.rs @@ -1,5 +1,5 @@ use crate::protocol::EngineProtocol; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; #[derive(Debug)] struct RequestContext { diff --git a/query-engine/core/src/interpreter/error.rs b/query-engine/core/src/interpreter/error.rs index 0660c65adb7e..0a60c5d7848e 100644 --- a/query-engine/core/src/interpreter/error.rs +++ b/query-engine/core/src/interpreter/error.rs @@ -1,6 +1,6 @@ use crate::{QueryGraphBuilderError, QueryGraphError}; use connector::error::ConnectorError; -use prisma_models::DomainError; +use query_structure::DomainError; use std::fmt; #[derive(Debug)] diff --git a/query-engine/core/src/interpreter/interpreter_impl.rs b/query-engine/core/src/interpreter/interpreter_impl.rs index 1f4de95224c2..8aa3d77ae761 100644 ---
a/query-engine/core/src/interpreter/interpreter_impl.rs +++ b/query-engine/core/src/interpreter/interpreter_impl.rs @@ -6,7 +6,7 @@ use super::{ use crate::{Query, QueryResult}; use connector::ConnectionLike; use futures::future::BoxFuture; -use prisma_models::prelude::*; +use query_structure::prelude::*; use std::{collections::HashMap, fmt}; use tracing::Instrument; diff --git a/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs b/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs index a324b499d759..f4c0465412e2 100644 --- a/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs +++ b/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs @@ -1,6 +1,5 @@ -use connector::QueryArguments; use itertools::Itertools; -use prisma_models::{FieldSelection, ManyRecords, Record, SelectionResult}; +use query_structure::*; use std::ops::Deref; #[derive(Debug)] diff --git a/query-engine/core/src/interpreter/query_interpreters/nested_read.rs b/query-engine/core/src/interpreter/query_interpreters/nested_read.rs index 238dd814f812..fa4dc7c6e529 100644 --- a/query-engine/core/src/interpreter/query_interpreters/nested_read.rs +++ b/query-engine/core/src/interpreter/query_interpreters/nested_read.rs @@ -1,10 +1,7 @@ use super::{inmemory_record_processor::InMemoryRecordProcessor, read}; use crate::{interpreter::InterpretationResult, query_ast::*}; -use connector::{ - self, filter::Filter, ConditionListValue, ConnectionLike, QueryArguments, RelAggregationRow, - RelAggregationSelection, ScalarCompare, -}; -use prisma_models::{FieldSelection, ManyRecords, PrismaValue, Record, RelationFieldRef, SelectionResult}; +use connector::{self, ConnectionLike, RelAggregationRow, RelAggregationSelection}; +use query_structure::*; use std::collections::HashMap; pub(crate) async fn m2m( diff --git a/query-engine/core/src/interpreter/query_interpreters/read.rs b/query-engine/core/src/interpreter/query_interpreters/read.rs index 7653c675938f..464ac6651677 100644 --- a/query-engine/core/src/interpreter/query_interpreters/read.rs +++ b/query-engine/core/src/interpreter/query_interpreters/read.rs @@ -3,7 +3,7 @@ use crate::{interpreter::InterpretationResult, query_ast::*, result_ast::*}; use connector::{self, error::ConnectorError, ConnectionLike, RelAggregationRow, RelAggregationSelection}; use futures::future::{BoxFuture, FutureExt}; use inmemory_record_processor::InMemoryRecordProcessor; -use prisma_models::ManyRecords; +use query_structure::ManyRecords; use std::collections::HashMap; use user_facing_errors::KnownError; diff --git a/query-engine/core/src/query_ast/mod.rs b/query-engine/core/src/query_ast/mod.rs index 43c73a657238..4b67b0ba4548 100644 --- a/query-engine/core/src/query_ast/mod.rs +++ b/query-engine/core/src/query_ast/mod.rs @@ -5,8 +5,7 @@ pub use read::*; pub use write::*; use crate::ToGraphviz; -use connector::filter::Filter; -use prisma_models::{FieldSelection, Model, SelectionResult}; +use query_structure::{FieldSelection, Filter, Model, SelectionResult}; #[derive(Debug, Clone)] #[allow(clippy::large_enum_variant)] diff --git a/query-engine/core/src/query_ast/read.rs b/query-engine/core/src/query_ast/read.rs index 9b68f533300c..271ff44e3883 100644 --- a/query-engine/core/src/query_ast/read.rs +++ b/query-engine/core/src/query_ast/read.rs @@ -1,9 +1,9 @@ //! 
Prisma read query AST use super::FilteredQuery; use crate::ToGraphviz; -use connector::{filter::Filter, AggregationSelection, QueryArguments, RelAggregationSelection}; +use connector::{AggregationSelection, RelAggregationSelection}; use enumflags2::BitFlags; -use prisma_models::prelude::*; +use query_structure::{prelude::*, Filter, QueryArguments}; use std::fmt::Display; #[allow(clippy::enum_variant_names)] diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index 9d02e0d49d17..ee51830e7962 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -1,8 +1,8 @@ //! Write query AST use super::{FilteredNestedMutation, FilteredQuery}; use crate::{RecordQuery, ToGraphviz}; -use connector::{filter::Filter, DatasourceFieldName, NativeUpsert, RecordFilter, WriteArgs}; -use prisma_models::prelude::*; +use connector::{DatasourceFieldName, NativeUpsert, RecordFilter, WriteArgs}; +use query_structure::{prelude::*, Filter}; use std::collections::HashMap; #[derive(Debug, Clone)] diff --git a/query-engine/core/src/query_document/argument_value.rs b/query-engine/core/src/query_document/argument_value.rs index 3d085ee57e77..7629ea73c9fb 100644 --- a/query-engine/core/src/query_document/argument_value.rs +++ b/query-engine/core/src/query_document/argument_value.rs @@ -1,7 +1,7 @@ use bigdecimal::BigDecimal; use chrono::{DateTime, FixedOffset}; use indexmap::IndexMap; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; use serde::Serialize; pub type ArgumentValueObject = IndexMap<String, ArgumentValue>; diff --git a/query-engine/core/src/query_document/mod.rs b/query-engine/core/src/query_document/mod.rs index 7b86d5e87bea..fa424bc44d6e 100644 --- a/query-engine/core/src/query_document/mod.rs +++ b/query-engine/core/src/query_document/mod.rs @@ -32,7 +32,7 @@ use crate::{ query_ast::{QueryOption, QueryOptions}, query_graph_builder::resolve_compound_field, }; -use prisma_models::Model; +use query_structure::Model; use schema::{constants::*, QuerySchema}; use std::collections::HashMap; use user_facing_errors::query_engine::validation::ValidationError; diff --git a/query-engine/core/src/query_document/parse_ast.rs b/query-engine/core/src/query_document/parse_ast.rs index b9d1e8f68168..256763f59e68 100644 --- a/query-engine/core/src/query_document/parse_ast.rs +++ b/query-engine/core/src/query_document/parse_ast.rs @@ -2,7 +2,7 @@ //! Structures represent parsed and validated parts of the query document, used by the query builders.
use crate::QueryParserResult; use indexmap::IndexMap; -use prisma_models::{OrderBy, PrismaValue, ScalarFieldRef}; +use query_structure::{OrderBy, PrismaValue, ScalarFieldRef}; use schema::ObjectTag; use std::{ borrow::Cow, diff --git a/query-engine/core/src/query_document/parser.rs b/query-engine/core/src/query_document/parser.rs index 58a814692271..79f30e1bd8b7 100644 --- a/query-engine/core/src/query_document/parser.rs +++ b/query-engine/core/src/query_document/parser.rs @@ -4,7 +4,7 @@ use bigdecimal::{BigDecimal, ToPrimitive}; use chrono::prelude::*; use core::fmt; use indexmap::{IndexMap, IndexSet}; -use prisma_models::{DefaultKind, PrismaValue, ValueGeneratorFn}; +use query_structure::{DefaultKind, PrismaValue, ValueGeneratorFn}; use std::{borrow::Cow, convert::TryFrom, rc::Rc, str::FromStr}; use user_facing_errors::query_engine::validation::ValidationError; use uuid::Uuid; @@ -414,7 +414,7 @@ impl QueryDocumentParser { argument_path: &Path, s: &str, ) -> QueryParserResult> { - prisma_models::parse_datetime(s).map_err(|err| { + query_structure::parse_datetime(s).map_err(|err| { ValidationError::invalid_argument_value( selection_path.segments(), argument_path.segments(), @@ -426,15 +426,17 @@ impl QueryDocumentParser { } fn parse_bytes(&self, selection_path: &Path, argument_path: &Path, s: String) -> QueryParserResult { - prisma_models::decode_bytes(&s).map(PrismaValue::Bytes).map_err(|err| { - ValidationError::invalid_argument_value( - selection_path.segments(), - argument_path.segments(), - s.to_string(), - "base64 String", - Some(Box::new(err)), - ) - }) + query_structure::decode_bytes(&s) + .map(PrismaValue::Bytes) + .map_err(|err| { + ValidationError::invalid_argument_value( + selection_path.segments(), + argument_path.segments(), + s.to_string(), + "base64 String", + Some(Box::new(err)), + ) + }) } fn parse_decimal( @@ -782,7 +784,7 @@ pub(crate) mod conversions { schema::{InputType, OutputType}, ArgumentValue, }; - use prisma_models::PrismaValue; + use query_structure::PrismaValue; use schema::InnerOutputType; use user_facing_errors::query_engine::validation::{self, InputTypeDescription}; diff --git a/query-engine/core/src/query_document/transformers.rs b/query-engine/core/src/query_document/transformers.rs index 8d5894e9bbc8..20296b7ff04f 100644 --- a/query-engine/core/src/query_document/transformers.rs +++ b/query-engine/core/src/query_document/transformers.rs @@ -7,7 +7,7 @@ use super::*; use bigdecimal::ToPrimitive; use chrono::prelude::*; -use prisma_models::{OrderBy, PrismaValue, ScalarFieldRef}; +use query_structure::{OrderBy, PrismaValue, ScalarFieldRef}; use std::convert::TryInto; use user_facing_errors::query_engine::validation::ValidationError; diff --git a/query-engine/core/src/query_graph/mod.rs b/query-engine/core/src/query_graph/mod.rs index e51dcea97110..6086fa243331 100644 --- a/query-engine/core/src/query_graph/mod.rs +++ b/query-engine/core/src/query_graph/mod.rs @@ -10,7 +10,6 @@ use crate::{ interpreter::ExpressionResult, FilteredQuery, ManyRecordsQuery, Query, QueryGraphBuilderResult, QueryOptions, ReadQuery, }; -use connector::{IntoFilter, QueryArguments}; use guard::*; use itertools::Itertools; use petgraph::{ @@ -18,7 +17,7 @@ use petgraph::{ visit::{EdgeRef as PEdgeRef, NodeIndexable}, *, }; -use prisma_models::{FieldSelection, SelectionResult}; +use query_structure::{FieldSelection, IntoFilter, QueryArguments, SelectionResult}; use std::{collections::HashSet, fmt}; pub type QueryGraphResult = std::result::Result; diff --git 
a/query-engine/core/src/query_graph_builder/error.rs b/query-engine/core/src/query_graph_builder/error.rs index 6fa1b82001a9..825b312bbbf5 100644 --- a/query-engine/core/src/query_graph_builder/error.rs +++ b/query-engine/core/src/query_graph_builder/error.rs @@ -1,5 +1,5 @@ use crate::QueryGraphError; -use prisma_models::{DomainError, RelationFieldRef}; +use query_structure::{DomainError, RelationFieldRef}; use user_facing_errors::query_engine::validation::ValidationError; #[derive(Debug)] diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs b/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs index 9bb26c388894..66bd7c44ebb0 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs @@ -1,7 +1,6 @@ use super::extract_filter; use crate::{ParsedInputMap, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult}; -use connector::{CompositeCompare, Filter}; -use prisma_models::{CompositeFieldRef, PrismaValue}; +use query_structure::{CompositeCompare, CompositeFieldRef, Filter, PrismaValue}; use schema::{constants::filters, ObjectTag}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs b/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs index 46ef17314c3a..3438ecec8477 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs @@ -1,4 +1,4 @@ -use connector::Filter; +use query_structure::Filter; pub fn fold_filter(filter: Filter) -> Filter { match filter { diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs b/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs index e15aca250668..cb9e4e7f8025 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs @@ -9,15 +9,10 @@ use crate::{ query_document::{ParsedInputMap, ParsedInputValue}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{ - filter::Filter, CompositeCompare, QueryMode, RelationCompare, ScalarCompare, ScalarCondition, ScalarProjection, -}; use filter_fold::*; use filter_grouping::*; use indexmap::IndexMap; -use prisma_models::{ - prelude::ParentContainer, CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, -}; +use query_structure::{prelude::ParentContainer, *}; use schema::constants::filters; use std::{borrow::Cow, collections::HashMap, convert::TryInto, str::FromStr}; diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs b/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs index e3df0144f67b..47ec7ab9d193 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs @@ -1,7 +1,6 @@ use super::extract_filter; use crate::{ParsedInputMap, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult}; -use connector::{Filter, RelationCompare}; -use prisma_models::RelationFieldRef; +use query_structure::*; use schema::constants::filters; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs b/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs index 
0c3a100b7af9..ac84ce06aa21 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs @@ -1,9 +1,5 @@ use crate::{ParsedInputMap, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult}; -use connector::{ - ConditionListValue, ConditionValue, Filter, JsonCompare, JsonFilterPath, JsonTargetType, ScalarCompare, - ScalarListCompare, -}; -use prisma_models::{prelude::ParentContainer, Field, PrismaValue, ScalarFieldRef, TypeIdentifier}; +use query_structure::{prelude::ParentContainer, *}; use schema::constants::{aggregations, filters, json_null}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs index f4c890aa7d00..e42cdaca63f9 100644 --- a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs +++ b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs @@ -3,8 +3,7 @@ use crate::{ query_document::{ParsedArgument, ParsedInputMap}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::QueryArguments; -use prisma_models::prelude::*; +use query_structure::{prelude::*, QueryArguments}; use schema::constants::{aggregations, args, ordering}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/extractors/utils.rs b/query-engine/core/src/query_graph_builder/extractors/utils.rs index d216db2706fb..b05720d4f727 100644 --- a/query-engine/core/src/query_graph_builder/extractors/utils.rs +++ b/query-engine/core/src/query_graph_builder/extractors/utils.rs @@ -1,4 +1,4 @@ -use prisma_models::{Model, ScalarFieldRef}; +use query_structure::{Model, ScalarFieldRef}; /// Attempts to resolve a field name to a compound field. 
pub fn resolve_compound_field(name: &str, model: &Model) -> Option<Vec<ScalarFieldRef>> { diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs b/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs index 91e5d1169aa3..6a6332dbd868 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/aggregate.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::ParsedField, AggregateRecordsQuery}; -use prisma_models::Model; +use query_structure::Model; pub(crate) fn aggregate(field: ParsedField<'_>, model: Model) -> QueryGraphBuilderResult<ReadQuery> { let name = field.name; diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs b/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs index 167c35f78bf4..5b821b460003 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs @@ -1,7 +1,6 @@ use super::*; use crate::{query_document::ParsedField, AggregateRecordsQuery, ArgumentListLookup, ParsedInputValue, ReadQuery}; -use connector::Filter; -use prisma_models::{Model, OrderBy, ScalarFieldRef}; +use query_structure::{Filter, Model, OrderBy, ScalarFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs index 43354d708072..94e8b1bcbdc9 100644 --- a/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs +++ b/query-engine/core/src/query_graph_builder/read/aggregations/mod.rs @@ -8,7 +8,7 @@ use super::*; use crate::FieldPair; use connector::AggregationSelection; use itertools::Itertools; -use prisma_models::{Model, ScalarFieldRef}; +use query_structure::{Model, ScalarFieldRef}; use schema::constants::aggregations::*; /// Resolves the given field as an aggregation query.
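For context on `resolve_compound_field` at the top of this hunk group: Prisma addresses a compound unique such as `@@unique([firstName, lastName])` by the underscore-joined alias of its member fields, e.g. `firstName_lastName`. A self-contained toy of that lookup, with plain strings standing in for `query_structure::Model` and `ScalarFieldRef`; the `_`-join convention is background knowledge about Prisma naming, not something this patch introduces:

    /// Toy compound-field resolution: match an incoming name against the
    /// `_`-joined member names of each known compound key.
    fn resolve_compound<'a>(name: &str, compounds: &'a [Vec<&'a str>]) -> Option<&'a [&'a str]> {
        compounds
            .iter()
            .find(|fields| fields.join("_") == name)
            .map(|fields| fields.as_slice())
    }

    fn main() {
        let compounds = vec![vec!["firstName", "lastName"]];
        assert_eq!(
            resolve_compound("firstName_lastName", &compounds),
            Some(&["firstName", "lastName"][..])
        );
        assert_eq!(resolve_compound("email", &compounds), None);
    }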
diff --git a/query-engine/core/src/query_graph_builder/read/first.rs b/query-engine/core/src/query_graph_builder/read/first.rs index c68969b07b1f..84c90016858a 100644 --- a/query-engine/core/src/query_graph_builder/read/first.rs +++ b/query-engine/core/src/query_graph_builder/read/first.rs @@ -1,4 +1,4 @@ -use prisma_models::Model; +use query_structure::Model; use super::*; use crate::ParsedField; diff --git a/query-engine/core/src/query_graph_builder/read/many.rs b/query-engine/core/src/query_graph_builder/read/many.rs index b84b546dcfd0..6c9242330a83 100644 --- a/query-engine/core/src/query_graph_builder/read/many.rs +++ b/query-engine/core/src/query_graph_builder/read/many.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::ParsedField, ManyRecordsQuery, QueryOption, QueryOptions, ReadQuery}; -use prisma_models::Model; +use query_structure::Model; pub(crate) fn find_many(field: ParsedField<'_>, model: Model) -> QueryGraphBuilderResult<ReadQuery> { find_many_with_options(field, model, QueryOptions::none()) diff --git a/query-engine/core/src/query_graph_builder/read/one.rs b/query-engine/core/src/query_graph_builder/read/one.rs index edb7425bc20d..d71c2535bb2f 100644 --- a/query-engine/core/src/query_graph_builder/read/one.rs +++ b/query-engine/core/src/query_graph_builder/read/one.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::*, QueryOption, QueryOptions, ReadQuery, RecordQuery}; -use prisma_models::Model; +use query_structure::Model; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/read/related.rs b/query-engine/core/src/query_graph_builder/read/related.rs index c8beb952e02b..9c73699b0477 100644 --- a/query-engine/core/src/query_graph_builder/read/related.rs +++ b/query-engine/core/src/query_graph_builder/read/related.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_document::ParsedField, ReadQuery, RelatedRecordsQuery}; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Model, RelationFieldRef}; pub(crate) fn find_related( field: ParsedField<'_>, diff --git a/query-engine/core/src/query_graph_builder/read/utils.rs b/query-engine/core/src/query_graph_builder/read/utils.rs index 234a50d6f09f..545393ba3d15 100644 --- a/query-engine/core/src/query_graph_builder/read/utils.rs +++ b/query-engine/core/src/query_graph_builder/read/utils.rs @@ -1,7 +1,7 @@ use super::*; use crate::{ArgumentListLookup, FieldPair, ParsedField, ReadQuery}; use connector::RelAggregationSelection; -use prisma_models::prelude::*; +use query_structure::prelude::*; use schema::constants::{aggregations::*, args}; pub fn collect_selection_order(from: &[FieldPair<'_>]) -> Vec<String> { diff --git a/query-engine/core/src/query_graph_builder/write/connect.rs b/query-engine/core/src/query_graph_builder/write/connect.rs index 7df971de5e6f..03e681477cac 100644 --- a/query-engine/core/src/query_graph_builder/write/connect.rs +++ b/query-engine/core/src/query_graph_builder/write/connect.rs @@ -3,7 +3,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use prisma_models::RelationFieldRef; +use query_structure::RelationFieldRef; /// Only for many to many relations.
/// diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 7707f4818514..59661c6c16b3 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -4,9 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ArgumentListLookup, ParsedField, ParsedInputList, ParsedInputMap, }; -use connector::IntoFilter; -use prisma_models::Model; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{IntoFilter, Model}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; use write_args_parser::*; diff --git a/query-engine/core/src/query_graph_builder/write/delete.rs b/query-engine/core/src/query_graph_builder/write/delete.rs index a5bca5af7758..df6a66436022 100644 --- a/query-engine/core/src/query_graph_builder/write/delete.rs +++ b/query-engine/core/src/query_graph_builder/write/delete.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Node, QueryGraph, QueryGraphDependency}, ArgumentListLookup, FilteredQuery, ParsedField, }; -use connector::filter::Filter; -use prisma_models::Model; +use query_structure::{Filter, Model}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/disconnect.rs b/query-engine/core/src/query_graph_builder/write/disconnect.rs index fea9d6f4f140..e354e9dc5400 100644 --- a/query-engine/core/src/query_graph_builder/write/disconnect.rs +++ b/query-engine/core/src/query_graph_builder/write/disconnect.rs @@ -3,7 +3,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use prisma_models::RelationFieldRef; +use query_structure::RelationFieldRef; /// Only for many to many relations. /// diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs index 6052778037d0..81038c18a57e 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs @@ -4,9 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, QueryResult, }; -use connector::{Filter, IntoFilter}; use itertools::Itertools; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Filter, IntoFilter, Model, RelationFieldRef}; use std::convert::TryInto; /// Handles nested connect cases. 
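For orientation on the "nested connect cases" mentioned above: this module builds graph nodes for writes that attach an existing related record, identified by a unique filter, nested inside a parent write. A sketch of the incoming operation shape as a serde_json literal; the exact protocol field names here are illustrative, not taken from this patch:

    use serde_json::json;

    fn main() {
        // Create a Post and connect it to an existing User in one operation;
        // the nested `connect` block is the case handled above.
        let operation = json!({
            "modelName": "Post",
            "action": "createOne",
            "query": {
                "arguments": {
                    "data": {
                        "title": "Hello",
                        "author": { "connect": { "id": 1 } }
                    }
                },
                "selection": { "id": true, "title": true }
            }
        });
        println!("{operation:#}");
    }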
diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs index bbe63701cbd2..bcaacc1f5811 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, Computation, ParsedInputMap, ParsedInputValue, }; -use connector::{Filter, IntoFilter}; -use prisma_models::{Model, RelationFieldRef, SelectionResult}; +use query_structure::{Filter, IntoFilter, Model, RelationFieldRef, SelectionResult}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs index 80229d018515..d0f649c3ecf6 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs @@ -5,8 +5,7 @@ use crate::{ write::write_args_parser::WriteArgsParser, ParsedInputList, ParsedInputValue, }; -use connector::{Filter, IntoFilter}; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Filter, IntoFilter, Model, RelationFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs index 875902a7ecad..ceed2b578b03 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs @@ -4,8 +4,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, }; -use connector::{Filter, RecordFilter}; -use prisma_models::{Model, PrismaValue, RelationFieldRef}; +use connector::RecordFilter; +use query_structure::{Filter, Model, PrismaValue, RelationFieldRef}; use std::convert::TryInto; /// Adds a delete (single) record node to the graph and connects it to the parent. diff --git a/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs index 1b4c208a7855..841b7722eb28 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/disconnect_nested.rs @@ -3,9 +3,8 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, Query, WriteQuery, }; -use connector::{Filter, RelationCompare}; use itertools::Itertools; -use prisma_models::{Model, PrismaValue, RelationFieldRef, SelectionResult}; +use query_structure::{Filter, Model, PrismaValue, RelationCompare, RelationFieldRef, SelectionResult}; use std::convert::TryInto; /// Handles nested disconnect cases. 
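Several of the nested-write hunks above now import `IntoFilter` from its new home; the trait itself moves later in this patch and is simply `pub trait IntoFilter { fn filter(self) -> Filter; }`. A self-contained toy of the conversion pattern it encodes, turning selected field values into an `And` of equality filters; all types here are simplified stand-ins for the query-structure ones:

    #[derive(Debug)]
    enum Filter {
        Equals { field: String, value: i64 },
        And(Vec<Filter>),
    }

    trait IntoFilter {
        fn filter(self) -> Filter;
    }

    /// Stand-in for a SelectionResult: selected field names with their values.
    struct SelectionResult(Vec<(String, i64)>);

    impl IntoFilter for SelectionResult {
        fn filter(self) -> Filter {
            Filter::And(
                self.0
                    .into_iter()
                    .map(|(field, value)| Filter::Equals { field, value })
                    .collect(),
            )
        }
    }

    fn main() {
        let result = SelectionResult(vec![("id".to_string(), 1)]);
        println!("{:?}", result.filter());
    }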
diff --git a/query-engine/core/src/query_graph_builder/write/nested/mod.rs b/query-engine/core/src/query_graph_builder/write/nested/mod.rs index 7f8049c76433..5d0ad21a4c7e 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/mod.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/mod.rs @@ -17,7 +17,7 @@ use connect_or_create_nested::*; use create_nested::*; use delete_nested::*; use disconnect_nested::*; -use prisma_models::RelationFieldRef; +use query_structure::RelationFieldRef; use schema::{constants::operations, QuerySchema}; use set_nested::*; use update_nested::*; diff --git a/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs index 0f71bc2816aa..67fa227aa2c6 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs @@ -1,8 +1,7 @@ use super::*; use crate::{query_ast::*, query_graph::*, ParsedInputValue}; -use connector::Filter; use itertools::Itertools; -use prisma_models::{Model, RelationFieldRef, SelectionResult}; +use query_structure::{Filter, Model, RelationFieldRef, SelectionResult}; use std::convert::TryInto; /// Only for x-to-many relations. diff --git a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs index bca227dd3a08..78bf69af2f79 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputValue, }; -use connector::Filter; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Filter, Model, RelationFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs index 006c0b516099..0e72e1fa141c 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs @@ -5,8 +5,7 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, ParsedInputMap, ParsedInputValue, }; -use connector::Filter; -use prisma_models::RelationFieldRef; +use query_structure::{Filter, RelationFieldRef}; use schema::constants::args; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/raw.rs b/query-engine/core/src/query_graph_builder/write/raw.rs index 80835b9691cb..af44770aa3f0 100644 --- a/query-engine/core/src/query_graph_builder/write/raw.rs +++ b/query-engine/core/src/query_graph_builder/write/raw.rs @@ -1,6 +1,6 @@ use super::*; use crate::{query_ast::*, query_graph::QueryGraph, ParsedField}; -use prisma_models::{Model, PrismaValue}; +use query_structure::{Model, PrismaValue}; use std::{collections::HashMap, convert::TryInto}; pub(crate) fn execute_raw(graph: &mut QueryGraph, field: ParsedField<'_>) -> QueryGraphBuilderResult<()> { diff --git a/query-engine/core/src/query_graph_builder/write/update.rs b/query-engine/core/src/query_graph_builder/write/update.rs index 847efc38410c..001e2b48a96d 100644 --- a/query-engine/core/src/query_graph_builder/write/update.rs +++ b/query-engine/core/src/query_graph_builder/write/update.rs @@ -5,9 +5,8 @@ use crate::{ 
query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, ArgumentListLookup, ParsedField, ParsedInputMap, }; -use connector::{Filter, IntoFilter}; -use prisma_models::Model; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{Filter, IntoFilter, Model}; use schema::{constants::args, QuerySchema}; use std::convert::TryInto; diff --git a/query-engine/core/src/query_graph_builder/write/upsert.rs b/query-engine/core/src/query_graph_builder/write/upsert.rs index 0a01e43e73c0..92fcd6d12efd 100644 --- a/query-engine/core/src/query_graph_builder/write/upsert.rs +++ b/query-engine/core/src/query_graph_builder/write/upsert.rs @@ -4,8 +4,7 @@ use crate::{ query_graph::{Flow, Node, QueryGraph, QueryGraphDependency}, ParsedField, ParsedInputMap, ParsedInputValue, ParsedObject, }; -use connector::IntoFilter; -use prisma_models::Model; +use query_structure::{IntoFilter, Model}; use schema::QuerySchema; /// Handles a top-level upsert diff --git a/query-engine/core/src/query_graph_builder/write/utils.rs b/query-engine/core/src/query_graph_builder/write/utils.rs index 113e09e39230..2f2e736aedaf 100644 --- a/query-engine/core/src/query_graph_builder/write/utils.rs +++ b/query-engine/core/src/query_graph_builder/write/utils.rs @@ -3,10 +3,10 @@ use crate::{ query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, Computation, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{DatasourceFieldName, Filter, RecordFilter, WriteArgs, WriteOperation}; +use connector::{DatasourceFieldName, RecordFilter, WriteArgs, WriteOperation}; use indexmap::IndexMap; -use prisma_models::{FieldSelection, Model, PrismaValue, RelationFieldRef, SelectionResult}; use psl::parser_database::ReferentialAction; +use query_structure::{FieldSelection, Filter, Model, PrismaValue, RelationFieldRef, SelectionResult}; use schema::QuerySchema; /// Coerces single values (`ParsedInputValue::Single` and `ParsedInputValue::Map`) into a vector. 
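The doc comment closing the hunk above describes a small normalization step: a parsed argument arrives either as a single value or as a list of values, and downstream code wants a Vec either way. Reduced to a self-contained sketch; the enum here is a stand-in, not the real `ParsedInputValue`:

    enum ParsedValue<T> {
        Single(T),
        List(Vec<T>),
    }

    /// Normalize both shapes to a vector, as the utility above does for
    /// single and map inputs.
    fn coerce_vec<T>(value: ParsedValue<T>) -> Vec<T> {
        match value {
            ParsedValue::Single(v) => vec![v],
            ParsedValue::List(vs) => vs,
        }
    }

    fn main() {
        assert_eq!(coerce_vec(ParsedValue::Single(1)), vec![1]);
        assert_eq!(coerce_vec(ParsedValue::List(vec![1, 2])), vec![1, 2]);
    }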
diff --git a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs index c5473065ac60..255247e4cee9 100644 --- a/query-engine/core/src/query_graph_builder/write/write_args_parser.rs +++ b/query-engine/core/src/query_graph_builder/write/write_args_parser.rs @@ -1,7 +1,7 @@ use super::*; use crate::query_document::{ParsedInputMap, ParsedInputValue}; use connector::{DatasourceFieldName, WriteArgs, WriteOperation}; -use prisma_models::{CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, TypeIdentifier}; +use query_structure::{CompositeFieldRef, Field, Model, PrismaValue, RelationFieldRef, ScalarFieldRef, TypeIdentifier}; use schema::constants::{args, json_null, operations}; use std::{borrow::Cow, convert::TryInto}; diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index 2ad67707f22c..7becb19e768b 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -5,7 +5,7 @@ use crate::{ use connector::{AggregationResult, RelAggregationResult, RelAggregationRow}; use indexmap::IndexMap; use itertools::Itertools; -use prisma_models::{CompositeFieldRef, Field, PrismaValue, SelectionResult}; +use query_structure::{CompositeFieldRef, Field, PrismaValue, SelectionResult}; use schema::{ constants::{aggregations::*, output_fields::*}, *, diff --git a/query-engine/core/src/response_ir/ir_serializer.rs b/query-engine/core/src/response_ir/ir_serializer.rs index 4e69b5534e17..d8efa710bff8 100644 --- a/query-engine/core/src/response_ir/ir_serializer.rs +++ b/query-engine/core/src/response_ir/ir_serializer.rs @@ -1,6 +1,6 @@ use super::{internal::serialize_internal, response::*, *}; use crate::{CoreError, ExpressionResult, QueryResult}; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; use schema::{OutputField, QuerySchema}; #[derive(Debug)] diff --git a/query-engine/core/src/response_ir/mod.rs b/query-engine/core/src/response_ir/mod.rs index e53422ff8962..e9a4eeb0c9a4 100644 --- a/query-engine/core/src/response_ir/mod.rs +++ b/query-engine/core/src/response_ir/mod.rs @@ -18,7 +18,7 @@ pub(crate) use ir_serializer::*; use crate::ArgumentValue; use indexmap::IndexMap; -use prisma_models::PrismaValue; +use query_structure::PrismaValue; use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer}; use std::{collections::HashMap, fmt, sync::Arc}; diff --git a/query-engine/core/src/result_ast/mod.rs b/query-engine/core/src/result_ast/mod.rs index 636fa611607b..91c58f8551a9 100644 --- a/query-engine/core/src/result_ast/mod.rs +++ b/query-engine/core/src/result_ast/mod.rs @@ -1,5 +1,5 @@ use connector::{AggregationRow, RelAggregationRow}; -use prisma_models::{ManyRecords, Model, SelectionResult}; +use query_structure::{ManyRecords, Model, SelectionResult}; #[derive(Debug, Clone)] pub(crate) enum QueryResult { diff --git a/query-engine/dmmf/Cargo.toml b/query-engine/dmmf/Cargo.toml index cc92c914d4e6..367c87be5a60 100644 --- a/query-engine/dmmf/Cargo.toml +++ b/query-engine/dmmf/Cargo.toml @@ -10,7 +10,7 @@ serde.workspace = true serde_json.workspace = true schema = { path = "../schema" } indexmap = { version = "1.7", features = ["serde-1"] } -prisma-models = { path = "../prisma-models", features = ["default_generators"] } +query-structure = { path = "../query-structure", features = ["default_generators"] } [dev-dependencies] expect-test = "1.2.2" diff --git 
a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs index 8d078719d4fa..c367695150f6 100644 --- a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs +++ b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs @@ -2,11 +2,11 @@ use crate::serialization_ast::datamodel_ast::{ Datamodel, Enum, EnumValue, Field, Function, Model, PrimaryKey, UniqueIndex, }; use bigdecimal::ToPrimitive; -use prisma_models::{dml_default_kind, encode_bytes, DefaultKind, FieldArity, PrismaValue}; use psl::{ parser_database::{walkers, ScalarFieldType}, schema_ast::ast::WithDocumentation, }; +use query_structure::{dml_default_kind, encode_bytes, DefaultKind, FieldArity, PrismaValue}; pub(crate) fn schema_to_dmmf(schema: &psl::ValidatedSchema) -> Datamodel { let mut datamodel = Datamodel { diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml index 74f9686189fc..187297b7529f 100644 --- a/query-engine/query-engine-node-api/Cargo.toml +++ b/query-engine/query-engine-node-api/Cargo.toml @@ -22,7 +22,7 @@ query-connector = { path = "../connectors/query-connector" } user-facing-errors = { path = "../../libs/user-facing-errors" } psl.workspace = true sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" } -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure" } driver-adapters = { path = "../driver-adapters" } napi.workspace = true napi-derive.workspace = true diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml index a8bc393aee3f..95ecd228a152 100644 --- a/query-engine/query-engine-wasm/Cargo.toml +++ b/query-engine/query-engine-wasm/Cargo.toml @@ -13,7 +13,7 @@ anyhow = "1" async-trait = "0.1" user-facing-errors = { path = "../../libs/user-facing-errors" } psl.workspace = true -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure" } thiserror = "1" connection-string.workspace = true diff --git a/query-engine/prisma-models/Cargo.toml b/query-engine/query-structure/Cargo.toml similarity index 96% rename from query-engine/prisma-models/Cargo.toml rename to query-engine/query-structure/Cargo.toml index 0becd1fdea70..db8cb68aaf83 100644 --- a/query-engine/prisma-models/Cargo.toml +++ b/query-engine/query-structure/Cargo.toml @@ -1,6 +1,6 @@ [package] edition = "2021" -name = "prisma-models" +name = "query-structure" version = "0.0.0" [dependencies] diff --git a/query-engine/prisma-models/src/composite_type.rs b/query-engine/query-structure/src/composite_type.rs similarity index 100% rename from query-engine/prisma-models/src/composite_type.rs rename to query-engine/query-structure/src/composite_type.rs diff --git a/query-engine/prisma-models/src/convert.rs b/query-engine/query-structure/src/convert.rs similarity index 100% rename from query-engine/prisma-models/src/convert.rs rename to query-engine/query-structure/src/convert.rs diff --git a/query-engine/prisma-models/src/default_value.rs b/query-engine/query-structure/src/default_value.rs similarity index 100% rename from query-engine/prisma-models/src/default_value.rs rename to query-engine/query-structure/src/default_value.rs diff --git a/query-engine/prisma-models/src/error.rs b/query-engine/query-structure/src/error.rs similarity index 100% rename from query-engine/prisma-models/src/error.rs rename to query-engine/query-structure/src/error.rs diff --git 
a/query-engine/prisma-models/src/field/composite.rs b/query-engine/query-structure/src/field/composite.rs similarity index 100% rename from query-engine/prisma-models/src/field/composite.rs rename to query-engine/query-structure/src/field/composite.rs diff --git a/query-engine/prisma-models/src/field/mod.rs b/query-engine/query-structure/src/field/mod.rs similarity index 100% rename from query-engine/prisma-models/src/field/mod.rs rename to query-engine/query-structure/src/field/mod.rs diff --git a/query-engine/prisma-models/src/field/relation.rs b/query-engine/query-structure/src/field/relation.rs similarity index 100% rename from query-engine/prisma-models/src/field/relation.rs rename to query-engine/query-structure/src/field/relation.rs diff --git a/query-engine/prisma-models/src/field/scalar.rs b/query-engine/query-structure/src/field/scalar.rs similarity index 100% rename from query-engine/prisma-models/src/field/scalar.rs rename to query-engine/query-structure/src/field/scalar.rs diff --git a/query-engine/prisma-models/src/field_selection.rs b/query-engine/query-structure/src/field_selection.rs similarity index 100% rename from query-engine/prisma-models/src/field_selection.rs rename to query-engine/query-structure/src/field_selection.rs diff --git a/query-engine/prisma-models/src/fields.rs b/query-engine/query-structure/src/fields.rs similarity index 100% rename from query-engine/prisma-models/src/fields.rs rename to query-engine/query-structure/src/fields.rs diff --git a/query-engine/connectors/query-connector/src/compare.rs b/query-engine/query-structure/src/filter/compare.rs similarity index 99% rename from query-engine/connectors/query-connector/src/compare.rs rename to query-engine/query-structure/src/filter/compare.rs index 783d847939c4..7757965050ad 100644 --- a/query-engine/connectors/query-connector/src/compare.rs +++ b/query-engine/query-structure/src/filter/compare.rs @@ -1,7 +1,7 @@ use super::*; use crate::filter::Filter; -use prisma_models::PrismaValue; +use prisma_value::PrismaValue; /// Comparing methods for scalar fields. 
pub trait ScalarCompare { diff --git a/query-engine/connectors/query-connector/src/filter/composite.rs b/query-engine/query-structure/src/filter/composite.rs similarity index 95% rename from query-engine/connectors/query-connector/src/filter/composite.rs rename to query-engine/query-structure/src/filter/composite.rs index cb113e9c0355..d231421c325f 100644 --- a/query-engine/connectors/query-connector/src/filter/composite.rs +++ b/query-engine/query-structure/src/filter/composite.rs @@ -1,7 +1,5 @@ -use crate::compare::CompositeCompare; -use crate::filter::Filter; -use prisma_models::{CompositeFieldRef, PrismaValue}; -// use std::sync::Arc; +use crate::{filter::Filter, CompositeCompare, CompositeFieldRef}; +use prisma_value::PrismaValue; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CompositeFilter { diff --git a/query-engine/connectors/query-connector/src/filter/into_filter.rs b/query-engine/query-structure/src/filter/into_filter.rs similarity index 93% rename from query-engine/connectors/query-connector/src/filter/into_filter.rs rename to query-engine/query-structure/src/filter/into_filter.rs index d2c32782795e..b180b3b80c49 100644 --- a/query-engine/connectors/query-connector/src/filter/into_filter.rs +++ b/query-engine/query-structure/src/filter/into_filter.rs @@ -1,6 +1,7 @@ use super::*; + use crate::ScalarCompare; -use prisma_models::{SelectedField, SelectionResult}; +use crate::{SelectedField, SelectionResult}; pub trait IntoFilter { fn filter(self) -> Filter; diff --git a/query-engine/connectors/query-connector/src/filter/json.rs b/query-engine/query-structure/src/filter/json.rs similarity index 98% rename from query-engine/connectors/query-connector/src/filter/json.rs rename to query-engine/query-structure/src/filter/json.rs index c1cae9b81872..b0452106d19f 100644 --- a/query-engine/connectors/query-connector/src/filter/json.rs +++ b/query-engine/query-structure/src/filter/json.rs @@ -1,6 +1,5 @@ use super::scalar::*; -use crate::{Filter, JsonCompare, ScalarFilter}; -use prisma_models::ScalarFieldRef; +use crate::{Filter, JsonCompare, ScalarFieldRef, ScalarFilter}; #[derive(Debug, Clone, Eq, Hash, PartialEq)] pub enum JsonTargetType { diff --git a/query-engine/connectors/query-connector/src/filter/list.rs b/query-engine/query-structure/src/filter/list.rs similarity index 96% rename from query-engine/connectors/query-connector/src/filter/list.rs rename to query-engine/query-structure/src/filter/list.rs index ab1cf913880b..e71064c67091 100644 --- a/query-engine/connectors/query-connector/src/filter/list.rs +++ b/query-engine/query-structure/src/filter/list.rs @@ -1,6 +1,5 @@ use super::*; -use crate::compare::ScalarListCompare; -use prisma_models::{ScalarField, ScalarFieldRef}; +use crate::{ScalarField, ScalarFieldRef, ScalarListCompare}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ScalarListFilter { diff --git a/query-engine/connectors/query-connector/src/filter/mod.rs b/query-engine/query-structure/src/filter/mod.rs similarity index 99% rename from query-engine/connectors/query-connector/src/filter/mod.rs rename to query-engine/query-structure/src/filter/mod.rs index 856fb5207f95..a05d3e9c4095 100644 --- a/query-engine/connectors/query-connector/src/filter/mod.rs +++ b/query-engine/query-structure/src/filter/mod.rs @@ -5,6 +5,7 @@ //! [RelationCompare](/query-connector/trait.RelationCompare.html). //! [CompositeCompare](/query-connector/trait.RelationCompare.html). 
+mod compare; mod composite; mod into_filter; mod json; @@ -12,14 +13,16 @@ mod list; mod relation; mod scalar; +pub use compare::*; pub use composite::*; pub use into_filter::*; pub use json::*; pub use list::*; -use prisma_models::ScalarFieldRef; pub use relation::*; pub use scalar::*; +use crate::ScalarFieldRef; + #[derive(Debug, Clone, Eq, Hash, PartialEq)] pub enum Filter { And(Vec<Filter>), diff --git a/query-engine/connectors/query-connector/src/filter/relation.rs b/query-engine/query-structure/src/filter/relation.rs similarity index 97% rename from query-engine/connectors/query-connector/src/filter/relation.rs rename to query-engine/query-structure/src/filter/relation.rs index 500e406e407f..d806ba2dec1e 100644 --- a/query-engine/connectors/query-connector/src/filter/relation.rs +++ b/query-engine/query-structure/src/filter/relation.rs @@ -1,6 +1,4 @@ -use crate::compare::RelationCompare; -use crate::filter::Filter; -use prisma_models::RelationField; +use crate::{filter::Filter, RelationCompare, RelationField}; #[derive(Clone, PartialEq, Eq, Hash)] pub struct RelationFilter { diff --git a/query-engine/connectors/query-connector/src/filter/scalar/compare.rs b/query-engine/query-structure/src/filter/scalar/compare.rs similarity index 99% rename from query-engine/connectors/query-connector/src/filter/scalar/compare.rs rename to query-engine/query-structure/src/filter/scalar/compare.rs index f93798441027..efbbb370f664 100644 --- a/query-engine/connectors/query-connector/src/filter/scalar/compare.rs +++ b/query-engine/query-structure/src/filter/scalar/compare.rs @@ -1,6 +1,5 @@ use super::*; use crate::*; -use prisma_models::*; impl ScalarCompare for ScalarFieldRef { /// Field is in a given value diff --git a/query-engine/connectors/query-connector/src/filter/scalar/condition/mod.rs b/query-engine/query-structure/src/filter/scalar/condition/mod.rs similarity index 99% rename from query-engine/connectors/query-connector/src/filter/scalar/condition/mod.rs rename to query-engine/query-structure/src/filter/scalar/condition/mod.rs index 4845fab126f1..ff32d3d52219 100644 --- a/query-engine/connectors/query-connector/src/filter/scalar/condition/mod.rs +++ b/query-engine/query-structure/src/filter/scalar/condition/mod.rs @@ -4,7 +4,6 @@ pub use value::{ConditionListValue, ConditionValue}; use super::*; use crate::*; -use prisma_models::*; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ScalarCondition { diff --git a/query-engine/connectors/query-connector/src/filter/scalar/condition/value.rs b/query-engine/query-structure/src/filter/scalar/condition/value.rs similarity index 97% rename from query-engine/connectors/query-connector/src/filter/scalar/condition/value.rs rename to query-engine/query-structure/src/filter/scalar/condition/value.rs index a93a8e304adb..5a1b7b7aecb7 100644 --- a/query-engine/connectors/query-connector/src/filter/scalar/condition/value.rs +++ b/query-engine/query-structure/src/filter/scalar/condition/value.rs @@ -1,4 +1,5 @@ -use prisma_models::{PrismaListValue, PrismaValue, ScalarFieldRef}; +use crate::field::*; +use prisma_value::{PrismaListValue, PrismaValue}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ConditionValue { diff --git a/query-engine/connectors/query-connector/src/filter/scalar/mod.rs b/query-engine/query-structure/src/filter/scalar/mod.rs similarity index 99% rename from query-engine/connectors/query-connector/src/filter/scalar/mod.rs rename to query-engine/query-structure/src/filter/scalar/mod.rs index a2179b758b26..adc2cc6de320 100644 ---
a/query-engine/connectors/query-connector/src/filter/scalar/mod.rs +++ b/query-engine/query-structure/src/filter/scalar/mod.rs @@ -6,7 +6,7 @@ pub use compare::*; pub use condition::*; pub use projection::*; -use prisma_models::*; +use crate::*; use std::collections::BTreeSet; diff --git a/query-engine/connectors/query-connector/src/filter/scalar/projection.rs b/query-engine/query-structure/src/filter/scalar/projection.rs similarity index 97% rename from query-engine/connectors/query-connector/src/filter/scalar/projection.rs rename to query-engine/query-structure/src/filter/scalar/projection.rs index 63170be55e5a..799ab0859cb9 100644 --- a/query-engine/connectors/query-connector/src/filter/scalar/projection.rs +++ b/query-engine/query-structure/src/filter/scalar/projection.rs @@ -1,4 +1,4 @@ -use prisma_models::ScalarFieldRef; +use crate::field::ScalarFieldRef; #[derive(Clone, PartialEq, Eq, Hash)] pub enum ScalarProjection { diff --git a/query-engine/prisma-models/src/internal_data_model.rs b/query-engine/query-structure/src/internal_data_model.rs similarity index 100% rename from query-engine/prisma-models/src/internal_data_model.rs rename to query-engine/query-structure/src/internal_data_model.rs diff --git a/query-engine/prisma-models/src/internal_enum.rs b/query-engine/query-structure/src/internal_enum.rs similarity index 100% rename from query-engine/prisma-models/src/internal_enum.rs rename to query-engine/query-structure/src/internal_enum.rs diff --git a/query-engine/prisma-models/src/lib.rs b/query-engine/query-structure/src/lib.rs similarity index 91% rename from query-engine/prisma-models/src/lib.rs rename to query-engine/query-structure/src/lib.rs index 87bdacfb8ce0..25519a6d856c 100644 --- a/query-engine/prisma-models/src/lib.rs +++ b/query-engine/query-structure/src/lib.rs @@ -13,11 +13,13 @@ mod order_by; mod parent_container; mod prisma_value_ext; mod projections; +mod query_arguments; mod record; mod relation; mod selection_result; mod zipper; +pub mod filter; pub mod prelude; pub use self::{default_value::*, native_type_instance::*, zipper::*}; @@ -27,11 +29,13 @@ pub use error::*; pub use field::*; pub use field_selection::*; pub use fields::*; +pub use filter::*; pub use internal_data_model::*; pub use internal_enum::*; pub use model::*; pub use order_by::*; pub use projections::*; +pub use query_arguments::*; pub use record::*; pub use relation::*; pub use selection_result::*; diff --git a/query-engine/prisma-models/src/model.rs b/query-engine/query-structure/src/model.rs similarity index 100% rename from query-engine/prisma-models/src/model.rs rename to query-engine/query-structure/src/model.rs diff --git a/query-engine/prisma-models/src/native_type_instance.rs b/query-engine/query-structure/src/native_type_instance.rs similarity index 100% rename from query-engine/prisma-models/src/native_type_instance.rs rename to query-engine/query-structure/src/native_type_instance.rs diff --git a/query-engine/prisma-models/src/order_by.rs b/query-engine/query-structure/src/order_by.rs similarity index 100% rename from query-engine/prisma-models/src/order_by.rs rename to query-engine/query-structure/src/order_by.rs diff --git a/query-engine/prisma-models/src/parent_container.rs b/query-engine/query-structure/src/parent_container.rs similarity index 100% rename from query-engine/prisma-models/src/parent_container.rs rename to query-engine/query-structure/src/parent_container.rs diff --git a/query-engine/prisma-models/src/prelude.rs b/query-engine/query-structure/src/prelude.rs 
similarity index 100% rename from query-engine/prisma-models/src/prelude.rs rename to query-engine/query-structure/src/prelude.rs diff --git a/query-engine/prisma-models/src/prisma_value_ext.rs b/query-engine/query-structure/src/prisma_value_ext.rs similarity index 100% rename from query-engine/prisma-models/src/prisma_value_ext.rs rename to query-engine/query-structure/src/prisma_value_ext.rs diff --git a/query-engine/prisma-models/src/projections/mod.rs b/query-engine/query-structure/src/projections/mod.rs similarity index 100% rename from query-engine/prisma-models/src/projections/mod.rs rename to query-engine/query-structure/src/projections/mod.rs diff --git a/query-engine/prisma-models/src/projections/model_projection.rs b/query-engine/query-structure/src/projections/model_projection.rs similarity index 100% rename from query-engine/prisma-models/src/projections/model_projection.rs rename to query-engine/query-structure/src/projections/model_projection.rs diff --git a/query-engine/connectors/query-connector/src/query_arguments.rs b/query-engine/query-structure/src/query_arguments.rs similarity index 99% rename from query-engine/connectors/query-connector/src/query_arguments.rs rename to query-engine/query-structure/src/query_arguments.rs index cb9e5509734d..f9c222d80dbe 100644 --- a/query-engine/connectors/query-connector/src/query_arguments.rs +++ b/query-engine/query-structure/src/query_arguments.rs @@ -1,5 +1,4 @@ -use crate::filter::Filter; -use prisma_models::*; +use crate::*; /// `QueryArguments` define various constraints queried data should fulfill: /// - `cursor`, `take`, `skip` page through the data. diff --git a/query-engine/prisma-models/src/record.rs b/query-engine/query-structure/src/record.rs similarity index 100% rename from query-engine/prisma-models/src/record.rs rename to query-engine/query-structure/src/record.rs diff --git a/query-engine/prisma-models/src/relation.rs b/query-engine/query-structure/src/relation.rs similarity index 100% rename from query-engine/prisma-models/src/relation.rs rename to query-engine/query-structure/src/relation.rs diff --git a/query-engine/prisma-models/src/selection_result.rs b/query-engine/query-structure/src/selection_result.rs similarity index 100% rename from query-engine/prisma-models/src/selection_result.rs rename to query-engine/query-structure/src/selection_result.rs diff --git a/query-engine/prisma-models/src/zipper.rs b/query-engine/query-structure/src/zipper.rs similarity index 100% rename from query-engine/prisma-models/src/zipper.rs rename to query-engine/query-structure/src/zipper.rs diff --git a/query-engine/prisma-models/tests/datamodel_converter_tests.rs b/query-engine/query-structure/tests/datamodel_converter_tests.rs similarity index 99% rename from query-engine/prisma-models/tests/datamodel_converter_tests.rs rename to query-engine/query-structure/tests/datamodel_converter_tests.rs index a2ee28ca6c0d..c7ef83e0617f 100644 --- a/query-engine/prisma-models/tests/datamodel_converter_tests.rs +++ b/query-engine/query-structure/tests/datamodel_converter_tests.rs @@ -1,6 +1,6 @@ #![allow(non_snake_case)] -use prisma_models::*; +use query_structure::*; use std::sync::Arc; #[test] @@ -420,7 +420,7 @@ fn duplicate_relation_name() { fn convert(datamodel: &str) -> InternalDataModel { let schema = psl::parse_schema(datamodel).unwrap(); - prisma_models::convert(Arc::new(schema)) + query_structure::convert(Arc::new(schema)) } trait DatamodelAssertions { diff --git a/query-engine/request-handlers/Cargo.toml 
b/query-engine/request-handlers/Cargo.toml index f5fb433b13ba..ad838c5999b7 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure" } query-core = { path = "../core" } user-facing-errors = { path = "../../libs/user-facing-errors" } psl.workspace = true diff --git a/query-engine/request-handlers/src/handler.rs b/query-engine/request-handlers/src/handler.rs index df25616c2201..cd5d887718f0 100644 --- a/query-engine/request-handlers/src/handler.rs +++ b/query-engine/request-handlers/src/handler.rs @@ -2,7 +2,6 @@ use super::GQLResponse; use crate::{GQLError, PrismaResponse, RequestBody}; use futures::FutureExt; use indexmap::IndexMap; -use prisma_models::{parse_datetime, stringify_datetime, PrismaValue}; use query_core::{ constants::custom_types, protocol::EngineProtocol, @@ -11,6 +10,7 @@ use query_core::{ ArgumentValue, ArgumentValueObject, BatchDocument, BatchDocumentTransaction, CompactedDocument, Operation, QueryDocument, QueryExecutor, TxId, }; +use query_structure::{parse_datetime, stringify_datetime, PrismaValue}; use std::{collections::HashMap, fmt, panic::AssertUnwindSafe}; type ArgsToResult = (HashMap, IndexMap); diff --git a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs index 208705268c1e..09ceeae20c0e 100644 --- a/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs +++ b/query-engine/request-handlers/src/protocols/json/protocol_adapter.rs @@ -1,12 +1,12 @@ use crate::{FieldQuery, HandlerError, JsonSingleQuery, SelectionSet}; use bigdecimal::{BigDecimal, FromPrimitive}; use indexmap::IndexMap; -use prisma_models::{decode_bytes, parse_datetime, prelude::ParentContainer, Field}; use query_core::{ constants::custom_types, schema::{ObjectType, OutputField, QuerySchema}, ArgumentValue, Operation, Selection, }; +use query_structure::{decode_bytes, parse_datetime, prelude::ParentContainer, Field}; use serde_json::Value as JsonValue; use std::str::FromStr; diff --git a/query-engine/schema/Cargo.toml b/query-engine/schema/Cargo.toml index 0d4c06ad944e..12664344572d 100644 --- a/query-engine/schema/Cargo.toml +++ b/query-engine/schema/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -prisma-models = { path = "../prisma-models" } +query-structure = { path = "../query-structure" } psl.workspace = true rustc-hash = "1.1.0" once_cell = "1" diff --git a/query-engine/schema/README.md b/query-engine/schema/README.md index 4952270b0cd5..9522bdd563a0 100644 --- a/query-engine/schema/README.md +++ b/query-engine/schema/README.md @@ -1,6 +1,6 @@ # schema -This crate contains the logic responsible for building a query schema from a Prisma datamodel (presented as a `prisma_models::InternalDataModel`). +This crate contains the logic responsible for building a query schema from a Prisma datamodel (presented as a `query_structure::InternalDataModel`). 
## Benchmarks diff --git a/query-engine/schema/src/build.rs b/query-engine/schema/src/build.rs index 6fa03c3fabbc..3c589989f21e 100644 --- a/query-engine/schema/src/build.rs +++ b/query-engine/schema/src/build.rs @@ -15,8 +15,8 @@ pub(crate) use output_types::{mutation_type, query_type}; use self::{enum_types::*, utils::*}; use crate::*; -use prisma_models::{ast, Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; use psl::{datamodel_connector::ConnectorCapability, PreviewFeatures}; +use query_structure::{ast, Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; pub fn build(schema: Arc, enable_raw_queries: bool) -> QuerySchema { let preview_features = schema.configuration.preview_features(); @@ -29,6 +29,6 @@ pub fn build_with_features( enable_raw_queries: bool, ) -> QuerySchema { let connector = schema.connector; - let internal_data_model = prisma_models::convert(schema); + let internal_data_model = query_structure::convert(schema); QuerySchema::new(enable_raw_queries, connector, preview_features, internal_data_model) } diff --git a/query-engine/schema/src/build/enum_types.rs b/query-engine/schema/src/build/enum_types.rs index 715a161b08b9..48490ccc914c 100644 --- a/query-engine/schema/src/build/enum_types.rs +++ b/query-engine/schema/src/build/enum_types.rs @@ -1,7 +1,7 @@ use super::*; use crate::EnumType; use constants::{filters, itx, json_null, ordering}; -use prisma_models::prelude::ParentContainer; +use query_structure::prelude::ParentContainer; pub(crate) fn sort_order_enum() -> EnumType { let ident = Identifier::new_prisma(IdentifierType::SortOrder); diff --git a/query-engine/schema/src/build/input_types/fields/arguments.rs b/query-engine/schema/src/build/input_types/fields/arguments.rs index 33400b7d0002..58393083f7fc 100644 --- a/query-engine/schema/src/build/input_types/fields/arguments.rs +++ b/query-engine/schema/src/build/input_types/fields/arguments.rs @@ -3,7 +3,7 @@ use constants::args; use input_types::objects::order_by_objects::OrderByOptions; use mutations::create_one; use objects::*; -use prisma_models::{prelude::ParentContainer, CompositeFieldRef}; +use query_structure::{prelude::ParentContainer, CompositeFieldRef}; /// Builds "where" argument. pub(crate) fn where_argument<'a>(ctx: &'a QuerySchema, model: &Model) -> InputField<'a> { diff --git a/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs b/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs index 5f00ee47c652..d952cb0d18d2 100644 --- a/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs +++ b/query-engine/schema/src/build/input_types/fields/data_input_mapper/create.rs @@ -1,6 +1,6 @@ use super::*; use constants::*; -use prisma_models::CompositeFieldRef; +use query_structure::CompositeFieldRef; pub(crate) struct CreateDataInputFieldMapper { unchecked: bool, diff --git a/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs b/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs index 91ae15322687..b8269875527e 100644 --- a/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs +++ b/query-engine/schema/src/build/input_types/fields/data_input_mapper/mod.rs @@ -5,7 +5,7 @@ pub(crate) use create::*; pub(crate) use update::*; use super::*; -use prisma_models::prelude::*; +use query_structure::prelude::*; // Todo: This isn't final, this is only the first draft to get structure into the // wild cross-dependency waste that was the create/update inputs. 
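Stepping back from the individual files: every downstream crate in this patch migrates with the same two-step recipe, condensed here as a summary sketch (the `use` line compiles only inside this workspace):

    // 1. Cargo.toml: swap the dependency.
    //      before: prisma-models = { path = "../prisma-models" }
    //      after:  query-structure = { path = "../query-structure" }
    //
    // 2. Imports: filter types (formerly under connector::filter) and model
    //    types (formerly under prisma_models) now come from a single crate.
    //      before: use connector::filter::Filter;
    //              use prisma_models::Model;
    //      after:
    use query_structure::{Filter, Model};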
diff --git a/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs b/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs index a9b0395d2e00..e6f051b70586 100644 --- a/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs +++ b/query-engine/schema/src/build/input_types/fields/data_input_mapper/update.rs @@ -1,6 +1,6 @@ use super::*; use constants::*; -use prisma_models::CompositeFieldRef; +use query_structure::CompositeFieldRef; pub(crate) struct UpdateDataInputFieldMapper { unchecked: bool, diff --git a/query-engine/schema/src/build/input_types/fields/field_filter_types.rs b/query-engine/schema/src/build/input_types/fields/field_filter_types.rs index af2c77d006b4..84e6faa749ea 100644 --- a/query-engine/schema/src/build/input_types/fields/field_filter_types.rs +++ b/query-engine/schema/src/build/input_types/fields/field_filter_types.rs @@ -1,7 +1,7 @@ use super::{field_ref_type::WithFieldRefInputExt, objects::*, *}; use constants::{aggregations, filters}; -use prisma_models::{CompositeFieldRef, DefaultKind, NativeTypeInstance, PrismaValue}; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{CompositeFieldRef, DefaultKind, NativeTypeInstance, PrismaValue}; /// Builds filter types for the given model field. pub(crate) fn get_field_filter_types( diff --git a/query-engine/schema/src/build/input_types/mod.rs b/query-engine/schema/src/build/input_types/mod.rs index 98c8caa84a38..14ff37722d6d 100644 --- a/query-engine/schema/src/build/input_types/mod.rs +++ b/query-engine/schema/src/build/input_types/mod.rs @@ -3,7 +3,7 @@ pub(crate) mod objects; use super::*; use fields::*; -use prisma_models::ScalarFieldRef; +use query_structure::ScalarFieldRef; fn map_scalar_input_type_for_field<'a>(ctx: &'a QuerySchema, field: &ScalarFieldRef) -> InputType<'a> { map_scalar_input_type(ctx, field.type_identifier(), field.is_list()) diff --git a/query-engine/schema/src/build/input_types/objects/filter_objects.rs b/query-engine/schema/src/build/input_types/objects/filter_objects.rs index 0ea555f77724..6ae66ca4219b 100644 --- a/query-engine/schema/src/build/input_types/objects/filter_objects.rs +++ b/query-engine/schema/src/build/input_types/objects/filter_objects.rs @@ -1,6 +1,6 @@ use super::*; use constants::filters; -use prisma_models::{prelude::ParentContainer, CompositeFieldRef}; +use query_structure::{prelude::ParentContainer, CompositeFieldRef}; pub(crate) fn scalar_filter_object_type( ctx: &'_ QuerySchema, diff --git a/query-engine/schema/src/build/input_types/objects/order_by_objects.rs b/query-engine/schema/src/build/input_types/objects/order_by_objects.rs index e5112c5c71f5..b36670f053d2 100644 --- a/query-engine/schema/src/build/input_types/objects/order_by_objects.rs +++ b/query-engine/schema/src/build/input_types/objects/order_by_objects.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use super::*; use constants::{aggregations, ordering}; use output_types::aggregation; -use prisma_models::prelude::ParentContainer; +use query_structure::prelude::ParentContainer; #[derive(Debug, Default, Clone, Copy)] pub(crate) struct OrderByOptions { diff --git a/query-engine/schema/src/build/mutations/create_many.rs b/query-engine/schema/src/build/mutations/create_many.rs index c96314e90a1a..9ef94df26240 100644 --- a/query-engine/schema/src/build/mutations/create_many.rs +++ b/query-engine/schema/src/build/mutations/create_many.rs @@ -3,8 +3,8 @@ use crate::{Identifier, IdentifierType, InputField, InputType, OutputField, Outp use 
constants::*; use input_types::{fields::data_input_mapper::*, list_union_type}; use output_types::objects; -use prisma_models::{Model, RelationFieldRef}; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{Model, RelationFieldRef}; /// Builds a create many mutation field (e.g. createManyUsers) for given model. pub(crate) fn create_many(ctx: &'_ QuerySchema, model: Model) -> OutputField<'_> { diff --git a/query-engine/schema/src/build/mutations/create_one.rs b/query-engine/schema/src/build/mutations/create_one.rs index 0452e4885011..11699c7cce19 100644 --- a/query-engine/schema/src/build/mutations/create_one.rs +++ b/query-engine/schema/src/build/mutations/create_one.rs @@ -5,7 +5,7 @@ use crate::{ use constants::*; use input_types::fields::data_input_mapper::*; use output_types::objects; -use prisma_models::{Model, RelationFieldRef}; +use query_structure::{Model, RelationFieldRef}; /// Builds a create mutation field (e.g. createUser) for given model. pub(crate) fn create_one(ctx: &QuerySchema, model: Model) -> OutputField<'_> { diff --git a/query-engine/schema/src/build/output_types/aggregation/mod.rs b/query-engine/schema/src/build/output_types/aggregation/mod.rs index 30db9edca672..4b3043c6c804 100644 --- a/query-engine/schema/src/build/output_types/aggregation/mod.rs +++ b/query-engine/schema/src/build/output_types/aggregation/mod.rs @@ -1,5 +1,5 @@ use super::*; -use prisma_models::{prelude::ParentContainer, ScalarField}; +use query_structure::{prelude::ParentContainer, ScalarField}; pub(crate) mod group_by; pub(crate) mod plain; diff --git a/query-engine/schema/src/build/output_types/field.rs b/query-engine/schema/src/build/output_types/field.rs index 2fb5bce366df..29924c9d98c1 100644 --- a/query-engine/schema/src/build/output_types/field.rs +++ b/query-engine/schema/src/build/output_types/field.rs @@ -1,6 +1,6 @@ use super::*; use input_types::fields::arguments; -use prisma_models::{CompositeFieldRef, ScalarFieldRef}; +use query_structure::{CompositeFieldRef, ScalarFieldRef}; pub(crate) fn map_output_field(ctx: &'_ QuerySchema, model_field: ModelField) -> OutputField<'_> { let cloned_model_field = model_field.clone(); diff --git a/query-engine/schema/src/build/output_types/mutation_type.rs b/query-engine/schema/src/build/output_types/mutation_type.rs index 6aff5185de74..b0202360acb3 100644 --- a/query-engine/schema/src/build/output_types/mutation_type.rs +++ b/query-engine/schema/src/build/output_types/mutation_type.rs @@ -1,8 +1,8 @@ use super::*; use input_types::fields::arguments; use mutations::{create_many, create_one}; -use prisma_models::{DefaultKind, PrismaValue}; use psl::datamodel_connector::ConnectorCapability; +use query_structure::{DefaultKind, PrismaValue}; /// Builds the root `Mutation` type. 
pub(crate) fn mutation_fields(ctx: &QuerySchema) -> Vec { diff --git a/query-engine/schema/src/build/output_types/objects/composite.rs b/query-engine/schema/src/build/output_types/objects/composite.rs index f52d25de3c10..ca64cdaaaa55 100644 --- a/query-engine/schema/src/build/output_types/objects/composite.rs +++ b/query-engine/schema/src/build/output_types/objects/composite.rs @@ -1,7 +1,7 @@ #![allow(clippy::unnecessary_to_owned)] use super::*; -use prisma_models::CompositeType; +use query_structure::CompositeType; pub(crate) fn composite_object_type(ctx: &'_ QuerySchema, composite: CompositeType) -> ObjectType<'_> { ObjectType::new(Identifier::new_model(composite.name().to_owned()), move || { diff --git a/query-engine/schema/src/build/utils.rs b/query-engine/schema/src/build/utils.rs index 4eeafcb23c82..d7ee3106d230 100644 --- a/query-engine/schema/src/build/utils.rs +++ b/query-engine/schema/src/build/utils.rs @@ -1,6 +1,6 @@ use super::*; use once_cell::sync::Lazy; -use prisma_models::{walkers, DefaultKind}; +use query_structure::{walkers, DefaultKind}; use std::borrow::Cow; /// Input object type convenience wrapper function. diff --git a/query-engine/schema/src/enum_type.rs b/query-engine/schema/src/enum_type.rs index b9225b30cd17..7529e95d27da 100644 --- a/query-engine/schema/src/enum_type.rs +++ b/query-engine/schema/src/enum_type.rs @@ -1,5 +1,5 @@ use super::*; -use prisma_models::{InternalEnum, PrismaValue, ScalarFieldRef}; +use query_structure::{InternalEnum, PrismaValue, ScalarFieldRef}; #[derive(Debug, Clone, PartialEq)] pub enum EnumType { diff --git a/query-engine/schema/src/identifier_type.rs b/query-engine/schema/src/identifier_type.rs index 0d24efc0cc58..825a8dd741c9 100644 --- a/query-engine/schema/src/identifier_type.rs +++ b/query-engine/schema/src/identifier_type.rs @@ -1,5 +1,5 @@ use crate::{capitalize, constants::ordering, scalar_filter_name}; -use prisma_models::{ast::FieldArity, prelude::*, *}; +use query_structure::{ast::FieldArity, prelude::*, *}; /// Enum used to represent unique schema type names. 
/// It helps deferring the allocation + formatting of strings diff --git a/query-engine/schema/src/input_types.rs b/query-engine/schema/src/input_types.rs index 176a31a60a16..3a6c0610f600 100644 --- a/query-engine/schema/src/input_types.rs +++ b/query-engine/schema/src/input_types.rs @@ -1,7 +1,7 @@ use super::*; use fmt::Debug; use once_cell::sync::Lazy; -use prisma_models::{prelude::ParentContainer, DefaultKind}; +use query_structure::{prelude::ParentContainer, DefaultKind}; use std::{borrow::Cow, boxed::Box, fmt}; type InputObjectFields<'a> = diff --git a/query-engine/schema/src/output_types.rs b/query-engine/schema/src/output_types.rs index 7aa949f79083..32956d01d50b 100644 --- a/query-engine/schema/src/output_types.rs +++ b/query-engine/schema/src/output_types.rs @@ -1,7 +1,7 @@ use super::*; use fmt::Debug; use once_cell::sync::Lazy; -use prisma_models::ast::ModelId; +use query_structure::ast::ModelId; use std::{borrow::Cow, fmt}; #[derive(Debug, Clone)] diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index f48572db32fb..0324896aea07 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -1,9 +1,9 @@ use crate::{IdentifierType, ObjectType, OutputField}; -use prisma_models::{ast, InternalDataModel}; use psl::{ datamodel_connector::{Connector, ConnectorCapabilities, ConnectorCapability, RelationMode}, PreviewFeature, PreviewFeatures, }; +use query_structure::{ast, InternalDataModel}; use std::{collections::HashMap, fmt}; #[derive(Clone, Debug, Hash, Eq, PartialEq)]