From 9b21b9f1a34ef29c3992abdb81d40c38174752f3 Mon Sep 17 00:00:00 2001 From: Alencar da Costa Date: Fri, 8 Sep 2023 07:52:20 -0300 Subject: [PATCH 001/128] fix(metrics): update mobc to latest version (#4193) It also fixes reporting of initial values of aliased metrics, which otherwise wouldn't be reported until emitted by mobc. Co-authored-by: Jan Piotrowski --- Cargo.lock | 4 ++-- .../black-box-tests/tests/metrics/smoke_tests.rs | 16 +++++++++++----- query-engine/metrics/src/lib.rs | 11 +++++++++-- 3 files changed, 22 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c9fb7e1e1498..3cabff4234ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2308,9 +2308,9 @@ dependencies = [ [[package]] name = "mobc" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc79c4a77e312fee9c7bd4b957c12ad1196db73c4a81e5c0b13f02083c4f7f2f" +checksum = "0bdeff49b387edef305eccfe166af3e1483bb57902dbf369dddc42dc824df23b" dependencies = [ "async-trait", "futures-channel", diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index e3826d9cafe2..6b81dd2e3e87 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -55,15 +55,21 @@ mod smoke_tests { .await .unwrap(); + // counters assert!(metrics.contains("prisma_client_queries_total counter")); assert!(metrics.contains("prisma_datasource_queries_total counter")); - assert!(metrics.contains("prisma_pool_connections_open counter")); - assert!(metrics.contains("prisma_client_queries_active gauge")); - assert!(metrics.contains("prisma_client_queries_wait gauge")); + assert!(metrics.contains("prisma_pool_connections_opened_total counter")); + assert!(metrics.contains("prisma_pool_connections_closed_total counter")); + // gauges + assert!(metrics.contains("prisma_pool_connections_open gauge")); assert!(metrics.contains("prisma_pool_connections_busy gauge")); assert!(metrics.contains("prisma_pool_connections_idle gauge")); - assert!(metrics.contains("prisma_pool_connections_opened_total gauge")); - assert!(metrics.contains("prisma_datasource_queries_duration_histogram_ms_bucket")); + assert!(metrics.contains("prisma_client_queries_active gauge")); + assert!(metrics.contains("prisma_client_queries_wait gauge")); + // histograms + assert!(metrics.contains("prisma_client_queries_duration_histogram_ms histogram")); + assert!(metrics.contains("prisma_client_queries_wait_histogram_ms histogram")); + assert!(metrics.contains("prisma_datasource_queries_duration_histogram_ms histogram")); }) .await } diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs index 4e9fcc69f5c5..4fb3be90ad42 100644 --- a/query-engine/metrics/src/lib.rs +++ b/query-engine/metrics/src/lib.rs @@ -79,11 +79,18 @@ const ACCEPT_LIST: &[&str] = &[ MOBC_POOL_CONNECTIONS_IDLE, MOBC_POOL_WAIT_COUNT, MOBC_POOL_WAIT_DURATION, - PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, PRISMA_CLIENT_QUERIES_TOTAL, - PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, PRISMA_DATASOURCE_QUERIES_TOTAL, + PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, + PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, + PRISMA_POOL_CONNECTIONS_OPEN, + PRISMA_POOL_CONNECTIONS_BUSY, + PRISMA_POOL_CONNECTIONS_IDLE, + PRISMA_CLIENT_QUERIES_WAIT, PRISMA_CLIENT_QUERIES_ACTIVE, + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS, + PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, ]; 
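The assertions in the smoke test run against the Prometheus text rendered by the engine's `/metrics` endpoint: each metric must be declared with the expected type, and per the commit message the renamed `prisma_*` aliases should show up with their initial values immediately rather than only once mobc first emits them. A minimal sketch of this style of check — the `rendered` parameter and the helper itself are illustrative, not part of the patch:

```rust
// Hypothetical helper: `rendered` stands in for the scraped /metrics payload.
fn assert_metric_types(rendered: &str) {
    // Metric names from the smoke test, paired with the Prometheus type
    // each must be declared with after the mobc update.
    let expected = [
        ("prisma_pool_connections_opened_total", "counter"),
        ("prisma_pool_connections_open", "gauge"),
        ("prisma_datasource_queries_duration_histogram_ms", "histogram"),
    ];
    for (name, kind) in expected {
        assert!(
            rendered.contains(&format!("{name} {kind}")),
            "expected {name} to be exposed as a {kind}"
        );
    }
}
```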
// Some of the metrics we receive have their internal names, and we need to expose them under a different From ae074a73c82cffe83fa074ac12510c28a868b69d Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 11 Sep 2023 10:38:16 +0200 Subject: [PATCH 002/128] qe: fix dmmf for compound unique and id with unsupported types (#4219) --- query-engine/dmmf/src/tests/tests.rs | 48 +++++++++++++++++++ .../input_types/objects/filter_objects.rs | 2 + 2 files changed, 50 insertions(+) diff --git a/query-engine/dmmf/src/tests/tests.rs b/query-engine/dmmf/src/tests/tests.rs index f1d597710b56..25f83e64447d 100644 --- a/query-engine/dmmf/src/tests/tests.rs +++ b/query-engine/dmmf/src/tests/tests.rs @@ -87,6 +87,54 @@ fn unsupported_in_composite_type() { dmmf_from_schema(schema); } +// Regression test for https://github.com/prisma/prisma/issues/20986 +#[test] +fn unusupported_in_compound_unique_must_not_panic() { + let schema = r#" + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + } + + generator client { + provider = "postgresql" + } + + model A { + id Int @id + field Int + unsupported Unsupported("tstzrange") + + @@unique([field, unsupported]) + } + "#; + + dmmf_from_schema(schema); +} + +#[test] +fn unusupported_in_compound_id_must_not_panic() { + let schema = r#" + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + } + + generator client { + provider = "postgresql" + } + + model A { + field Int @unique + unsupported Unsupported("tstzrange") + + @@id([field, unsupported]) + } + "#; + + dmmf_from_schema(schema); +} + const SNAPSHOTS_PATH: &str = concat!( env!("CARGO_MANIFEST_DIR"), "/src", diff --git a/query-engine/schema/src/build/input_types/objects/filter_objects.rs b/query-engine/schema/src/build/input_types/objects/filter_objects.rs index b8af982182a1..0ea555f77724 100644 --- a/query-engine/schema/src/build/input_types/objects/filter_objects.rs +++ b/query-engine/schema/src/build/input_types/objects/filter_objects.rs @@ -113,6 +113,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In .indexes() .filter(|idx| idx.is_unique()) .filter(|index| index.fields().len() > 1) + .filter(|index| !index.fields().any(|f| f.is_unsupported())) .map(|index| { let fields = index .fields() @@ -130,6 +131,7 @@ pub(crate) fn where_unique_object_type(ctx: &'_ QuerySchema, model: Model) -> In .walk(model.id) .primary_key() .filter(|pk| pk.fields().len() > 1) + .filter(|pk| !pk.fields().any(|f| f.is_unsupported())) .map(|pk| { let name = compound_id_field_name(pk); let fields = model.fields().id_fields().unwrap().collect(); From e90b936d84779543cbe0e494bc8b9d7337fad8e4 Mon Sep 17 00:00:00 2001 From: Sophie <29753584+Druue@users.noreply.github.com> Date: Mon, 11 Sep 2023 14:11:57 +0200 Subject: [PATCH 003/128] feat(fmt): qf `SetDefault` -> `NoAction` on mysql w/ foreignkeys (#4210) * Add quickfix to migrate `SetDefault` -> `NoAction` when using `relationMode = "foreignkeys"` on mysql closes https://github.com/prisma/language-tools/issues/1286 --- prisma-fmt/src/code_actions.rs | 10 ++++ prisma-fmt/src/code_actions/relation_mode.rs | 50 ++++++++++++++++++- .../result.json | 41 +++++++++++++++ .../schema.prisma | 29 +++++++++++ prisma-fmt/tests/code_actions/tests.rs | 1 + .../src/walkers/relation/inline/complete.rs | 5 ++ 6 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json create mode 100644 
prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma

diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs
index b9dbdc58067d..1037192b1a93 100644
--- a/prisma-fmt/src/code_actions.rs
+++ b/prisma-fmt/src/code_actions.rs
@@ -99,6 +99,16 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec<CodeActionOrCommand>
                 complete_relation.referencing_field(),
             );
         }
+
+        if validated_schema.relation_mode().uses_foreign_keys() {
+            relation_mode::replace_set_default_mysql(
+                &mut actions,
+                &params,
+                validated_schema.db.source(),
+                complete_relation,
+                config,
+            )
+        }
     }
 }
diff --git a/prisma-fmt/src/code_actions/relation_mode.rs b/prisma-fmt/src/code_actions/relation_mode.rs
index 28d9018220e7..751fb956073b 100644
--- a/prisma-fmt/src/code_actions/relation_mode.rs
+++ b/prisma-fmt/src/code_actions/relation_mode.rs
@@ -1,5 +1,5 @@
 use lsp_types::{CodeAction, CodeActionKind, CodeActionOrCommand};
-use psl::schema_ast::ast::SourceConfig;
+use psl::{parser_database::walkers::CompleteInlineRelationWalker, schema_ast::ast::SourceConfig, Configuration};
 
 pub(crate) fn edit_referential_integrity(
     actions: &mut Vec<CodeActionOrCommand>,
@@ -35,3 +35,51 @@
 
     actions.push(CodeActionOrCommand::CodeAction(action))
 }
+
+pub(crate) fn replace_set_default_mysql(
+    actions: &mut Vec<CodeActionOrCommand>,
+    params: &lsp_types::CodeActionParams,
+    schema: &str,
+    relation: CompleteInlineRelationWalker<'_>,
+    config: &Configuration,
+) {
+    let datasource = match config.datasources.first() {
+        Some(ds) => ds,
+        None => return,
+    };
+
+    if datasource.active_connector.provider_name() != "mysql" {
+        return;
+    }
+
+    let span = match relation.on_update_span() {
+        Some(span) => span,
+        None => return,
+    };
+
+    let span_diagnostics = match super::diagnostics_for_span(schema, &params.context.diagnostics, span) {
+        Some(sd) => sd,
+        None => return,
+    };
+
+    let diagnostics = match
+        super::filter_diagnostics(
+            span_diagnostics,
+            "MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors.") {
+        Some(value) => value,
+        None => return,
+    };
+
+    let edit = super::create_text_edit(schema, "NoAction".to_owned(), false, span, params);
+
+    let action = CodeAction {
+        title: r#"Replace SetDefault with NoAction"#.to_owned(),
+
+        kind: Some(CodeActionKind::QUICKFIX),
+        edit: Some(edit),
+        diagnostics: Some(diagnostics),
+        ..Default::default()
+    };
+
+    actions.push(CodeActionOrCommand::CodeAction(action))
+}
diff --git a/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json
new file mode 100644
index 000000000000..d31f54355c36
--- /dev/null
+++ b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/result.json
@@ -0,0 +1,41 @@
+[
+  {
+    "title": "Replace SetDefault with NoAction",
+    "kind": "quickfix",
+    "diagnostics": [
+      {
+        "range": {
+          "start": {
+            "line": 14,
+            "character": 62
+          },
+          "end": {
+            "line": 14,
+            "character": 82
+          }
+        },
+        "severity": 2,
+        "message": "MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors. Read more at https://pris.ly/d/mysql-set-default "
+      }
+    ],
+    "edit": {
+      "changes": {
+        "file:///path/to/schema.prisma": [
+          {
+            "range": {
+              "start": {
+                "line": 14,
+                "character": 72
+              },
+              "end": {
+                "line": 14,
+                "character": 82
+              }
+            },
+            "newText": "NoAction"
+          }
+        ]
+      }
+    }
+  }
+]
\ No newline at end of file
diff --git a/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma
new file mode 100644
index 000000000000..b13952553002
--- /dev/null
+++ b/prisma-fmt/tests/code_actions/scenarios/relation_mode_mysql_foreign_keys_set_default/schema.prisma
@@ -0,0 +1,29 @@
+generator client {
+  provider = "prisma-client-js"
+}
+
+datasource db {
+  provider     = "mysql"
+  url          = env("DATABASE_URL")
+  relationMode = "foreignKeys"
+}
+
+/// multi line
+/// commennttt
+model Foo {
+  id     Int  @id
+  bar    Bar  @relation(fields: [bar_id], references: [id], onUpdate: SetDefault)
+  bar_id Int  @unique
+  t      Test
+}
+
+model Bar {
+  id  Int  @id
+  foo Foo?
+}
+
+// This is a test enum.
+enum Test {
+  TestUno
+  TestDue
+}
diff --git a/prisma-fmt/tests/code_actions/tests.rs b/prisma-fmt/tests/code_actions/tests.rs
index 41035ed65d93..e76179204d92 100644
--- a/prisma-fmt/tests/code_actions/tests.rs
+++ b/prisma-fmt/tests/code_actions/tests.rs
@@ -25,6 +25,7 @@ scenarios! {
     one_to_one_referencing_side_misses_unique_compound_field_indentation_four_spaces
     relation_mode_prisma_missing_index
     relation_mode_referential_integrity
+    relation_mode_mysql_foreign_keys_set_default
     multi_schema_one_model
     multi_schema_one_model_one_enum
     multi_schema_two_models
diff --git a/psl/parser-database/src/walkers/relation/inline/complete.rs b/psl/parser-database/src/walkers/relation/inline/complete.rs
index 1c5536e948a6..3f7b1b67dc60 100644
--- a/psl/parser-database/src/walkers/relation/inline/complete.rs
+++ b/psl/parser-database/src/walkers/relation/inline/complete.rs
@@ -2,6 +2,7 @@ use crate::{
     walkers::{ModelWalker, RelationFieldId, RelationFieldWalker, ScalarFieldWalker},
     ParserDatabase, ReferentialAction,
 };
+use diagnostics::Span;
 use schema_ast::ast;
 
 /// Represents a relation that has fields and references defined in one of the
@@ -65,6 +66,10 @@ impl<'db> CompleteInlineRelationWalker<'db> {
             .unwrap_or(Cascade)
     }
 
+    pub fn on_update_span(self) -> Option<Span> {
+        self.referencing_field().attributes().on_update.map(|(_, span)| span)
+    }
+
     /// Prisma allows setting the relation field as optional, even if one of the
     /// underlying scalar fields is required. 
For the purpose of referential
     /// actions, we count the relation field required if any of the underlying

From 4991945b6a4f1c44dfd4bb39f38a9af64ade7465 Mon Sep 17 00:00:00 2001
From: Alberto Schiabel 
Date: Mon, 11 Sep 2023 15:45:18 +0200
Subject: [PATCH 004/128] fix(driver-adapters): Idempotent activation of driver
 adapters (#4222)

* fix(driver-adapters): stop caching the previous driver adapter in case of same "provider" on the nth libquery constructor invocation

* fix(driver-adapters): clippy

* fix(driver-adapters): replace "HashMap" registry with simpler "Option"

* fix(driver-adapters): clippy

* remove indirection, change naming

---------

Co-authored-by: Miguel Fernandez 
---
 .../sql-query-connector/src/database/js.rs    | 37 ++++++-------------
 .../connectors/sql-query-connector/src/lib.rs |  2 +-
 .../query-engine-node-api/src/engine.rs       | 13 +++----
 3 files changed, 17 insertions(+), 35 deletions(-)

diff --git a/query-engine/connectors/sql-query-connector/src/database/js.rs b/query-engine/connectors/sql-query-connector/src/database/js.rs
index 1dced9453fa3..5b22653647f8 100644
--- a/query-engine/connectors/sql-query-connector/src/database/js.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/js.rs
@@ -11,40 +11,25 @@ use quaint::{
     connector::{IsolationLevel, Transaction},
     prelude::{Queryable as QuaintQueryable, *},
 };
-use std::{
-    collections::{hash_map::Entry, HashMap},
-    sync::{Arc, Mutex},
-};
+use std::sync::{Arc, Mutex};
 
-/// Registry is the type for the global registry of driver adapters.
-type Registry = HashMap<String, DriverAdapter>;
+static ACTIVE_DRIVER_ADAPTER: Lazy<Mutex<Option<DriverAdapter>>> = Lazy::new(|| Mutex::new(None));
 
-/// REGISTRY is the global registry of Driver Adapters.
-static REGISTRY: Lazy<Mutex<Registry>> = Lazy::new(|| Mutex::new(HashMap::new()));
+fn active_driver_adapter(provider: &str) -> connector::Result<DriverAdapter> {
+    let lock = ACTIVE_DRIVER_ADAPTER.lock().unwrap();
 
-fn registered_driver_adapter(provider: &str) -> connector::Result<DriverAdapter> {
-    let lock = REGISTRY.lock().unwrap();
-    lock.get(provider)
+    lock.as_ref()
+        .map(|conn_ref| conn_ref.to_owned())
         .ok_or(ConnectorError::from_kind(ErrorKind::UnsupportedConnector(format!(
             "A driver adapter for {} was not registered",
             provider
         ))))
-        .map(|conn_ref| conn_ref.to_owned())
 }
 
-pub fn register_driver_adapter(provider: &str, connector: Arc) -> Result<(), String> {
-    let mut lock = REGISTRY.lock().unwrap();
-    let entry = lock.entry(provider.to_string());
-    match entry {
-        Entry::Occupied(_) => Err(format!(
-            "A driver adapter for {} was already registered, and cannot be overridden.",
-            provider
-        )),
-        Entry::Vacant(v) => {
-            v.insert(DriverAdapter { connector });
-            Ok(())
-        }
-    }
+pub fn activate_driver_adapter(connector: Arc) {
+    let mut lock = ACTIVE_DRIVER_ADAPTER.lock().unwrap();
+
+    *lock = Some(DriverAdapter { connector });
 }
 
 pub struct Js {
@@ -69,7 +54,7 @@ impl FromSource for Js {
         url: &str,
         features: psl::PreviewFeatures,
     ) -> connector_interface::Result {
-        let connector = registered_driver_adapter(source.active_provider)?;
+        let connector = active_driver_adapter(source.active_provider)?;
         let connection_info = get_connection_info(url)?;
 
         Ok(Js {
diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs
index 06aa1e376c4a..d98f87d9a92e 100644
--- a/query-engine/connectors/sql-query-connector/src/lib.rs
+++ b/query-engine/connectors/sql-query-connector/src/lib.rs
@@ -23,7 +23,7 @@ use self::{column_metadata::*, context::Context, filter_conversion::*, query_ext
 use quaint::prelude::Queryable; 
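The rewrite above replaces the per-provider `HashMap` registry with a single global slot, so a second engine constructor invocation for the same provider overwrites the active adapter instead of failing on an occupied entry. A standalone sketch of that pattern, with a `String` standing in for `DriverAdapter` and assuming the `once_cell` crate that provides `Lazy`:

```rust
use once_cell::sync::Lazy;
use std::sync::Mutex;

// A single global slot: activating again simply overwrites the previous
// adapter instead of failing with an "already registered" error.
static ACTIVE: Lazy<Mutex<Option<String>>> = Lazy::new(|| Mutex::new(None));

fn activate(adapter: String) {
    *ACTIVE.lock().unwrap() = Some(adapter); // last activation wins
}

fn active() -> Option<String> {
    ACTIVE.lock().unwrap().clone()
}

fn main() {
    activate("postgres-adapter".to_owned());
    activate("postgres-adapter".to_owned()); // idempotent: second call is harmless
    assert_eq!(active().as_deref(), Some("postgres-adapter"));
}
```

Holding a `Mutex<Option<_>>` makes activation idempotent by construction: there is no occupied-entry state left over to collide with on the next call.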
#[cfg(feature = "driver-adapters")] -pub use database::{register_driver_adapter, Js}; +pub use database::{activate_driver_adapter, Js}; pub use database::{FromSource, Mssql, Mysql, PostgreSql, Sqlite}; pub use error::SqlError; diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 37baeaee2c60..8b53f9dfc8c5 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -183,15 +183,12 @@ impl QueryEngine { #[cfg(feature = "driver-adapters")] if let Some(driver) = maybe_driver { let js_queryable = driver_adapters::from_napi(driver); - let provider_name = schema.connector.provider_name(); - match sql_connector::register_driver_adapter(provider_name, Arc::new(js_queryable)) { - Ok(_) => { - connector_mode = ConnectorMode::Js; - tracing::info!("Registered driver adapter for {provider_name}.") - } - Err(err) => tracing::error!("Failed to register driver adapter for {provider_name}. {err}"), - } + sql_connector::activate_driver_adapter(Arc::new(js_queryable)); + connector_mode = ConnectorMode::Js; + + let provider_name = schema.connector.provider_name(); + tracing::info!("Registered driver adapter for {provider_name}."); } } From 9e8c303386f912f83149807b0a143ef9b1e47900 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 11 Sep 2023 18:13:53 +0200 Subject: [PATCH 005/128] feat(driver-adapters): assertions in driver adapters (#4211) * chore(driver-adapters): remove outdated part of README * test(driver-adapters): turn libquery tests into assertions * chore: merge main, fix conflicts, unify test commands * chore: bump prisma version * fix: README * chore: add pgbouncer=true comment in .envrc.example to trigger "DEALLOCATE ALL" in "@prisma/client" tests with Rust drivers * fix: remove type ignores and update Prisma Client initialisation * chore: address comments * chore: remove unused var * chore: Update query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts Co-authored-by: Alexey Orlenko * chore: apply nit Co-authored-by: Alexey Orlenko * chore: remove unused code --------- Co-authored-by: Alexey Orlenko --- .../driver-adapters/js/pnpm-lock.yaml | 24 +- .../js/smoke-test-js/README.md | 30 +- .../js/smoke-test-js/package.json | 18 +- .../js/smoke-test-js/src/client/client.ts | 26 +- .../js/smoke-test-js/src/libquery/libquery.ts | 560 ++++++++++++------ .../{neon.http.ts => neon.http.test.ts} | 14 +- .../libquery/{neon.ws.ts => neon.ws.test.ts} | 14 +- .../src/libquery/{pg.ts => pg.test.ts} | 12 +- .../{planetscale.ts => planetscale.test.ts} | 16 +- 9 files changed, 469 insertions(+), 245 deletions(-) rename query-engine/driver-adapters/js/smoke-test-js/src/libquery/{neon.http.ts => neon.http.test.ts} (67%) rename query-engine/driver-adapters/js/smoke-test-js/src/libquery/{neon.ws.ts => neon.ws.test.ts} (68%) rename query-engine/driver-adapters/js/smoke-test-js/src/libquery/{pg.ts => pg.test.ts} (70%) rename query-engine/driver-adapters/js/smoke-test-js/src/libquery/{planetscale.ts => planetscale.test.ts} (57%) diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 0c4b01a68f67..4b4225882ba1 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -82,8 +82,8 @@ importers: specifier: ^1.11.0 version: 1.11.0 '@prisma/client': - specifier: 5.3.0-integration-feat-driver-adapters-in-client.1 - version: 
5.3.0-integration-feat-driver-adapters-in-client.1(prisma@5.3.0-integration-feat-driver-adapters-in-client.1) + specifier: 5.3.0-integration-feat-driver-adapters-in-client.3 + version: 5.3.0-integration-feat-driver-adapters-in-client.3(prisma@5.3.0-integration-feat-driver-adapters-in-client.3) pg: specifier: ^8.11.3 version: 8.11.3 @@ -104,8 +104,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.3.0-integration-feat-driver-adapters-in-client.1 - version: 5.3.0-integration-feat-driver-adapters-in-client.1 + specifier: 5.3.0-integration-feat-driver-adapters-in-client.3 + version: 5.3.0-integration-feat-driver-adapters-in-client.3 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -391,8 +391,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.3.0-integration-feat-driver-adapters-in-client.1(prisma@5.3.0-integration-feat-driver-adapters-in-client.1): - resolution: {integrity: sha512-izGFo8RFgmHibBzQGRx66xfh08LcGaOysNWvMRgqT018kZ8c98qqfI0/E+LFgxb3Ar0hqz2zX8M4Fa56KvI6cw==} + /@prisma/client@5.3.0-integration-feat-driver-adapters-in-client.3(prisma@5.3.0-integration-feat-driver-adapters-in-client.3): + resolution: {integrity: sha512-L/y90JbrWyJGXiXvtKfRKDgqTjTEQ2rkzdb0tlf4Uu9jIyBxiKr5qmDl7vRagN1JXeOkYqinsi+598MtJ7pGFA==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -402,15 +402,15 @@ packages: optional: true dependencies: '@prisma/engines-version': 5.3.0-28.3457e5de04da1741c969a80068702ad103e99553 - prisma: 5.3.0-integration-feat-driver-adapters-in-client.1 + prisma: 5.3.0-integration-feat-driver-adapters-in-client.3 dev: false /@prisma/engines-version@5.3.0-28.3457e5de04da1741c969a80068702ad103e99553: resolution: {integrity: sha512-eb+8hgURyTu1qAWmTxgZCgBjf0UV6REC525fa1XnPpL6hxMZ7cEtFCX0f9GDopa/piCM9pq5H2ttthGOKQyVLA==} dev: false - /@prisma/engines@5.3.0-integration-feat-driver-adapters-in-client.1: - resolution: {integrity: sha512-euFOT9Wq0dVVXZjcLP/6/XRPr04dm4t9DtKJXUCk5Kja87bAy+knLdcC6Pkmbbjhi0fTThiKQOOxKxWBfXrr4A==} + /@prisma/engines@5.3.0-integration-feat-driver-adapters-in-client.3: + resolution: {integrity: sha512-Nt+lbsiE4jj4GGIyhLrcNy8fVwBjsZeQqNI2oMbgoCyMSZwkcUmMRuK7OJdzbxHBKpivnneF0WMhbv/fZTRGig==} requiresBuild: true /@types/debug@4.1.8: @@ -1020,13 +1020,13 @@ packages: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} dev: true - /prisma@5.3.0-integration-feat-driver-adapters-in-client.1: - resolution: {integrity: sha512-M5EjBFZ3P3mjgYOfRBLqg5wKKeXq/VTv2wF9Ft4YCMMsHlcIJJ9IMV1UkzZLmP1yTdMxougJcLeDA9QGmdpsMA==} + /prisma@5.3.0-integration-feat-driver-adapters-in-client.3: + resolution: {integrity: sha512-M9FQjLmJL7g4GnHwcsuf2WPqE3/B3k/laBkaq5XCxJcBMjoipNIGW0ZlZKY9t+TdJ14asGrv4+7o7mAmKLZqrw==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.3.0-integration-feat-driver-adapters-in-client.1 + '@prisma/engines': 5.3.0-integration-feat-driver-adapters-in-client.3 /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md index 62ec1d0439ea..e9c79ef930c8 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ b/query-engine/driver-adapters/js/smoke-test-js/README.md @@ -7,34 +7,52 @@ It 
contains a subset of `@prisma/client`, plus some handy executable smoke tests

## How to setup

-We assume Node.js `v20.5.1`+ is installed. If not, run `nvm use` in the current directory.
+We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory.
It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon require either Node.js `v18`+ or a custom `fetch` function.

+In the parent directory (`cd ..`):
+- Build the driver adapters via `pnpm i && pnpm build`
+
+In the current directory:
- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template
- Install Node.js dependencies via
```bash
pnpm i
```
+
+Anywhere in the repository:
- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine

### PlanetScale

+If you don't have a connection string yet:
+
+- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/)
- Create a new database on [PlanetScale](https://planetscale.com/)
- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`.

In the current directory:
- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data.
- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database.
-- Run `pnpm planetscale:client` to run smoke tests using `@prisma/client` against the PlanetScale database.
-
-Note: you used to be able to run these Prisma commands without changing the provider name, but [#4074](https://github.com/prisma/prisma-engines/pull/4074) changed that (see https://github.com/prisma/prisma-engines/pull/4074#issuecomment-1649942475).
+  For more fine-grained control:
+  - Run `pnpm planetscale:libquery` to test using `libquery`
+  - Run `pnpm planetscale:client` to test using `@prisma/client`

### Neon

+If you don't have a connection string yet:
+
+- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech).
- Create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech).
- Paste the connection string to `JS_NEON_DATABASE_URL`.

In the current directory:
- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data.
-- Run `pnpm neon` to run smoke tests using `libquery` against the Neon database.
-- Run `pnpm neon:client` to run smoke tests using `@prisma/client` against the Neon database.
+- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection.
+  For more fine-grained control:
+  - Run `pnpm neon:ws:libquery` to test using `libquery`
+  - Run `pnpm neon:ws:client` to test using `@prisma/client`
+- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail.
+  For more fine-grained control:
+  - Run `pnpm neon:http:libquery` to test using `libquery`
+  - Run `pnpm neon:http:client` to test using `@prisma/client`
diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json
index b04840a3cae2..3f05fa5b69d9 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/package.json
+++ b/query-engine/driver-adapters/js/smoke-test-js/package.json
@@ -10,16 +10,20 @@
     "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset",
     "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql",
     "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"",
-    "neon:ws": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"tsx ./src/libquery/neon.ws.ts\"",
-    "neon:http": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"tsx ./src/libquery/neon.http.ts\"",
+    "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/neon.ws.test.ts",
+    "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/neon.http.test.ts",
     "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.ws.test.ts",
     "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.http.test.ts",
+    "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client",
+    "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client",
     "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"",
-    "pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"tsx ./src/libquery/pg.ts\"",
+    "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/pg.test.ts",
     "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/client/pg.test.ts",
+    "pg": "pnpm pg:libquery && pnpm pg:client",
     "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"",
-    "planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"tsx ./src/libquery/planetscale.ts\"",
-    "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/client/planetscale.test.ts"
+    "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/planetscale.test.ts",
+    "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/client/planetscale.test.ts",
+    "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client"
   },
   "keywords": [],
   "author": "Alberto Schiabel ",
@@ -32,7 +36,7 @@
     "@jkomyno/prisma-driver-adapter-utils": "workspace:*",
     "@neondatabase/serverless": "^0.6.0",
     "@planetscale/database": "^1.11.0",
-    "@prisma/client": "5.3.0-integration-feat-driver-adapters-in-client.1",
+    "@prisma/client": "5.3.0-integration-feat-driver-adapters-in-client.3",
     "pg": "^8.11.3",
     "superjson": "^1.13.1",
     "undici": "^5.23.0"
@@ -41,7 +45,7 @@
     "@types/node": "^20.5.1",
     "@types/pg": "^8.10.2",
     "cross-env": "^7.0.3",
-    "prisma": "5.3.0-integration-feat-driver-adapters-in-client.1",
+    "prisma": 
"5.3.0-integration-feat-driver-adapters-in-client.3", "tsx": "^3.12.7" } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index 8367b43a7acf..7a9400fbd291 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -18,8 +18,7 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { it('batch queries', async () => { const prisma = new PrismaClient({ - // @ts-ignore - jsConnector: adapter, + adapter, log, }) @@ -48,15 +47,6 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { '-- Implicit "COMMIT" query via underlying driver', ] - const postgresExpectedQueries = [ - 'BEGIN', - 'DEALLOCATE ALL', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - if (['mysql'].includes(provider)) { if (isUsingDriverAdapters) { assert.deepEqual(queries, driverAdapterExpectedQueries) @@ -64,18 +54,18 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { assert.deepEqual(queries, defaultExpectedQueries) } } else if (['postgres'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, defaultExpectedQueries) - } else { - assert.deepEqual(queries, postgresExpectedQueries) - } + // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. + assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) + assert.deepEqual( + queries.filter((q) => q !== 'DEALLOCATE ALL'), + defaultExpectedQueries + ) } }) it('applies isolation level when using batch $transaction', async () => { const prisma = new PrismaClient({ - // @ts-ignore - jsConnector: adapter, + adapter, log, }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index f9c7925c9be3..324ca62d12dd 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -1,170 +1,417 @@ -import { setTimeout } from 'node:timers/promises' +import { describe, it, before, after } from 'node:test' +import assert from 'node:assert' import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' import { initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' -export async function smokeTestLibquery(db: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { - const engine = initQueryEngine(db, prismaSchemaRelativePath) +export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { + const engine = initQueryEngine(adapter, prismaSchemaRelativePath) + const flavour = adapter.flavour - console.log('[nodejs] connecting...') - await engine.connect('trace') - console.log('[nodejs] connected') - - const test = new SmokeTest(engine, db) - - await test.testJSON() - await test.testTypeTest2() - await test.testFindManyTypeTest() - await test.createAutoIncrement() - await test.testCreateAndDeleteChildParent() - await test.testTransaction() - await test.testRawError() - - // Note: calling `engine.disconnect` won't actually close the database connection. 
- console.log('[nodejs] disconnecting...') - await engine.disconnect('trace') - console.log('[nodejs] disconnected') - - console.log('[nodejs] re-connecting...') - await engine.connect('trace') - console.log('[nodejs] re-connecting') - - await setTimeout(0) - - console.log('[nodejs] re-disconnecting...') - await engine.disconnect('trace') - console.log('[nodejs] re-disconnected') - - // Close the database connection. This is required to prevent the process from hanging. - console.log('[nodejs] closing database connection...') - await db.close() - console.log('[nodejs] closed database connection') -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour + const doQuery = async (query: JsonQuery, tx_id?: string) => { + const result = await engine.query(JSON.stringify(query), 'trace', tx_id) + const parsedResult = JSON.parse(result) + if (parsedResult.errors) { + const error = parsedResult.errors[0]?.user_facing_error + if (error.error_code === 'P2036') { + const jsError = adapter.errorRegistry.consumeError(error.meta.id) + if (!jsError) { + throw new Error(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) + } + throw jsError.error + } + } + return parsedResult } - async testJSON() { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, + describe('using libquery with Driver Adapters', () => { + before(async () => { + await engine.connect('trace') }) - const created = await this.doQuery( - { - "action": "createOne", - "modelName": "Product", - "query": { - "arguments": { - "data": { - "properties": json, - "properties_null": null + after(async () => { + await engine.disconnect('trace') + await adapter.close() + }) + + it('raw error', async () => { + await assert.rejects(async () => { + await doQuery({ + action: 'queryRaw', + query: { + selection: { $scalars: true }, + arguments: { + query: 'NOT A VALID SQL, THIS WILL FAIL', + parameters: '[]' } - }, - "selection": { - "properties": true } - } + }) }) + }) - console.log('[nodejs] created', JSON.stringify(created, null, 2)) + it('create JSON values', async () => { + const json = JSON.stringify({ + foo: 'bar', + baz: 1, + }) - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "Product", - "query": { - "selection": { - "id": true, - "properties": true, - "properties_null": true + const created = await doQuery( + { + "action": "createOne", + "modelName": "Product", + "query": { + "arguments": { + "data": { + "properties": json, + "properties_null": null + } + }, + "selection": { + "properties": true + } } - } - } - ) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - await this.doQuery( - { - "action": "deleteMany", - "modelName": "Product", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true + }) + + assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') + console.log('[nodejs] created', JSON.stringify(created, null, 2)) + + const resultSet = await doQuery( + { + "action": "findMany", + "modelName": "Product", + "query": { + "selection": { + "id": true, + "properties": true, + "properties_null": true + } } } - } - ) - - return resultSet - } - - async testTypeTest2() { - const create = await this.doQuery( - { - "action": "createOne", - "modelName": "type_test_2", - "query": { - "arguments": { - 
"data": {} - }, - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true + ) + console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) + + await doQuery( + { + "action": "deleteMany", + "modelName": "Product", + "query": { + "arguments": { + "where": {} + }, + "selection": { + "count": true + } } } - } - ) - - console.log('[nodejs] create', JSON.stringify(create, null, 2)) + ) + }) - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test_2", - "query": { - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true - }, - "arguments": { - "where": {} + it('create with autoincrement', async () => { + await doQuery( + { + "modelName": "Author", + "action": "deleteMany", + "query": { + "arguments": { + "where": {} + }, + "selection": { + "count": true + } } } - } - ) + ) + + const author = await doQuery( + { + "modelName": "Author", + "action": "createOne", + "query": { + "arguments": { + "data": { + "firstName": "Firstname from autoincrement", + "lastName": "Lastname from autoincrement", + "age": 99 + } + }, + "selection": { + "id": true, + "firstName": true, + "lastName": true + } + } + } + ) + console.log('[nodejs] author', JSON.stringify(author, null, 2)) + }) - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) + it('create non scalar types', async () => { + const create = await doQuery( + { + "action": "createOne", + "modelName": "type_test_2", + "query": { + "arguments": { + "data": {} + }, + "selection": { + "id": true, + "datetime_column": true, + "datetime_column_null": true + } + } + } + ) + + console.log('[nodejs] create', JSON.stringify(create, null, 2)) + + const resultSet = await doQuery( + { + "action": "findMany", + "modelName": "type_test_2", + "query": { + "selection": { + "id": true, + "datetime_column": true, + "datetime_column_null": true + }, + "arguments": { + "where": {} + } + } + } + ) + + console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) + + await doQuery( + { + "action": "deleteMany", + "modelName": "type_test_2", + "query": { + "arguments": { + "where": {} + }, + "selection": { + "count": true + } + } + } + ) + }) - await this.doQuery( - { - "action": "deleteMany", - "modelName": "type_test_2", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true + it('create/delete parent and child', async () => { + /* Delete all child and parent records */ + + // Queries: [ + // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', + // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', + // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' + // ] + await doQuery( + { + "modelName": "Child", + "action": "deleteMany", + "query": { + "arguments": { + "where": {} + }, + "selection": { + "count": true + } + } + } + ) + + // Queries: [ + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', + // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND 1=1)' + // ] + await doQuery( + { + "modelName": "Parent", + "action": "deleteMany", + "query": { + "arguments": { + "where": {} + }, + "selection": { + "count": true + } + } + } + ) + + /* Create a parent with some new children, within a transaction */ + + // Queries: [ + // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', + // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', + // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', + // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' + // ] + await doQuery( + { + "modelName": "Parent", + "action": "createOne", + "query": { + "arguments": { + "data": { + "p": "p1", + "p_1": "1", + "p_2": "2", + "childOpt": { + "create": { + "c": "c1", + "c_1": "foo", + "c_2": "bar" + } + } + } + }, + "selection": { + "p": true, + "childOpt": { + "selection": { + "c": true + } + } + } } } + ) + + /* Delete the parent */ + + // Queries: [ + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', + // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', + // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', + // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', + // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' + // ] + await doQuery( + { + "modelName": "Parent", + "action": "deleteMany", + "query": { + "arguments": { + "where": { + "p": "p1" + } + }, + "selection": { + "count": true + } + } + } + ) + }) + + it('create explicit transaction', async () => { + const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } + const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') + const tx_id = JSON.parse(startResponse).id + + console.log('[nodejs] transaction id', tx_id) + await doQuery( + { + "action": "findMany", + "modelName": "Author", + "query": { + "selection": { "$scalars": true } + } + }, + tx_id + ) + + const commitResponse = await engine.commitTransaction(tx_id, 'trace') + console.log('[nodejs] commited', commitResponse) + }) + + describe('read scalar and non scalar types', () => { + if (['mysql'].includes(flavour)) { + it('mysql', async () => { + const resultSet = await doQuery( + { + "action": "findMany", + "modelName": "type_test", + "query": { + "selection": { + "tinyint_column": true, + "smallint_column": true, + "mediumint_column": true, + "int_column": true, + "bigint_column": true, + "float_column": true, + "double_column": true, + "decimal_column": true, + "boolean_column": true, + "char_column": true, + "varchar_column": true, + "text_column": true, + "date_column": true, + "time_column": true, + "datetime_column": true, + "timestamp_column": true, + "json_column": true, + "enum_column": true, + "binary_column": true, + "varbinary_column": true, + "blob_column": true + } + } + }) + + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) + }) + } else if (['postgres'].includes(flavour)) { + it('postgres', async () => { + const resultSet = await doQuery( + { + "action": "findMany", + "modelName": "type_test", + "query": { + "selection": { + 
"smallint_column": true, + "int_column": true, + "bigint_column": true, + "float_column": true, + "double_column": true, + "decimal_column": true, + "boolean_column": true, + "char_column": true, + "varchar_column": true, + "text_column": true, + "date_column": true, + "time_column": true, + "datetime_column": true, + "timestamp_column": true, + "json_column": true, + "enum_column": true + } + } + } + ) + console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) + }) + } else { + throw new Error(`Missing test for flavour ${flavour}`) } - ) + }) + }) +} - return resultSet +class SmokeTest { + readonly flavour: ErrorCapturingDriverAdapter['flavour'] + + constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { + this.flavour = connector.flavour } + async testFindManyTypeTest() { await this.testFindManyTypeTestMySQL() await this.testFindManyTypeTestPostgres() @@ -203,11 +450,11 @@ class SmokeTest { "varbinary_column": true, "blob_column": true } - } + } }) console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - + return resultSet } @@ -239,11 +486,11 @@ class SmokeTest { "json_column": true, "enum_column": true } - } + } } ) console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) - + return resultSet } @@ -288,7 +535,7 @@ class SmokeTest { async testCreateAndDeleteChildParent() { /* Delete all child and parent records */ - + // Queries: [ // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', @@ -308,7 +555,7 @@ class SmokeTest { } } ) - + // Queries: [ // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', @@ -328,9 +575,9 @@ class SmokeTest { } } ) - + /* Create a parent with some new children, within a transaction */ - + // Queries: [ // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', @@ -367,9 +614,9 @@ class SmokeTest { } } ) - + /* Delete the parent */ - + // Queries: [ // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', @@ -417,32 +664,13 @@ class SmokeTest { console.log('[nodejs] commited', commitResponse) } - async testRawError() { - try { - await this.doQuery({ - action: 'queryRaw', - query: { - selection: { $scalars: true }, - arguments: { - query: 'NOT A VALID SQL, THIS WILL FAIL', - parameters: '[]' - } - } - }) - console.log(`[nodejs] expected exception, but query succeeded`) - } catch (error) { - console.log('[nodejs] caught expected error', error) - } - - } - private async doQuery(query: JsonQuery, tx_id?: string) { const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) const parsedResult = JSON.parse(result) if (parsedResult.errors) { const error = parsedResult.errors[0]?.user_facing_error if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) + const jsError = this.connector.errorRegistry.consumeError(error.meta.id) if (!jsError) { throw new Error(`Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`) } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts similarity index 67% rename from query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts rename to query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index 755289dcd42c..24e12fe631cd 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -1,9 +1,10 @@ import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' import { neon } from '@neondatabase/serverless' -import { smokeTestLibquery } from './libquery' +import { describe } from 'node:test' +import { smokeTestLibquery } from './libquery' -async function main() { +describe('neon (HTTP)', () => { const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` const neonConnection = neon(connectionString, { @@ -13,11 +14,6 @@ async function main() { const adapter = new PrismaNeonHTTP(neonConnection) const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) + + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts similarity index 68% rename from query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts rename to query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts index 888f29d35e26..3510a5c2709c 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts @@ -2,21 +2,17 @@ import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' import { WebSocket } from 'undici' import { Pool, neonConfig } from '@neondatabase/serverless' -import { smokeTestLibquery } from './libquery' +import { describe } from 'node:test' +import { smokeTestLibquery } from './libquery' neonConfig.webSocketConstructor = WebSocket -async function main() { +describe('neon (WebSocket)', () => { const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` const pool = new Pool({ connectionString }) const adapter = new PrismaNeon(pool) const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) + + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts similarity index 70% rename from query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts rename to query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts index cc657ddbca33..0cb19343e7fb 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts @@ -1,19 +1,15 @@ import pg from 'pg' import { PrismaPg } from 
'@jkomyno/prisma-adapter-pg' import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' -async function main() { +describe('pg', () => { const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` const pool = new pg.Pool({ connectionString }) const adapter = new PrismaPg(pool) const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) + + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts similarity index 57% rename from query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts rename to query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts index 971c3fa0fb85..85b9b722c9c3 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts @@ -1,19 +1,15 @@ import { connect } from '@planetscale/database' import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' -async function main() { +describe('planetscale', () => { const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` - const planetscale = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(planetscale) + const connnection = connect({ url: connectionString }) + const adapter = new PrismaPlanetScale(connnection) const driverAdapter = bindAdapter(adapter) - - await smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -} - -main().catch((e) => { - console.error(e) - process.exit(1) + + smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') }) From 5106138372c1c3548573a157b3853d067694f669 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Wed, 13 Sep 2023 10:25:54 +0200 Subject: [PATCH 006/128] feat(quaint): remove `chrono` conditional flag (#4227) * feat(quaint): remove "chrono" conditional flag * feat(quaint): remove "chrono" conditional flag from tests as well --- .github/workflows/quaint.yml | 8 ++-- Cargo.toml | 1 - quaint/.github/workflows/test.yml | 8 ++-- quaint/Cargo.toml | 7 ++-- quaint/src/ast/values.rs | 44 --------------------- quaint/src/connector/mssql/conversion.rs | 9 ----- quaint/src/connector/mysql/conversion.rs | 19 --------- quaint/src/connector/postgres/conversion.rs | 25 ------------ quaint/src/connector/sqlite/conversion.rs | 9 ----- quaint/src/serde.rs | 6 --- quaint/src/tests/query.rs | 1 - quaint/src/tests/query/error.rs | 1 - quaint/src/tests/types/mssql.rs | 6 --- quaint/src/tests/types/mysql.rs | 4 -- quaint/src/tests/types/postgres.rs | 8 ---- quaint/src/tests/types/sqlite.rs | 7 ---- quaint/src/visitor/mssql.rs | 4 -- quaint/src/visitor/mysql.rs | 4 -- quaint/src/visitor/postgres.rs | 4 -- quaint/src/visitor/sqlite.rs | 4 -- 20 files changed, 11 insertions(+), 168 deletions(-) diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml index d4a840728272..3ea87d7fcae0 100644 --- a/.github/workflows/quaint.yml +++ b/.github/workflows/quaint.yml @@ -17,13 +17,13 @@ jobs: features: - "--lib 
--features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/Cargo.toml b/Cargo.toml index 02e1f7373d04..e464a500dcc9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,7 +56,6 @@ napi-derive = "2.12.4" path = "quaint" features = [ "bigdecimal", - "chrono", "expose-drivers", "fmt-sql", "json", diff --git a/quaint/.github/workflows/test.yml b/quaint/.github/workflows/test.yml index a067743f714e..058c177a0bd4 100644 --- a/quaint/.github/workflows/test.yml +++ b/quaint/.github/workflows/test.yml @@ -46,13 +46,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=chrono --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - 
"--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index ccda5e087360..2ff5a69bd97a 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -30,7 +30,6 @@ docs = [] expose-drivers = [] all = [ - "chrono", "json", "mssql", "mysql", @@ -60,10 +59,10 @@ postgresql = [ ] json = ["serde_json", "base64"] -mssql = ["tiberius", "uuid", "chrono", "tokio-util", "tokio/time", "tokio/net", "either"] +mssql = ["tiberius", "uuid", "tokio-util", "tokio/time", "tokio/net", "either"] mysql = ["mysql_async", "tokio/time", "lru-cache"] pooled = ["mobc"] -serde-support = ["serde", "chrono/serde"] +serde-support = ["serde"] sqlite = ["rusqlite", "tokio/sync"] bigdecimal = ["bigdecimal_"] fmt-sql = ["sqlformat"] @@ -83,7 +82,7 @@ hex = "0.4" either = { version = "1.6", optional = true } base64 = { version = "0.12.3", optional = true } -chrono = { version = "0.4", optional = true, default-features = false } +chrono = { version = "0.4", default-features = false, features = ["serde"] } lru-cache = { version = "0.1", optional = true } serde_json = { version = "1.0.48", optional = true, features = ["float_roundtrip"] } native-tls = { version = "0.2", optional = true } diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 5296146646a7..3daf6655bf6d 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -3,7 +3,6 @@ use crate::error::{Error, ErrorKind}; #[cfg(feature = "bigdecimal")] use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; -#[cfg(feature = "chrono")] use chrono::{DateTime, NaiveDate, NaiveTime, Utc}; #[cfg(feature = "json")] use serde_json::{Number, Value as JsonValue}; @@ -75,16 +74,10 @@ pub enum Value<'a> { #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] /// An UUID value. Uuid(Option), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A datetime value. DateTime(Option>), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A date value. Date(Option), - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] /// A time value. Time(Option), } @@ -139,11 +132,8 @@ impl<'a> fmt::Display for Value<'a> { Value::Json(val) => val.as_ref().map(|v| write!(f, "{v}")), #[cfg(feature = "uuid")] Value::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] Value::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] Value::Date(val) => val.map(|v| write!(f, "\"{v}\"")), - #[cfg(feature = "chrono")] Value::Time(val) => val.map(|v| write!(f, "\"{v}\"")), }; @@ -190,11 +180,8 @@ impl<'a> From> for serde_json::Value { Value::Json(v) => v, #[cfg(feature = "uuid")] Value::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), }; @@ -304,22 +291,16 @@ impl<'a> Value<'a> { } /// Creates a new datetime value. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn datetime(value: DateTime) -> Self { Value::DateTime(Some(value)) } /// Creates a new date value. 
- #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn date(value: NaiveDate) -> Self { Value::Date(Some(value)) } /// Creates a new time value. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn time(value: NaiveTime) -> Self { Value::Time(Some(value)) } @@ -357,11 +338,8 @@ impl<'a> Value<'a> { Value::Numeric(r) => r.is_none(), #[cfg(feature = "uuid")] Value::Uuid(u) => u.is_none(), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.is_none(), - #[cfg(feature = "chrono")] Value::Date(d) => d.is_none(), - #[cfg(feature = "chrono")] Value::Time(t) => t.is_none(), #[cfg(feature = "json")] Value::Json(json) => json.is_none(), @@ -564,15 +542,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a DateTime. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn is_datetime(&self) -> bool { matches!(self, Value::DateTime(_)) } /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn as_datetime(&self) -> Option> { match self { Value::DateTime(dt) => *dt, @@ -581,15 +555,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a Date. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn is_date(&self) -> bool { matches!(self, Value::Date(_)) } /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn as_date(&self) -> Option { match self { Value::Date(dt) => *dt, @@ -598,15 +568,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is a `Time`. - #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn is_time(&self) -> bool { matches!(self, Value::Time(_)) } /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`. 
- #[cfg(feature = "chrono")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] pub const fn as_time(&self) -> Option { match self { Value::Time(time) => *time, @@ -687,11 +653,8 @@ value!(val: &'a [u8], Bytes, val.into()); value!(val: f64, Double, val); value!(val: f32, Float, val); -#[cfg(feature = "chrono")] value!(val: DateTime, DateTime, val); -#[cfg(feature = "chrono")] value!(val: chrono::NaiveTime, Time, val); -#[cfg(feature = "chrono")] value!(val: chrono::NaiveDate, Date, val); #[cfg(feature = "bigdecimal")] value!(val: BigDecimal, Numeric, val); @@ -761,8 +724,6 @@ impl<'a> TryFrom> for bool { } } -#[cfg(feature = "chrono")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "chrono")))] impl<'a> TryFrom> for DateTime { type Error = Error; @@ -926,7 +887,6 @@ impl<'a> IntoIterator for Values<'a> { #[cfg(test)] mod tests { use super::*; - #[cfg(feature = "chrono")] use std::str::FromStr; #[test] @@ -965,7 +925,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn a_parameterized_value_of_datetimes_can_be_converted_into_a_vec() { let datetime = DateTime::from_str("2019-07-27T05:30:30Z").expect("parsing date/time"); let pv = Value::array(vec![datetime]); @@ -981,7 +940,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_datetime() { let dt: DateTime = DateTime::from_str("2019-07-27T05:30:30Z").expect("failed while parsing date"); let pv = Value::datetime(dt); @@ -990,7 +948,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_date() { let date = NaiveDate::from_ymd_opt(2022, 8, 11).unwrap(); let pv = Value::date(date); @@ -999,7 +956,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn display_format_for_time() { let time = NaiveTime::from_hms_opt(16, 17, 00).unwrap(); let pv = Value::time(time); diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs index 60e3f4ab6eb7..862993e2e3eb 100644 --- a/quaint/src/connector/mssql/conversion.rs +++ b/quaint/src/connector/mssql/conversion.rs @@ -28,11 +28,8 @@ impl<'a> IntoSql<'a> for &'a Value<'a> { Value::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(), #[cfg(feature = "uuid")] Value::Uuid(val) => val.into_sql(), - #[cfg(feature = "chrono")] Value::DateTime(val) => val.into_sql(), - #[cfg(feature = "chrono")] Value::Date(val) => val.into_sql(), - #[cfg(feature = "chrono")] Value::Time(val) => val.into_sql(), } } @@ -60,32 +57,27 @@ impl TryFrom> for Value<'static> { let kind = ErrorKind::conversion("Please enable `bigdecimal` feature to read numeric values"); return Err(Error::builder(kind).build()); } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); Value::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::SmallDateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); Value::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::Time(_) => { use tiberius::time::chrono::NaiveTime; Value::Time(NaiveTime::from_sql(&dt)?) } - #[cfg(feature = "chrono")] dt @ ColumnData::Date(_) => { use tiberius::time::chrono::NaiveDate; Value::Date(NaiveDate::from_sql(&dt)?) 
} - #[cfg(feature = "chrono")] dt @ ColumnData::DateTime2(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; @@ -93,7 +85,6 @@ impl TryFrom> for Value<'static> { Value::DateTime(dt) } - #[cfg(feature = "chrono")] dt @ ColumnData::DateTimeOffset(_) => { use tiberius::time::chrono::{DateTime, Utc}; diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs index ea634f8dc87f..18e1ce4cd431 100644 --- a/quaint/src/connector/mysql/conversion.rs +++ b/quaint/src/connector/mysql/conversion.rs @@ -3,7 +3,6 @@ use crate::{ connector::{queryable::TakeRow, TypeIdentifier}, error::{Error, ErrorKind}, }; -#[cfg(feature = "chrono")] use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike, Utc}; use mysql_async::{ self as my, @@ -54,15 +53,12 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { }, #[cfg(feature = "uuid")] Value::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())), - #[cfg(feature = "chrono")] Value::Date(d) => { d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0)) } - #[cfg(feature = "chrono")] Value::Time(t) => { t.map(|t| my::Value::Time(false, 0, t.hour() as u8, t.minute() as u8, t.second() as u8, 0)) } - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| { my::Value::Date( dt.year() as u16, @@ -276,7 +272,6 @@ impl TakeRow for my::Row { })?), my::Value::Float(f) => Value::from(f), my::Value::Double(f) => Value::from(f), - #[cfg(feature = "chrono")] my::Value::Date(year, month, day, hour, min, sec, micro) => { if day == 0 || month == 0 { let msg = format!( @@ -294,7 +289,6 @@ impl TakeRow for my::Row { Value::datetime(DateTime::::from_utc(dt, Utc)) } - #[cfg(feature = "chrono")] my::Value::Time(is_neg, days, hours, minutes, seconds, micros) => { if is_neg { let kind = ErrorKind::conversion("Failed to convert a negative time"); @@ -322,11 +316,8 @@ impl TakeRow for my::Row { t if t.is_bytes() => Value::Bytes(None), #[cfg(feature = "bigdecimal")] t if t.is_real() => Value::Numeric(None), - #[cfg(feature = "chrono")] t if t.is_datetime() => Value::DateTime(None), - #[cfg(feature = "chrono")] t if t.is_time() => Value::Time(None), - #[cfg(feature = "chrono")] t if t.is_date() => Value::Date(None), #[cfg(feature = "json")] t if t.is_json() => Value::Json(None), @@ -337,16 +328,6 @@ impl TakeRow for my::Row { return Err(Error::builder(kind).build()); } }, - #[cfg(not(feature = "chrono"))] - typ => { - let msg = format!( - "Value of type {:?} is not supported with the current configuration", - typ - ); - - let kind = ErrorKind::conversion(msg); - Err(Error::builder(kind).build())? 
- } }; Ok(res) diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs index b4b0a256ad11..181557c2e48d 100644 --- a/quaint/src/connector/postgres/conversion.rs +++ b/quaint/src/connector/postgres/conversion.rs @@ -10,7 +10,6 @@ use crate::{ use bigdecimal::{num_bigint::BigInt, BigDecimal, FromPrimitive, ToPrimitive}; use bit_vec::BitVec; use bytes::BytesMut; -#[cfg(feature = "chrono")] use chrono::{DateTime, NaiveDateTime, Utc}; #[cfg(feature = "bigdecimal")] pub(crate) use decimal::DecimalWrapper; @@ -57,11 +56,8 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { Value::Xml(_) => PostgresType::XML, #[cfg(feature = "uuid")] Value::Uuid(_) => PostgresType::UUID, - #[cfg(feature = "chrono")] Value::DateTime(_) => PostgresType::TIMESTAMPTZ, - #[cfg(feature = "chrono")] Value::Date(_) => PostgresType::TIMESTAMP, - #[cfg(feature = "chrono")] Value::Time(_) => PostgresType::TIME, Value::Array(ref arr) => { let arr = arr.as_ref().unwrap(); @@ -99,11 +95,8 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { Value::Xml(_) => PostgresType::XML_ARRAY, #[cfg(feature = "uuid")] Value::Uuid(_) => PostgresType::UUID_ARRAY, - #[cfg(feature = "chrono")] Value::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, - #[cfg(feature = "chrono")] Value::Date(_) => PostgresType::TIMESTAMP_ARRAY, - #[cfg(feature = "chrono")] Value::Time(_) => PostgresType::TIME_ARRAY, // In the case of nested arrays, we let PG infer the type Value::Array(_) => PostgresType::UNKNOWN, @@ -142,10 +135,8 @@ impl<'a> FromSql<'a> for EnumString { } } -#[cfg(feature = "chrono")] struct TimeTz(chrono::NaiveTime); -#[cfg(feature = "chrono")] impl<'a> FromSql<'a> for TimeTz { fn from_sql(_ty: &PostgresType, raw: &'a [u8]) -> Result> { // We assume UTC. @@ -248,7 +239,6 @@ impl GetRow for PostgresRow { } None => Value::Numeric(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMP => match row.try_get(i)? { Some(val) => { let ts: NaiveDateTime = val; @@ -257,7 +247,6 @@ impl GetRow for PostgresRow { } None => Value::DateTime(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMPTZ => match row.try_get(i)? { Some(val) => { let ts: DateTime = val; @@ -265,17 +254,14 @@ impl GetRow for PostgresRow { } None => Value::DateTime(None), }, - #[cfg(feature = "chrono")] PostgresType::DATE => match row.try_get(i)? { Some(val) => Value::date(val), None => Value::Date(None), }, - #[cfg(feature = "chrono")] PostgresType::TIME => match row.try_get(i)? { Some(val) => Value::time(val), None => Value::Time(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMETZ => match row.try_get(i)? { Some(val) => { let time: TimeTz = val; @@ -357,7 +343,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMP_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -412,7 +397,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMESTAMPTZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec>> = val; @@ -422,7 +406,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::DATE_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -432,7 +415,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIME_ARRAY => match row.try_get(i)? 
{ Some(val) => { let val: Vec> = val; @@ -442,7 +424,6 @@ impl GetRow for PostgresRow { } None => Value::Array(None), }, - #[cfg(feature = "chrono")] PostgresType::TIMETZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; @@ -885,22 +866,16 @@ impl<'a> ToSql for Value<'a> { (Value::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), #[cfg(feature = "uuid")] (Value::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::Date(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::Time(value), _) => value.map(|value| value.to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::DateTime(value), &PostgresType::TIME) => value.map(|value| value.time().to_sql(ty, out)), - #[cfg(feature = "chrono")] (Value::DateTime(value), &PostgresType::TIMETZ) => value.map(|value| { let result = value.time().to_sql(ty, out)?; // We assume UTC. see https://www.postgresql.org/docs/9.5/datatype-datetime.html out.extend_from_slice(&[0; 4]); Ok(result) }), - #[cfg(feature = "chrono")] (Value::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), }; diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs index 68442d2a7202..23f20edbabe0 100644 --- a/quaint/src/connector/sqlite/conversion.rs +++ b/quaint/src/connector/sqlite/conversion.rs @@ -14,7 +14,6 @@ use rusqlite::{ Column, Error as RusqlError, Row as SqliteRow, Rows as SqliteRows, }; -#[cfg(feature = "chrono")] use chrono::TimeZone; impl TypeIdentifier for Column<'_> { @@ -147,9 +146,7 @@ impl<'a> GetRow for SqliteRow<'a> { c if c.is_double() => Value::Double(None), #[cfg(feature = "bigdecimal")] c if c.is_real() => Value::Numeric(None), - #[cfg(feature = "chrono")] c if c.is_datetime() => Value::DateTime(None), - #[cfg(feature = "chrono")] c if c.is_date() => Value::Date(None), c if c.is_bool() => Value::Boolean(None), c => match c.decl_type() { @@ -172,12 +169,10 @@ impl<'a> GetRow for SqliteRow<'a> { Value::boolean(true) } } - #[cfg(feature = "chrono")] c if c.is_date() => { let dt = chrono::NaiveDateTime::from_timestamp_opt(i / 1000, 0).unwrap(); Value::date(dt.date()) } - #[cfg(feature = "chrono")] c if c.is_datetime() => { let dt = chrono::Utc.timestamp_millis_opt(i).unwrap(); Value::datetime(dt) @@ -203,7 +198,6 @@ impl<'a> GetRow for SqliteRow<'a> { Value::numeric(BigDecimal::from_f64(f).unwrap()) } ValueRef::Real(f) => Value::double(f), - #[cfg(feature = "chrono")] ValueRef::Text(bytes) if column.is_datetime() => { let parse_res = std::str::from_utf8(bytes).map_err(|_| { let builder = Error::builder(ErrorKind::ConversionError( @@ -285,13 +279,10 @@ impl<'a> ToSql for Value<'a> { Value::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), #[cfg(feature = "uuid")] Value::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())), - #[cfg(feature = "chrono")] Value::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())), - #[cfg(feature = "chrono")] Value::Date(date) => date .and_then(|date| date.and_hms_opt(0, 0, 0)) .map(|dt| ToSqlOutput::from(dt.timestamp_millis())), - #[cfg(feature = "chrono")] Value::Time(time) => time .and_then(|time| chrono::NaiveDate::from_ymd_opt(1970, 1, 1).map(|d| (d, time))) .and_then(|(date, time)| { diff --git a/quaint/src/serde.rs 
b/quaint/src/serde.rs index aa26bb98c2b0..7bd5d10f72c0 100644 --- a/quaint/src/serde.rs +++ b/quaint/src/serde.rs @@ -153,19 +153,13 @@ impl<'de> Deserializer<'de> for ValueDeserializer<'de> { Value::Xml(Some(s)) => visitor.visit_string(s.into_owned()), Value::Xml(None) => visitor.visit_none(), - #[cfg(feature = "chrono")] Value::DateTime(Some(dt)) => visitor.visit_string(dt.to_rfc3339()), - #[cfg(feature = "chrono")] Value::DateTime(None) => visitor.visit_none(), - #[cfg(feature = "chrono")] Value::Date(Some(d)) => visitor.visit_string(format!("{d}")), - #[cfg(feature = "chrono")] Value::Date(None) => visitor.visit_none(), - #[cfg(feature = "chrono")] Value::Time(Some(t)) => visitor.visit_string(format!("{t}")), - #[cfg(feature = "chrono")] Value::Time(None) => visitor.visit_none(), Value::Array(Some(values)) => { diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index 9fc67e9d662f..26a0162bb8f6 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -3085,7 +3085,6 @@ async fn query_raw_typed_numeric(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) } -#[cfg(feature = "chrono")] #[test_each_connector(tags("postgresql"))] async fn query_raw_typed_date(api: &mut dyn TestApi) -> crate::Result<()> { use chrono::DateTime; diff --git a/quaint/src/tests/query/error.rs b/quaint/src/tests/query/error.rs index 63bfd3ef0357..d9884a2c574a 100644 --- a/quaint/src/tests/query/error.rs +++ b/quaint/src/tests/query/error.rs @@ -257,7 +257,6 @@ async fn ms_my_foreign_key_constraint_violation(api: &mut dyn TestApi) -> crate: Ok(()) } -#[cfg(feature = "chrono")] #[test_each_connector(tags("mysql"))] async fn garbage_datetime_values(api: &mut dyn TestApi) -> crate::Result<()> { api.conn() diff --git a/quaint/src/tests/types/mssql.rs b/quaint/src/tests/types/mssql.rs index 2f9a125022cb..6824562cde51 100644 --- a/quaint/src/tests/types/mssql.rs +++ b/quaint/src/tests/types/mssql.rs @@ -127,7 +127,6 @@ test_type!(image( Value::bytes(b"DEADBEEF".to_vec()), )); -#[cfg(feature = "chrono")] test_type!(date( mssql, "date", @@ -135,7 +134,6 @@ test_type!(date( Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(time( mssql, "time", @@ -143,25 +141,21 @@ test_type!(time( Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime2(mssql, "datetime2", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(datetime(mssql, "datetime", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(datetimeoffset(mssql, "datetimeoffset", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(smalldatetime(mssql, "smalldatetime", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs index 15e2c4f6478b..57b3738f6bd4 100644 --- a/quaint/src/tests/types/mysql.rs +++ b/quaint/src/tests/types/mysql.rs @@ -216,13 +216,11 @@ test_type!(json( Value::json(serde_json::json!({"this": "is", "a": 
"json", "number": 2})) )); -#[cfg(feature = "chrono")] test_type!(date(mysql, "date", Value::Date(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-04-20T00:00:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(time( mysql, "time", @@ -230,13 +228,11 @@ test_type!(time( Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime(mysql, "datetime", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamp(mysql, "timestamp", { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs index a098fa7a5eb4..954082a3c5d5 100644 --- a/quaint/src/tests/types/postgres.rs +++ b/quaint/src/tests/types/postgres.rs @@ -332,7 +332,6 @@ test_type!(uuid_array( ]) )); -#[cfg(feature = "chrono")] test_type!(date( postgresql, "date", @@ -340,7 +339,6 @@ test_type!(date( Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(date_array( postgresql, "date[]", @@ -351,7 +349,6 @@ test_type!(date_array( ]) )); -#[cfg(feature = "chrono")] test_type!(time( postgresql, "time", @@ -359,7 +356,6 @@ test_type!(time( Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(time_array( postgresql, "time[]", @@ -370,13 +366,11 @@ test_type!(time_array( ]) )); -#[cfg(feature = "chrono")] test_type!(timestamp(postgresql, "timestamp", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamp_array(postgresql, "timestamp[]", Value::Array(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); @@ -386,13 +380,11 @@ test_type!(timestamp_array(postgresql, "timestamp[]", Value::Array(None), { ]) })); -#[cfg(feature = "chrono")] test_type!(timestamptz(postgresql, "timestamptz", Value::DateTime(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -#[cfg(feature = "chrono")] test_type!(timestamptz_array(postgresql, "timestamptz[]", Value::Array(None), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs index 39aca6de2d52..80ab4bb5b8f2 100644 --- a/quaint/src/tests/types/sqlite.rs +++ b/quaint/src/tests/types/sqlite.rs @@ -1,9 +1,7 @@ #![allow(clippy::approx_constant)] use crate::tests::test_api::sqlite_test_api; -#[cfg(feature = "chrono")] use crate::tests::test_api::TestApi; -#[cfg(feature = "chrono")] use crate::{ast::*, connector::Queryable}; #[cfg(feature = "bigdecimal")] use std::str::FromStr; @@ -78,7 +76,6 @@ test_type!(boolean( Value::boolean(false) )); -#[cfg(feature = "chrono")] test_type!(date( sqlite, "DATE", @@ -86,7 +83,6 @@ test_type!(date( Value::date(chrono::NaiveDate::from_ymd_opt(1984, 1, 1).unwrap()) )); -#[cfg(feature = "chrono")] test_type!(datetime( sqlite, "DATETIME", @@ -94,7 +90,6 @@ test_type!(datetime( Value::datetime(chrono::DateTime::from_str("2020-07-29T09:23:44.458Z").unwrap()) )); -#[cfg(feature = 
"chrono")] #[test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; @@ -115,7 +110,6 @@ async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(feature = "chrono")] #[test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; @@ -138,7 +132,6 @@ async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[cfg(feature = "chrono")] #[test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_custom(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index 111b43ed8ebe..0353684a6cd3 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -348,17 +348,14 @@ impl<'a> Visitor<'a> for Mssql<'a> { let s = format!("CONVERT(uniqueidentifier, N'{}')", uuid.hyphenated()); self.write(s) }), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| { let s = format!("CONVERT(datetimeoffset, N'{}')", dt.to_rfc3339()); self.write(s) }), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| { let s = format!("CONVERT(date, N'{date}')"); self.write(s) }), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| { let s = format!("CONVERT(time, N'{time}')"); self.write(s) @@ -1270,7 +1267,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Mssql::build(Select::default().value(dt.raw())).unwrap(); diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index 68bc62ec617f..fc23fd8ecf64 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -163,11 +163,8 @@ impl<'a> Visitor<'a> for Mysql<'a> { }, #[cfg(feature = "uuid")] Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), }; @@ -878,7 +875,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn test_raw_datetime() { let dt = chrono::Utc::now(); let (sql, params) = Mysql::build(Select::default().value(dt.raw())).unwrap(); diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 8dc02180881b..fa6511ea3a40 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -111,11 +111,8 @@ impl<'a> Visitor<'a> for Postgres<'a> { Value::Numeric(r) => r.map(|r| self.write(r)), #[cfg(feature = "uuid")] Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - #[cfg(feature = "chrono")] Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - #[cfg(feature = "chrono")] Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), }; @@ -921,7 +918,6 @@ mod tests { } #[test] - #[cfg(feature = "chrono")] fn 
test_raw_datetime() {
        let dt = chrono::Utc::now();
        let (sql, params) = Postgres::build(Select::default().value(dt.raw())).unwrap();
diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs
index 91d3240df67d..3ac0d0866dd1 100644
--- a/quaint/src/visitor/sqlite.rs
+++ b/quaint/src/visitor/sqlite.rs
@@ -115,11 +115,8 @@ impl<'a> Visitor<'a> for Sqlite<'a> {
             Value::Numeric(r) => r.map(|r| self.write(r)),
             #[cfg(feature = "uuid")]
             Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))),
-            #[cfg(feature = "chrono")]
             Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))),
-            #[cfg(feature = "chrono")]
             Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))),
-            #[cfg(feature = "chrono")]
             Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))),
             Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))),
         };
@@ -935,7 +932,6 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "chrono")]
     fn test_raw_datetime() {
         let dt = chrono::Utc::now();
         let (sql, params) = Sqlite::build(Select::default().value(dt.raw())).unwrap();

From e6d9ad62eff75e6db8d3c5e1edd81166883e6088 Mon Sep 17 00:00:00 2001
From: Khoo Hao Yit
Date: Wed, 13 Sep 2023 18:48:33 +0800
Subject: [PATCH 007/128] perf: remove unnecessary join in RelationFilter
 (#3882)

---
 .../src/filter_conversion.rs | 97 ++++++++++++-------
 1 file changed, 60 insertions(+), 37 deletions(-)

diff --git a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs b/query-engine/connectors/sql-query-connector/src/filter_conversion.rs
index a95df0ce5aa1..ed2202bcdc84 100644
--- a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs
+++ b/query-engine/connectors/sql-query-connector/src/filter_conversion.rs
@@ -330,43 +330,66 @@ impl AliasedSelect for RelationFilter {
         let alias = alias.unwrap_or_default();
         let condition = self.condition;
 
-        let table = self.field.as_table(ctx);
-        let selected_identifier: Vec<Column> = self
-            .field
-            .identifier_columns(ctx)
-            .map(|col| col.aliased_col(Some(alias), ctx))
-            .collect();
-
-        let join_columns: Vec<Column> = self
-            .field
-            .join_columns(ctx)
-            .map(|c| c.aliased_col(Some(alias), ctx))
-            .collect();
-
-        let related_table = self.field.related_model().as_table(ctx);
-        let related_join_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields())
-            .as_columns(ctx)
-            .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx))
-            .collect();
-
-        let nested_conditions = self
-            .nested_filter
-            .aliased_condition_from(Some(alias.flip(AliasMode::Join)), false, ctx)
-            .invert_if(condition.invert_of_subselect());
-
-        let conditions = selected_identifier
-            .clone()
-            .into_iter()
-            .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
-
-        let join = related_table
-            .alias(alias.to_string(Some(AliasMode::Join)))
-            .on(Row::from(related_join_columns).equals(Row::from(join_columns)));
-
-        Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table))))
-            .columns(selected_identifier)
-            .inner_join(join)
-            .so_that(conditions)
+        // Performance can be improved by using fields in the related table, which skips a join table operation.
+        if self.field.related_field().walker().fields().is_some() {
+            let related_table = self.field.related_model().as_table(ctx);
+            let related_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields())
+                .as_columns(ctx)
+                .map(|col| col.aliased_col(Some(alias), ctx))
+                .collect();
+
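            // A hedged aside on what this fast path buys. Assume a hypothetical
            // inline relation where `Post.author_id` points at `User.id` (names
            // are illustrative, not from this codebase). The removed shape
            // compiled the relation filter as a subselect that joined the
            // related table back onto itself, roughly:
            //
            //     SELECT "t"."author_id" FROM "Post" AS "t"
            //     INNER JOIN "Post" AS "j" ON <relation columns match>
            //     WHERE <nested filter against "j">
            //
            // With the linking fields available on the related table itself,
            // this branch reads them directly, roughly:
            //
            //     SELECT "t"."author_id" FROM "Post" AS "t"
            //     WHERE <nested filter against "t">
            //
            // i.e. one scan of the related table instead of a scan plus a join.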
+            let nested_conditions = self
+                .nested_filter
+                .aliased_condition_from(Some(alias), false, ctx)
+                .invert_if(condition.invert_of_subselect());
+
+            let conditions = related_columns
+                .clone()
+                .into_iter()
+                .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
+
+            Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table))))
+                .columns(related_columns)
+                .so_that(conditions)
+        } else {
+            let table = self.field.as_table(ctx);
+            let selected_identifier: Vec<Column> = self
+                .field
+                .identifier_columns(ctx)
+                .map(|col| col.aliased_col(Some(alias), ctx))
+                .collect();
+
+            let join_columns: Vec<Column> = self
+                .field
+                .join_columns(ctx)
+                .map(|c| c.aliased_col(Some(alias), ctx))
+                .collect();
+
+            let related_table = self.field.related_model().as_table(ctx);
+            let related_join_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields())
+                .as_columns(ctx)
+                .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx))
+                .collect();
+
+            let nested_conditions = self
+                .nested_filter
+                .aliased_condition_from(Some(alias.flip(AliasMode::Join)), false, ctx)
+                .invert_if(condition.invert_of_subselect());
+
+            let conditions = selected_identifier
+                .clone()
+                .into_iter()
+                .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
+
+            let join = related_table
+                .alias(alias.to_string(Some(AliasMode::Join)))
+                .on(Row::from(related_join_columns).equals(Row::from(join_columns)));
+
+            Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table))))
+                .columns(selected_identifier)
+                .inner_join(join)
+                .so_that(conditions)
+        }
     }
 }

From 6ff772fb9ffe514775f5ad4f812d7ec25848be4c Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Wed, 13 Sep 2023 12:50:47 +0200
Subject: [PATCH 008/128] driver-adapters: add prettierrc (#4225)

---
 query-engine/driver-adapters/js/.prettierrc.yml | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 query-engine/driver-adapters/js/.prettierrc.yml

diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml
new file mode 100644
index 000000000000..f0beb50a2167
--- /dev/null
+++ b/query-engine/driver-adapters/js/.prettierrc.yml
@@ -0,0 +1,5 @@
+tabWidth: 2
+trailingComma: all
+singleQuote: true
+semi: false
+printWidth: 120

From f4104c076ba453b09b6296a2533f7dbef7cc2157 Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Wed, 13 Sep 2023 15:22:26 +0200
Subject: [PATCH 009/128] nix: fix and update flake (#4224)

- Update the Nix flake to pull in newer packages (especially newer Node.js
  and pnpm; the pnpm version we had was older than what driver-adapters
  requires).

- Switch from Node.js 18 to Node.js 20 to match .nvmrc.

- Fix the flake after updating to latest nixpkgs due to changes in
  cargo-auditable. The problem was that we were overriding the cargo and
  rustc packages using our own overlay on top of rust-overlay, but the
  packages from rust-overlay do not accept the same arguments as those in
  nixpkgs. That's probably the reason why rust-overlay doesn't do that and
  defines new packages instead. The `rustToolchain` package we were defining
  was fine, but I moved it to module arguments instead and got rid of the
  overlay completely.
- Add a way to opt-out of nix for gitpod --- .envrc | 4 +++- Cargo.lock | 24 +++++++++++------------ flake.lock | 36 +++++++++++++++++------------------ nix/all-engines.nix | 13 ++++++++----- nix/args.nix | 10 +++++----- nix/shell.nix | 11 ++++++----- prisma-schema-wasm/Cargo.toml | 2 +- 7 files changed, 53 insertions(+), 47 deletions(-) diff --git a/.envrc b/.envrc index 3bd875aed813..29a3b25822d5 100644 --- a/.envrc +++ b/.envrc @@ -36,7 +36,9 @@ fi # Set up env vars and build inputs from flake.nix automatically for nix users. # If you don't use nix, you can safely ignore this. -if command -v nix &> /dev/null +# You can set the DISABLE_NIX environment variable if you're in an environment +# where nix is pre-installed (e.g. gitpod) but you don't want to use it. +if command -v nix &> /dev/null && [ -z ${DISABLE_NIX+x} ] then if nix flake metadata > /dev/null; then if type nix_direnv_watch_file &> /dev/null; then diff --git a/Cargo.lock b/Cargo.lock index 3cabff4234ff..66ab1a8b832d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5731,9 +5731,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5741,16 +5741,16 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -5768,9 +5768,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5778,22 +5778,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-logger" diff --git a/flake.lock b/flake.lock index 725613da576b..6f11c280dd3d 100644 --- a/flake.lock +++ b/flake.lock @@ -14,11 +14,11 @@ ] }, "locked": { - "lastModified": 1693163878, - "narHash": "sha256-HXuyMUVaRSoIA602jfFuYGXt6AMZ+WUxuvLq8iJmYTA=", + "lastModified": 1693787605, + "narHash": "sha256-rwq5U8dy+a9JFny/73L0SJu1GfWwATMPMTp7D+mjHy8=", "owner": "ipetkov", "repo": "crane", - 
"rev": "43db881168bc65b568d36ceb614a0fc8b276191b", + "rev": "8b4f7a4dab2120cf41e7957a28a853f45016bd9d", "type": "github" }, "original": { @@ -50,11 +50,11 @@ ] }, "locked": { - "lastModified": 1688466019, - "narHash": "sha256-VeM2akYrBYMsb4W/MmBo1zmaMfgbL4cH3Pu8PGyIwJ0=", + "lastModified": 1693611461, + "narHash": "sha256-aPODl8vAgGQ0ZYFIRisxYG5MOGSkIczvu2Cd8Gb9+1Y=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "8e8d955c22df93dbe24f19ea04f47a74adbdc5ec", + "rev": "7f53fdb7bdc5bb237da7fefef12d099e4fd611ca", "type": "github" }, "original": { @@ -70,11 +70,11 @@ ] }, "locked": { - "lastModified": 1689068808, - "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", + "lastModified": 1692799911, + "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=", "owner": "numtide", "repo": "flake-utils", - "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", + "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44", "type": "github" }, "original": { @@ -90,11 +90,11 @@ ] }, "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", + "lastModified": 1694102001, + "narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=", "owner": "hercules-ci", "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", + "rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6", "type": "github" }, "original": { @@ -105,11 +105,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1689192006, - "narHash": "sha256-QM0f0d8oPphOTYJebsHioR9+FzJcy1QNIzREyubB91U=", + "lastModified": 1694422566, + "narHash": "sha256-lHJ+A9esOz9vln/3CJG23FV6Wd2OoOFbDeEs4cMGMqc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "2de8efefb6ce7f5e4e75bdf57376a96555986841", + "rev": "3a2786eea085f040a66ecde1bc3ddc7099f6dbeb", "type": "github" }, "original": { @@ -139,11 +139,11 @@ ] }, "locked": { - "lastModified": 1693361441, - "narHash": "sha256-TRFdMQj9wSKMduNqe/1xF8TzcPWEdcn/hKWcVcZ5fO8=", + "lastModified": 1694484610, + "narHash": "sha256-aeSDkp7fkAqtVjW3QUn7vq7BKNlFul/BiGgdv7rK+mA=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "1fb2aa49635e9f30b6fa211ab7c454f7175e1ba3", + "rev": "c5b977a7e6a295697fa1f9c42174fd6313b38df4", "type": "github" }, "original": { diff --git a/nix/all-engines.nix b/nix/all-engines.nix index 9235060463a7..0e6a1c05b236 100644 --- a/nix/all-engines.nix +++ b/nix/all-engines.nix @@ -1,4 +1,4 @@ -{ pkgs, flakeInputs, lib, self', ... }: +{ pkgs, flakeInputs, lib, self', rustToolchain, ... 
}: let stdenv = pkgs.clangStdenv; @@ -15,7 +15,7 @@ let src = srcPath; name = "prisma-engines-source"; }; - craneLib = flakeInputs.crane.mkLib pkgs; + craneLib = (flakeInputs.crane.mkLib pkgs).overrideToolchain rustToolchain.default; deps = craneLib.vendorCargoDeps { inherit src; }; libSuffix = stdenv.hostPlatform.extensions.sharedLibrary; in @@ -34,6 +34,7 @@ in ] ++ lib.optionals stdenv.isDarwin [ perl # required to build openssl darwin.apple_sdk.frameworks.Security + iconv ]; configurePhase = '' @@ -53,13 +54,15 @@ in cp target/release/prisma-fmt $out/bin/ cp target/release/libquery_engine${libSuffix} $out/lib/libquery_engine.node ''; + + dontStrip = true; }; packages.test-cli = lib.makeOverridable ({ profile }: stdenv.mkDerivation { name = "test-cli"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = "cargo build --profile=${profile} --bin=test-cli"; @@ -76,7 +79,7 @@ in ({ profile }: stdenv.mkDerivation { name = "query-engine-bin"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = "cargo build --profile=${profile} --bin=query-engine"; @@ -96,7 +99,7 @@ in ({ profile }: stdenv.mkDerivation { name = "query-engine-bin-and-lib"; inherit src; - inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase; + inherit (self'.packages.prisma-engines) buildInputs nativeBuildInputs configurePhase dontStrip; buildPhase = '' cargo build --profile=${profile} --bin=query-engine diff --git a/nix/args.nix b/nix/args.nix index d3a2e54dbc8a..2254b7f5b138 100644 --- a/nix/args.nix +++ b/nix/args.nix @@ -4,10 +4,10 @@ let overlays = [ flakeInputs.rust-overlay.overlays.default - (self: super: - let toolchain = super.rust-bin.stable.latest; in - { cargo = toolchain.minimal; rustc = toolchain.minimal; rustToolchain = toolchain; }) ]; - in - { pkgs = import flakeInputs.nixpkgs { inherit system overlays; }; }; + in rec + { + pkgs = import flakeInputs.nixpkgs { inherit system overlays; }; + rustToolchain = pkgs.rust-bin.stable.latest; + }; } diff --git a/nix/shell.nix b/nix/shell.nix index c30ca9080d47..94661c972d01 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -1,7 +1,8 @@ -{ self', pkgs, ... }: +{ self', pkgs, rustToolchain, ... 
}: let - devToolchain = pkgs.rustToolchain.default.override { extensions = [ "rust-analyzer" "rust-src" ]; }; + devToolchain = rustToolchain.default.override { extensions = [ "rust-analyzer" "rust-src" ]; }; + nodejs = pkgs.nodejs_latest; in { devShells.default = pkgs.mkShell { @@ -9,9 +10,9 @@ in devToolchain pkgs.llvmPackages_latest.bintools - pkgs.nodejs - pkgs.nodePackages.typescript-language-server - pkgs.nodePackages.pnpm + nodejs + nodejs.pkgs.typescript-language-server + nodejs.pkgs.pnpm ]; inputsFrom = [ self'.packages.prisma-engines ]; shellHook = pkgs.lib.optionalString pkgs.stdenv.isLinux diff --git a/prisma-schema-wasm/Cargo.toml b/prisma-schema-wasm/Cargo.toml index 6387aeedfbba..248c726c9ba4 100644 --- a/prisma-schema-wasm/Cargo.toml +++ b/prisma-schema-wasm/Cargo.toml @@ -7,6 +7,6 @@ edition = "2021" crate-type = ["cdylib"] [dependencies] -wasm-bindgen = "=0.2.84" +wasm-bindgen = "=0.2.87" wasm-logger = { version = "0.2.0", optional = true } prisma-fmt = { path = "../prisma-fmt" } From b90a8206229a3b24aeb96d7f9836930cf2a5ada5 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Wed, 13 Sep 2023 18:02:40 +0200 Subject: [PATCH 010/128] feat(quaint): remove "json" conditional flag (#4232) --- .github/workflows/quaint.yml | 8 +-- Cargo.toml | 1 - quaint/.github/workflows/test.yml | 8 +-- quaint/Cargo.toml | 6 +- quaint/src/ast/compare.rs | 34 ++++----- quaint/src/ast/expression.rs | 34 ++++----- quaint/src/ast/function.rs | 46 ++++++------ quaint/src/ast/function/row_to_json.rs | 4 +- quaint/src/ast/row.rs | 18 ++--- quaint/src/ast/values.rs | 17 ----- quaint/src/connector/mssql/conversion.rs | 1 - quaint/src/connector/mysql/conversion.rs | 3 - quaint/src/connector/postgres/conversion.rs | 7 -- quaint/src/connector/result_set.rs | 6 +- quaint/src/connector/sqlite/conversion.rs | 1 - quaint/src/error.rs | 2 - quaint/src/serde.rs | 2 - quaint/src/tests/query.rs | 77 ++++++++++----------- quaint/src/tests/types/mysql.rs | 1 - quaint/src/tests/types/postgres.rs | 4 -- quaint/src/visitor.rs | 24 +++---- quaint/src/visitor/mssql.rs | 18 ++--- quaint/src/visitor/mysql.rs | 38 +++------- quaint/src/visitor/postgres.rs | 24 +++---- quaint/src/visitor/sqlite.rs | 16 ++--- 25 files changed, 162 insertions(+), 238 deletions(-) diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml index 3ea87d7fcae0..d84590b951b4 100644 --- a/.github/workflows/quaint.yml +++ b/.github/workflows/quaint.yml @@ -17,13 +17,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features 
--features=mssql" - - "--lib --no-default-features --features=mssql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/Cargo.toml b/Cargo.toml index e464a500dcc9..77afa5ee21d4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -58,7 +58,6 @@ features = [ "bigdecimal", "expose-drivers", "fmt-sql", - "json", "mssql", "mysql", "pooled", diff --git a/quaint/.github/workflows/test.yml b/quaint/.github/workflows/test.yml index 058c177a0bd4..998a1a71ca46 100644 --- a/quaint/.github/workflows/test.yml +++ b/quaint/.github/workflows/test.yml @@ -46,13 +46,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=json --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mssql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - "--doc --features=all" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index 2ff5a69bd97a..90f0eacc1fd2 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -30,7 +30,6 @@ docs = [] expose-drivers = [] all = [ - "json", "mssql", "mysql", "pooled", @@ -58,7 +57,6 @@ postgresql = [ "byteorder", ] -json = ["serde_json", "base64"] mssql = ["tiberius", "uuid", "tokio-util", "tokio/time", "tokio/net", "either"] mysql = ["mysql_async", "tokio/time", "lru-cache"] pooled = ["mobc"] @@ -81,10 +79,10 @@ url = "2.1" hex = "0.4" either = { version = "1.6", optional = true } -base64 = { version = "0.12.3", optional = true } +base64 = { version = "0.12.3" } chrono = { version = "0.4", default-features = false, features = ["serde"] } lru-cache = { version = "0.1", optional = true } -serde_json = { version = "1.0.48", optional = true, features = ["float_roundtrip"] } +serde_json = { version = "1.0.48", features = ["float_roundtrip"] } native-tls = { version = "0.2", optional = true } bit-vec = { version = "0.6.1", optional = true } bytes = { version = "1.0", optional = true } diff --git a/quaint/src/ast/compare.rs b/quaint/src/ast/compare.rs index d92843a23557..9c7548303466 100644 --- a/quaint/src/ast/compare.rs +++ b/quaint/src/ast/compare.rs @@ -37,7 +37,7 @@ pub enum 
Compare<'a> { /// without visitor transformation in between. Raw(Box>, Cow<'a, str>, Box>), /// All json related comparators - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonCompare(JsonCompare<'a>), /// `left` @@ to_tsquery(`value`) #[cfg(feature = "postgresql")] @@ -558,7 +558,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -578,7 +578,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -608,7 +608,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -638,7 +638,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -666,7 +666,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -694,7 +694,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -713,7 +713,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -732,7 +732,7 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -977,7 +977,7 @@ where left.compare_raw(raw_comparator.into(), right) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -988,7 +988,7 @@ where val.json_array_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -999,7 +999,7 @@ where val.json_array_not_contains(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1010,7 +1010,7 @@ where val.json_array_begins_with(item) } - #[cfg(all(feature = "json", 
any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1021,7 +1021,7 @@ where val.json_array_not_begins_with(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1032,7 +1032,7 @@ where val.json_array_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1043,7 +1043,7 @@ where val.json_array_not_ends_into(item) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -1054,7 +1054,7 @@ where val.json_type_equals(json_type) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, diff --git a/quaint/src/ast/expression.rs b/quaint/src/ast/expression.rs index b3993abc523b..7e5912d1e027 100644 --- a/quaint/src/ast/expression.rs +++ b/quaint/src/ast/expression.rs @@ -1,4 +1,4 @@ -#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] +#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::{JsonCompare, JsonType}; use crate::ast::*; use query::SelectQuery; @@ -43,38 +43,34 @@ impl<'a> Expression<'a> { } } - #[cfg(feature = "json")] pub(crate) fn is_json_expr(&self) -> bool { match &self.kind { - #[cfg(feature = "json")] ExpressionKind::Parameterized(Value::Json(_)) => true, - #[cfg(feature = "json")] + ExpressionKind::Value(expr) => expr.is_json_value(), - #[cfg(feature = "json")] + ExpressionKind::Function(fun) => fun.returns_json(), _ => false, } } #[allow(dead_code)] - #[cfg(feature = "json")] + pub(crate) fn is_json_value(&self) -> bool { match &self.kind { - #[cfg(feature = "json")] ExpressionKind::Parameterized(Value::Json(_)) => true, - #[cfg(feature = "json")] + ExpressionKind::Value(expr) => expr.is_json_value(), _ => false, } } #[allow(dead_code)] - #[cfg(feature = "json")] + pub(crate) fn into_json_value(self) -> Option { match self.kind { - #[cfg(feature = "json")] ExpressionKind::Parameterized(Value::Json(json_val)) => json_val, - #[cfg(feature = "json")] + ExpressionKind::Value(expr) => expr.into_json_value(), _ => None, } @@ -427,7 +423,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Raw(Box::new(self), raw_comparator.into(), Box::new(right.into())) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -435,7 +431,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayContains(Box::new(self), Box::new(item.into()))) } - #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))] + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -443,7 +439,7 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayNotContains(Box::new(self), 
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_begins_with<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -453,7 +449,7 @@ impl<'a> Comparable<'a> for Expression<'a> {
         Compare::Equals(Box::new(array_starts_with), Box::new(item.into()))
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_not_begins_with<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -463,7 +459,7 @@ impl<'a> Comparable<'a> for Expression<'a> {
         Compare::NotEquals(Box::new(array_starts_with), Box::new(item.into()))
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_ends_into<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -473,7 +469,7 @@ impl<'a> Comparable<'a> for Expression<'a> {
         Compare::Equals(Box::new(array_ends_into), Box::new(item.into()))
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_not_ends_into<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -483,7 +479,7 @@ impl<'a> Comparable<'a> for Expression<'a> {
         Compare::NotEquals(Box::new(array_ends_into), Box::new(item.into()))
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_type_equals<T>(self, json_type: T) -> Compare<'a>
     where
         T: Into<JsonType<'a>>,
@@ -491,7 +487,7 @@ impl<'a> Comparable<'a> for Expression<'a> {
         Compare::JsonCompare(JsonCompare::TypeEquals(Box::new(self), json_type.into()))
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_type_not_equals<T>(self, json_type: T) -> Compare<'a>
     where
         T: Into<JsonType<'a>>,
diff --git a/quaint/src/ast/function.rs b/quaint/src/ast/function.rs
index 123b95566b30..5b6373795485 100644
--- a/quaint/src/ast/function.rs
+++ b/quaint/src/ast/function.rs
@@ -3,17 +3,17 @@ mod average;
 mod coalesce;
 mod concat;
 mod count;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 mod json_extract;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 mod json_extract_array;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 mod json_unquote;
 mod lower;
 mod maximum;
 mod minimum;
 mod row_number;
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 mod row_to_json;
 #[cfg(any(feature = "postgresql", feature = "mysql"))]
 mod search;
@@ -28,19 +28,19 @@ pub use average::*;
 pub use coalesce::*;
 pub use concat::*;
 pub use count::*;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 pub use json_extract::*;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 pub(crate) use json_extract_array::*;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 pub use json_unquote::*;
 pub use lower::*;
 pub use maximum::*;
 pub use minimum::*;
 pub use row_number::*;
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 pub use row_to_json::*;
 #[cfg(any(feature = "postgresql", feature = "mysql"))]
 pub use search::*;
 pub use sum::*;
 pub use upper::*;
@@ -61,13 +61,13 @@ pub struct Function<'a> {
 impl<'a> Function<'a> {
     pub fn returns_json(&self) -> bool {
         match self.typ_ {
-            #[cfg(all(feature = "json", feature = "postgresql"))]
+            #[cfg(feature = "postgresql")]
             FunctionType::RowToJson(_) => true,
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonExtract(_) => true,
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonExtractLastArrayElem(_) => true,
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonExtractFirstArrayElem(_) => true,
             _ => false,
         }
@@ -77,7 +77,7 @@ impl<'a> Function<'a> {
 /// A database function type
 #[derive(Debug, Clone, PartialEq)]
 pub(crate) enum FunctionType<'a> {
-    #[cfg(all(feature = "json", feature = "postgresql"))]
+    #[cfg(feature = "postgresql")]
     RowToJson(RowToJson<'a>),
     RowNumber(RowNumber<'a>),
     Count(Count<'a>),
@@ -90,13 +90,13 @@ pub(crate) enum FunctionType<'a> {
     Maximum(Maximum<'a>),
     Coalesce(Coalesce<'a>),
     Concat(Concat<'a>),
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     JsonExtract(JsonExtract<'a>),
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     JsonExtractLastArrayElem(JsonExtractLastArrayElem<'a>),
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     JsonExtractFirstArrayElem(JsonExtractFirstArrayElem<'a>),
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     JsonUnquote(JsonUnquote<'a>),
     #[cfg(any(feature = "postgresql", feature = "mysql"))]
     TextSearch(TextSearch<'a>),
@@ -122,19 +122,19 @@ impl<'a> Aliasable<'a> for Function<'a> {
     }
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 function!(RowToJson);
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 function!(JsonExtract);
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 function!(JsonExtractLastArrayElem);
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 function!(JsonExtractFirstArrayElem);
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 function!(JsonUnquote);
 
 #[cfg(any(feature = "postgresql", feature = "mysql"))]
diff --git a/quaint/src/ast/function/row_to_json.rs b/quaint/src/ast/function/row_to_json.rs
index 7ce8e0c98cc6..1093431e7412 100644
--- a/quaint/src/ast/function/row_to_json.rs
+++ b/quaint/src/ast/function/row_to_json.rs
@@ -3,7 +3,7 @@ use crate::ast::Table;
 
 #[derive(Debug, Clone, PartialEq)]
 #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))]
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 /// A representation of the `ROW_TO_JSON` function in the database.
 /// Only for `Postgresql`
 pub struct RowToJson<'a> {
@@ -40,7 +40,7 @@ pub struct RowToJson<'a> {
 /// # }
 /// ```
 #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))]
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 pub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a>
 where
     T: Into<Table<'a>>,
diff --git a/quaint/src/ast/row.rs b/quaint/src/ast/row.rs
index 3022b9127758..e556cee966af 100644
--- a/quaint/src/ast/row.rs
+++ b/quaint/src/ast/row.rs
@@ -1,4 +1,4 @@
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 use super::compare::JsonType;
 use crate::ast::{Comparable, Compare, Expression};
 use std::borrow::Cow;
@@ -283,7 +283,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.compare_raw(raw_comparator, right)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_contains<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -293,7 +293,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_array_contains(item)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_not_contains<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -303,7 +303,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_array_not_contains(item)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_begins_with<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -313,7 +313,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_array_begins_with(item)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_not_begins_with<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -323,7 +323,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_array_not_begins_with(item)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_ends_into<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -333,7 +333,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_array_ends_into(item)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_array_not_ends_into<T>(self, item: T) -> Compare<'a>
     where
         T: Into<Expression<'a>>,
@@ -343,7 +343,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_array_not_ends_into(item)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_type_equals<T>(self, json_type: T) -> Compare<'a>
     where
         T: Into<JsonType<'a>>,
@@ -353,7 +353,7 @@ impl<'a> Comparable<'a> for Row<'a> {
         value.json_type_equals(json_type)
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn json_type_not_equals<T>(self, json_type: T) -> Compare<'a>
     where
         T: Into<JsonType<'a>>,
diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs
index 3daf6655bf6d..874e6f4cb0b7 100644
--- a/quaint/src/ast/values.rs
+++ b/quaint/src/ast/values.rs
@@ -4,7 +4,6 @@ use crate::error::{Error, ErrorKind};
 #[cfg(feature = "bigdecimal")]
 use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive};
 use chrono::{DateTime, NaiveDate, NaiveTime, Utc};
-#[cfg(feature = "json")]
 use serde_json::{Number, Value as JsonValue};
 use std::{
     borrow::{Borrow, Cow},
@@ -64,8 +63,6 @@ pub enum Value<'a> {
     #[cfg(feature = "bigdecimal")]
     #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))]
     Numeric(Option<BigDecimal>),
-    #[cfg(feature = "json")]
-    #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
     /// A JSON value.
     Json(Option<serde_json::Value>),
     /// A XML value.
@@ -128,7 +125,6 @@ impl<'a> fmt::Display for Value<'a> {
             Value::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")),
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")),
-            #[cfg(feature = "json")]
             Value::Json(val) => val.as_ref().map(|v| write!(f, "{v}")),
             #[cfg(feature = "uuid")]
             Value::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")),
@@ -144,8 +140,6 @@ impl<'a> fmt::Display for Value<'a> {
     }
 }
 
-#[cfg(feature = "json")]
-#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
 impl<'a> From<Value<'a>> for serde_json::Value {
     fn from(pv: Value<'a>) -> Self {
         let res = match pv {
@@ -176,7 +170,6 @@ impl<'a> From<Value<'a>> for serde_json::Value {
             }
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()),
-            #[cfg(feature = "json")]
             Value::Json(v) => v,
             #[cfg(feature = "uuid")]
             Value::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())),
@@ -306,8 +299,6 @@ impl<'a> Value<'a> {
     }
 
     /// Creates a new JSON value.
-    #[cfg(feature = "json")]
-    #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
     pub const fn json(value: serde_json::Value) -> Self {
         Value::Json(Some(value))
     }
@@ -341,7 +332,6 @@ impl<'a> Value<'a> {
             Value::DateTime(dt) => dt.is_none(),
             Value::Date(d) => d.is_none(),
             Value::Time(t) => t.is_none(),
-            #[cfg(feature = "json")]
             Value::Json(json) => json.is_none(),
         }
     }
@@ -581,15 +571,11 @@ impl<'a> Value<'a> {
     }
 
     /// `true` if the `Value` is a JSON value.
-    #[cfg(feature = "json")]
-    #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
     pub const fn is_json(&self) -> bool {
         matches!(self, Value::Json(_))
     }
 
     /// Returns a reference to a JSON Value if of Json type, otherwise `None`.
-    #[cfg(feature = "json")]
-    #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
     pub const fn as_json(&self) -> Option<&serde_json::Value> {
         match self {
             Value::Json(Some(j)) => Some(j),
@@ -598,8 +584,6 @@ impl<'a> Value<'a> {
     }
 
     /// Transforms to a JSON Value if of Json type, otherwise `None`.
-    #[cfg(feature = "json")]
-    #[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
     pub fn into_json(self) -> Option<serde_json::Value> {
         match self {
             Value::Json(Some(j)) => Some(j),
@@ -658,7 +642,6 @@ value!(val: chrono::NaiveTime, Time, val);
 value!(val: chrono::NaiveDate, Date, val);
 #[cfg(feature = "bigdecimal")]
 value!(val: BigDecimal, Numeric, val);
-#[cfg(feature = "json")]
 value!(val: JsonValue, Json, val);
 #[cfg(feature = "uuid")]
 value!(val: Uuid, Uuid, val);
diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs
index 862993e2e3eb..d80c0aa6b0dc 100644
--- a/quaint/src/connector/mssql/conversion.rs
+++ b/quaint/src/connector/mssql/conversion.rs
@@ -24,7 +24,6 @@ impl<'a> IntoSql<'a> for &'a Value<'a> {
             Value::Array(_) => panic!("Arrays are not supported on SQL Server."),
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(val) => (*val).to_sql(),
-            #[cfg(feature = "json")]
             Value::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(),
             #[cfg(feature = "uuid")]
             Value::Uuid(val) => val.into_sql(),
diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs
index 18e1ce4cd431..41e08f5a4416 100644
--- a/quaint/src/connector/mysql/conversion.rs
+++ b/quaint/src/connector/mysql/conversion.rs
@@ -41,7 +41,6 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result<my::Params> {
             }
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())),
-            #[cfg(feature = "json")]
             Value::Json(s) => match s {
                 Some(ref s) => {
                     let json = serde_json::to_string(s)?;
@@ -223,7 +222,6 @@ impl TakeRow for my::Row {
 
         let res = match value {
             // JSON is returned as bytes.
-            #[cfg(feature = "json")]
            my::Value::Bytes(b) if column.is_json() => {
                 serde_json::from_slice(&b).map(Value::json).map_err(|_| {
                     let msg = "Unable to convert bytes to JSON";
@@ -319,7 +317,6 @@ impl TakeRow for my::Row {
             t if t.is_datetime() => Value::DateTime(None),
             t if t.is_time() => Value::Time(None),
             t if t.is_date() => Value::Date(None),
-            #[cfg(feature = "json")]
             t if t.is_json() => Value::Json(None),
             typ => {
                 let msg = format!("Value of type {typ:?} is not supported with the current configuration");
diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs
index 181557c2e48d..8ceddcd3d704 100644
--- a/quaint/src/connector/postgres/conversion.rs
+++ b/quaint/src/connector/postgres/conversion.rs
@@ -51,7 +51,6 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
             Value::Char(_) => PostgresType::CHAR,
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(_) => PostgresType::NUMERIC,
-            #[cfg(feature = "json")]
             Value::Json(_) => PostgresType::JSONB,
             Value::Xml(_) => PostgresType::XML,
             #[cfg(feature = "uuid")]
@@ -90,7 +89,6 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
                 Value::Char(_) => PostgresType::CHAR_ARRAY,
                 #[cfg(feature = "bigdecimal")]
                 Value::Numeric(_) => PostgresType::NUMERIC_ARRAY,
-                #[cfg(feature = "json")]
                 Value::Json(_) => PostgresType::JSONB_ARRAY,
                 Value::Xml(_) => PostgresType::XML_ARRAY,
                 #[cfg(feature = "uuid")]
@@ -287,7 +285,6 @@ impl GetRow for PostgresRow {
                 }
                 None => Value::Array(None),
             },
-            #[cfg(feature = "json")]
             PostgresType::JSON | PostgresType::JSONB => Value::Json(row.try_get(i)?),
             PostgresType::INT2_ARRAY => match row.try_get(i)? {
                 Some(val) => {
@@ -433,7 +430,6 @@ impl GetRow for PostgresRow {
                 }
                 None => Value::Array(None),
             },
-            #[cfg(feature = "json")]
             PostgresType::JSON_ARRAY => match row.try_get(i)? {
                 Some(val) => {
                     let val: Vec<Option<serde_json::Value>> = val;
@@ -443,7 +439,6 @@ impl GetRow for PostgresRow {
                 }
                 None => Value::Array(None),
             },
-            #[cfg(feature = "json")]
             PostgresType::JSONB_ARRAY => match row.try_get(i)? {
                 Some(val) => {
                     let val: Vec<Option<serde_json::Value>> = val;
@@ -822,7 +817,6 @@ impl<'a> ToSql for Value<'a> {
                     parsed_ip_addr.to_sql(ty, out)
                 })
             }
-            #[cfg(feature = "json")]
             (Value::Text(string), &PostgresType::JSON) | (Value::Text(string), &PostgresType::JSONB) => string
                 .as_ref()
                 .map(|string| serde_json::from_str::<serde_json::Value>(string)?.to_sql(ty, out)),
@@ -861,7 +855,6 @@ impl<'a> ToSql for Value<'a> {
                 return Err(Error::builder(kind).build().into());
             }
 
-            #[cfg(feature = "json")]
             (Value::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)),
             (Value::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)),
             #[cfg(feature = "uuid")]
diff --git a/quaint/src/connector/result_set.rs b/quaint/src/connector/result_set.rs
index dedc49d23ff9..b98d252a0579 100644
--- a/quaint/src/connector/result_set.rs
+++ b/quaint/src/connector/result_set.rs
@@ -5,10 +5,8 @@ pub use index::*;
 pub use result_row::*;
 
 use crate::{ast::Value, error::*};
-use std::sync::Arc;
-
-#[cfg(feature = "json")]
 use serde_json::Map;
+use std::sync::Arc;
 
 /// Encapsulates a set of results and their respective column names.
 #[derive(Debug, Default)]
@@ -108,8 +106,6 @@ impl Iterator for ResultSetIterator {
     }
 }
 
-#[cfg(feature = "json")]
-#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
 impl From<ResultSet> for serde_json::Value {
     fn from(result_set: ResultSet) -> Self {
         let columns: Vec<String> = result_set.columns().iter().map(ToString::to_string).collect();
diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs
index 23f20edbabe0..ab62992e7a67 100644
--- a/quaint/src/connector/sqlite/conversion.rs
+++ b/quaint/src/connector/sqlite/conversion.rs
@@ -268,7 +268,6 @@ impl<'a> ToSql for Value<'a> {
             Value::Numeric(d) => d
                 .as_ref()
                 .map(|d| ToSqlOutput::from(d.to_string().parse::<f64>().expect("BigDecimal is not a f64."))),
-            #[cfg(feature = "json")]
             Value::Json(value) => value.as_ref().map(|value| {
                 let stringified = serde_json::to_string(value)
                     .map_err(|err| RusqlError::ToSqlConversionFailure(Box::new(err)))
diff --git a/quaint/src/error.rs b/quaint/src/error.rs
index 73f88dc90b30..5ca712c7be71 100644
--- a/quaint/src/error.rs
+++ b/quaint/src/error.rs
@@ -326,8 +326,6 @@ impl From for Error {
     }
 }
 
-#[cfg(feature = "json")]
-#[cfg_attr(feature = "docs", doc(cfg(feature = "json")))]
 impl From<serde_json::Error> for Error {
     fn from(_: serde_json::Error) -> Self {
         Self::builder(ErrorKind::conversion("Malformed JSON data.")).build()
diff --git a/quaint/src/serde.rs b/quaint/src/serde.rs
index 7bd5d10f72c0..85b605aec034 100644
--- a/quaint/src/serde.rs
+++ b/quaint/src/serde.rs
@@ -140,14 +140,12 @@ impl<'de> Deserializer<'de> for ValueDeserializer<'de> {
             #[cfg(feature = "uuid")]
             Value::Uuid(None) => visitor.visit_none(),
 
-            #[cfg(feature = "json")]
             Value::Json(Some(value)) => {
                 let de = value.into_deserializer();
                 de.deserialize_any(visitor)
                     .map_err(|err| serde::de::value::Error::custom(format!("Error deserializing JSON value: {err}")))
             }
 
-            #[cfg(feature = "json")]
             Value::Json(None) => visitor.visit_none(),
 
             Value::Xml(Some(s)) => visitor.visit_string(s.into_owned()),
diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs
index 26a0162bb8f6..dc286307cf1d 100644
--- a/quaint/src/tests/query.rs
+++ b/quaint/src/tests/query.rs
@@ -1,7 +1,7 @@
 mod error;
 
 use super::test_api::*;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 use crate::ast::JsonPath;
 use crate::{
     connector::{IsolationLevel, Queryable, TransactionCapable},
@@ -1412,7 +1412,6 @@ async fn unsigned_integers_are_handled(api: &mut dyn TestApi) -> crate::Result<(
     Ok(())
 }
 
-#[cfg(feature = "json")]
 #[test_each_connector(tags("mysql", "postgresql"))]
 async fn json_filtering_works(api: &mut dyn TestApi) -> crate::Result<()> {
     let json_type = match api.system() {
@@ -1691,7 +1690,7 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> {
 }
 
 #[test_each_connector(tags("postgresql"))]
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> {
     let cte = Select::default()
         .value(val!("hello_world").alias("toto"))
@@ -1710,7 +1709,7 @@ async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> {
 }
 
 #[test_each_connector(tags("postgresql"))]
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 async fn row_to_json_pretty(api: &mut dyn TestApi) -> crate::Result<()> {
     let cte = Select::default()
         .value(val!("hello_world").alias("toto"))
@@ -2067,7 +2066,6 @@ async fn coalesce_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(feature = "json")]
 fn value_into_json(value: &Value) -> Option<serde_json::Value> {
     match value.clone() {
         // MariaDB returns JSON as text
@@ -2082,7 +2080,7 @@ fn value_into_json(value: &Value) -> Option<serde_json::Value> {
     }
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     let table = api
@@ -2133,7 +2131,7 @@ async fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 async fn json_extract_array_path_postgres(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2192,7 +2190,7 @@ async fn json_extract_array_path_postgres(api: &mut dyn TestApi, json_type: &str
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_extract_array_path_fun_on_jsonb(api: &mut dyn TestApi) -> crate::Result<()> {
     json_extract_array_path_postgres(api, "jsonb").await?;
@@ -2200,7 +2198,7 @@ async fn json_extract_array_path_fun_on_jsonb(api: &mut dyn TestApi) -> crate::R
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_extract_array_path_fun_on_json(api: &mut dyn TestApi) -> crate::Re
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_array_contains(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2287,7 +2285,7 @@ async fn json_array_contains(api: &mut dyn TestApi, json_type: &str) -> crate::R
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result<()> {
     json_array_contains(api, "jsonb").await?;
@@ -2295,7 +2293,7 @@ async fn json_array_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Resul
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Result
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_array_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     json_array_contains(api, "json").await?;
@@ -2311,7 +2309,7 @@ async fn json_array_contains_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_array_not_contains(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2345,7 +2343,7 @@ async fn json_array_not_contains(api: &mut dyn TestApi, json_type: &str) -> crat
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_not_contains_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::R
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_not_contains_fun_pg_json(api: &mut dyn TestApi) -> crate::Re
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_array_not_contains_fun(api: &mut dyn TestApi) -> crate::Result<()>
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_array_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2437,7 +2435,7 @@ async fn json_array_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Re
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<()> {
     json_array_begins_with(api, "json").await?;
@@ -2453,7 +2451,7 @@ async fn json_array_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate::Res
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_array_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     json_array_begins_with(api, "json").await?;
@@ -2461,7 +2459,7 @@ async fn json_array_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<()>
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_array_not_begins_with(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2496,7 +2494,7 @@ async fn json_array_not_begins_with(api: &mut dyn TestApi, json_type: &str) -> c
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_not_begins_with_fun_pg_jsonb(api: &mut dyn TestApi) -> crate
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_not_begins_with_fun_pg_json(api: &mut dyn TestApi) -> crate:
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_array_not_begins_with_fun(api: &mut dyn TestApi) -> crate::Result<
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_array_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2589,7 +2587,7 @@ async fn json_array_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Resu
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::Resul
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_array_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     json_array_ends_into(api, "json").await?;
@@ -2613,7 +2611,7 @@ async fn json_array_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_array_not_ends_into(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, obj {}", api.autogen_id("id"), json_type))
@@ -2649,7 +2647,7 @@ async fn json_array_not_ends_into(api: &mut dyn TestApi, json_type: &str) -> cra
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_not_ends_into_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_array_not_ends_into_fun_pg_json(api: &mut dyn TestApi) -> crate::R
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_array_not_ends_into_fun(api: &mut dyn TestApi) -> crate::Result<()
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 async fn json_gt_gte_lt_lte(api: &mut dyn TestApi, json_type: &str) -> crate::Result<()> {
     let table = api
         .create_temp_table(&format!("{}, json {}", api.autogen_id("id"), json_type))
@@ -2817,7 +2815,7 @@ async fn json_gt_gte_lt_lte(api: &mut dyn TestApi, json_type: &str) -> crate::Re
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_gt_gte_lt_lte_fun_pg_jsonb(api: &mut dyn TestApi) -> crate::Result
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "postgresql"))]
+#[cfg(feature = "postgresql")]
 #[test_each_connector(tags("postgresql"))]
 async fn json_gt_gte_lt_lte_fun_pg_json(api: &mut dyn TestApi) -> crate::Result<
     Ok(())
 }
 
-#[cfg(all(feature = "json", feature = "mysql"))]
+#[cfg(feature = "mysql")]
 #[test_each_connector(tags("mysql"))]
 async fn json_gt_gte_lt_lte_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     json_gt_gte_lt_lte(api, "json").await?;
@@ -3116,7 +3114,6 @@ async fn query_raw_typed_date(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(feature = "json")]
 #[test_each_connector(tags("postgresql"))]
 async fn query_raw_typed_json(api: &mut dyn TestApi) -> crate::Result<()> {
     use serde_json::json;
@@ -3395,7 +3392,7 @@ async fn any_in_expression(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 #[test_each_connector(tags("postgresql", "mysql"))]
 async fn json_unquote_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     let json_type = match api.system() {
@@ -3433,7 +3430,7 @@ async fn json_unquote_fun(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 #[test_each_connector(tags("postgresql", "mysql"))]
 async fn json_col_equal_json_col(api: &mut dyn TestApi) -> crate::Result<()> {
     let json_type = match api.system() {
diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs
index 57b3738f6bd4..b69d736d4a3c 100644
--- a/quaint/src/tests/types/mysql.rs
+++ b/quaint/src/tests/types/mysql.rs
@@ -208,7 +208,6 @@ test_type!(enum(
     Value::enum_variant("pollicle_dogs")
 ));
 
-#[cfg(feature = "json")]
 test_type!(json(
     mysql,
     "json",
diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs
index 954082a3c5d5..3ca40c822a77 100644
--- a/quaint/src/tests/types/postgres.rs
+++ b/quaint/src/tests/types/postgres.rs
@@ -260,7 +260,6 @@ test_type!(inet_array(
     ])
 ));
 
-#[cfg(feature = "json")]
 test_type!(json(
     postgresql,
     "json",
@@ -268,7 +267,6 @@ test_type!(json(
     Value::json(serde_json::json!({"foo": "bar"}))
 ));
 
-#[cfg(feature = "json")]
 test_type!(json_array(
     postgresql,
     "json[]",
@@ -280,7 +278,6 @@ test_type!(json_array(
     ])
 ));
 
-#[cfg(feature = "json")]
 test_type!(jsonb(
     postgresql,
     "jsonb",
@@ -288,7 +285,6 @@ test_type!(jsonb(
     Value::json(serde_json::json!({"foo": "bar"}))
 ));
 
-#[cfg(feature = "json")]
 test_type!(jsonb_array(
     postgresql,
     "jsonb[]",
diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs
index 9f4d9bcb5bcd..a77e86533174 100644
--- a/quaint/src/visitor.rs
+++ b/quaint/src/visitor.rs
@@ -121,22 +121,22 @@ pub trait Visitor<'a> {
     /// Visit a non-parameterized value.
     fn visit_raw_value(&mut self, value: Value<'a>) -> Result;
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> Result;
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> Result;
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> Result;
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> Result;
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_type_equals(&mut self, left: Expression<'a>, right: JsonType<'a>, not: bool) -> Result;
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> Result;
 
     #[cfg(any(feature = "postgresql", feature = "mysql"))]
@@ -915,7 +915,7 @@ pub trait Visitor<'a> {
                 self.write(" ")?;
                 self.visit_expression(*right)
             }
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             Compare::JsonCompare(json_compare) => match json_compare {
                 JsonCompare::ArrayContains(left, right) => self.visit_json_array_contains(*left, *right, false),
                 JsonCompare::ArrayNotContains(left, right) => self.visit_json_array_contains(*left, *right, true),
@@ -997,7 +997,7 @@ pub trait Visitor<'a> {
             FunctionType::AggregateToString(agg) => {
                 self.visit_aggregate_to_string(agg.value.as_ref().clone())?;
             }
-            #[cfg(all(feature = "json", feature = "postgresql"))]
+            #[cfg(feature = "postgresql")]
             FunctionType::RowToJson(row_to_json) => {
                 self.write("ROW_TO_JSON")?;
                 self.surround_with("(", ")", |ref mut s| s.visit_table(row_to_json.expr, false))?
@@ -1029,19 +1029,19 @@ pub trait Visitor<'a> {
                 self.write("COALESCE")?;
                 self.surround_with("(", ")", |s| s.visit_columns(coalesce.exprs))?;
             }
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonExtract(json_extract) => {
                 self.visit_json_extract(json_extract)?;
             }
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonExtractFirstArrayElem(extract) => {
                 self.visit_json_extract_first_array_item(extract)?;
             }
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonExtractLastArrayElem(extract) => {
                 self.visit_json_extract_last_array_item(extract)?;
             }
-            #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+            #[cfg(any(feature = "postgresql", feature = "mysql"))]
             FunctionType::JsonUnquote(unquote) => {
                 self.visit_json_unquote(unquote)?;
             }
diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs
index 0353684a6cd3..a5975e6249fa 100644
--- a/quaint/src/visitor/mssql.rs
+++ b/quaint/src/visitor/mssql.rs
@@ -1,5 +1,5 @@
 use super::Visitor;
-#[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+#[cfg(any(feature = "postgresql", feature = "mysql"))]
 use crate::prelude::{JsonExtract, JsonType, JsonUnquote};
 use crate::{
     ast::{
@@ -339,7 +339,7 @@ impl<'a> Visitor<'a> for Mssql<'a> {
                 return Err(builder.build());
             }
 
-            #[cfg(feature = "json")]
+
             Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))),
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(r) => r.map(|r| self.write(r)),
@@ -633,12 +633,12 @@ impl<'a> Visitor<'a> for Mssql<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on MSSQL")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_array_contains(
         &mut self,
         _left: Expression<'a>,
@@ -648,12 +648,12 @@ impl<'a> Visitor<'a> for Mssql<'a> {
         unimplemented!("JSON filtering is not yet supported on MSSQL")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result {
         unimplemented!("JSON_TYPE is not yet supported on MSSQL")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on MSSQL")
     }
@@ -681,7 +681,7 @@ impl<'a> Visitor<'a> for Mssql<'a> {
         unimplemented!("Full-text search is not yet supported on MSSQL")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_last_array_item(
         &mut self,
         _extract: crate::prelude::JsonExtractLastArrayElem<'a>,
     ) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on MSSQL")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_first_array_item(
         &mut self,
         _extract: crate::prelude::JsonExtractFirstArrayElem<'a>,
     ) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on MSSQL")
     }
@@ -1245,7 +1245,7 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "json")]
+
    fn test_raw_json() {
         let (sql, params) = Mssql::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap();
         assert_eq!("SELECT '{\"foo\":\"bar\"}'", sql);
diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs
index fc23fd8ecf64..bbe4591575ef 100644
--- a/quaint/src/visitor/mysql.rs
+++ b/quaint/src/visitor/mysql.rs
@@ -34,7 +34,6 @@ impl<'a> Mysql<'a> {
     }
 
     fn visit_numeric_comparison(&mut self, left: Expression<'a>, right: Expression<'a>, sign: &str) -> visitor::Result {
-        #[cfg(feature = "json")]
         fn json_to_quaint_value<'a>(json: serde_json::Value) -> crate::Result<Value<'a>> {
             match json {
                 serde_json::Value::String(str) => Ok(Value::text(str)),
@@ -61,7 +60,6 @@ impl<'a> Mysql<'a> {
         }
 
         match (left, right) {
-            #[cfg(feature = "json")]
             (left, right) if left.is_json_value() && right.is_fun_retuning_json() => {
                 let quaint_value = json_to_quaint_value(left.into_json_value().unwrap())?;
@@ -69,7 +67,7 @@ impl<'a> Mysql<'a> {
                 self.write(format!(" {sign} "))?;
                 self.visit_expression(right)?;
             }
-            #[cfg(feature = "json")]
+
             (left, right) if left.is_fun_retuning_json() && right.is_json_value() => {
                 let quaint_value = json_to_quaint_value(right.into_json_value().unwrap())?;
@@ -153,7 +151,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
             }
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(r) => r.map(|r| self.write(r)),
-            #[cfg(feature = "json")]
+
             Value::Json(j) => match j {
                 Some(ref j) => {
                     let s = serde_json::to_string(&j)?;
@@ -318,7 +316,6 @@ impl<'a> Visitor<'a> for Mysql<'a> {
     }
 
     fn visit_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result {
-        #[cfg(feature = "json")]
         {
             if right.is_json_expr() || left.is_json_expr() {
                 self.surround_with("(", ")", |ref mut s| {
@@ -342,15 +339,9 @@ impl<'a> Visitor<'a> for Mysql<'a> {
                 self.visit_regular_equality_comparison(left, right)
             }
         }
-
-        #[cfg(not(feature = "json"))]
-        {
-            self.visit_regular_equality_comparison(left, right)
-        }
     }
 
     fn visit_not_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result {
-        #[cfg(feature = "json")]
         {
             if right.is_json_expr() || left.is_json_expr() {
                 self.surround_with("(", ")", |ref mut s| {
@@ -374,14 +365,9 @@ impl<'a> Visitor<'a> for Mysql<'a> {
                 self.visit_regular_difference_comparison(left, right)
             }
         }
-
-        #[cfg(not(feature = "json"))]
-        {
-            self.visit_regular_difference_comparison(left, right)
-        }
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result {
         if json_extract.extract_as_string {
             self.write("JSON_UNQUOTE(")?;
@@ -406,7 +392,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result {
         self.write("JSON_CONTAINS(")?;
         self.visit_expression(left)?;
@@ -421,7 +407,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result {
         self.write("(")?;
         self.write("JSON_TYPE")?;
@@ -533,7 +519,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result {
         self.write("JSON_EXTRACT(")?;
         self.visit_expression(*extract.expr.clone())?;
@@ -546,7 +532,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result {
         self.write("JSON_EXTRACT(")?;
         self.visit_expression(*extract.expr)?;
@@ -557,7 +543,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result {
         self.write("JSON_UNQUOTE(")?;
         self.visit_expression(*json_unquote.expr)?;
@@ -724,7 +710,6 @@ mod tests {
         );
     }
 
-    #[cfg(feature = "json")]
     #[test]
     fn equality_with_a_json_value() {
         let expected = expected_values(
@@ -739,7 +724,6 @@ mod tests {
         assert_eq!(expected.1, params);
     }
 
-    #[cfg(feature = "json")]
     #[test]
     fn difference_with_a_json_value() {
         let expected = expected_values(
@@ -856,7 +840,7 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "json")]
+
     fn test_raw_json() {
         let (sql, params) = Mysql::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap();
         assert_eq!("SELECT CONVERT('{\"foo\":\"bar\"}', JSON)", sql);
@@ -911,7 +895,7 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "json")]
+
     fn test_json_negation() {
         let conditions = ConditionTree::not("json".equals(Value::Json(Some(serde_json::Value::Null))));
         let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap();
@@ -923,7 +907,7 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "json")]
+
     fn test_json_not_negation() {
         let conditions = ConditionTree::not("json".not_equals(Value::Json(Some(serde_json::Value::Null))));
         let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap();
diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs
index fa6511ea3a40..f18114ba5888 100644
--- a/quaint/src/visitor/postgres.rs
+++ b/quaint/src/visitor/postgres.rs
@@ -105,7 +105,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
                     Ok(())
                 })
             }),
-            #[cfg(feature = "json")]
+
             Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))),
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(r) => r.map(|r| self.write(r)),
@@ -226,14 +226,12 @@ impl<'a> Visitor<'a> for Postgres<'a> {
     fn visit_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result {
         // LHS must be cast to json/xml-text if the right is a json/xml-text value and vice versa.
         let right_cast = match left {
-            #[cfg(feature = "json")]
             _ if left.is_json_value() => "::jsonb",
             _ if left.is_xml_value() => "::text",
             _ => "",
         };
 
         let left_cast = match right {
-            #[cfg(feature = "json")]
             _ if right.is_json_value() => "::jsonb",
             _ if right.is_xml_value() => "::text",
             _ => "",
@@ -251,14 +249,12 @@ impl<'a> Visitor<'a> for Postgres<'a> {
     fn visit_not_equals(&mut self, left: Expression<'a>, right: Expression<'a>) -> visitor::Result {
         // LHS must be cast to json/xml-text if the right is a json/xml-text value and vice versa.
         let right_cast = match left {
-            #[cfg(feature = "json")]
             _ if left.is_json_value() => "::jsonb",
             _ if left.is_xml_value() => "::text",
             _ => "",
         };
 
         let left_cast = match right {
-            #[cfg(feature = "json")]
             _ if right.is_json_value() => "::jsonb",
             _ if right.is_xml_value() => "::text",
             _ => "",
@@ -273,7 +269,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result {
         match json_extract.path {
             #[cfg(feature = "mysql")]
@@ -313,7 +309,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result {
         self.write("(")?;
         self.visit_expression(*json_unquote.expr)?;
@@ -323,7 +319,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result {
         if not {
             self.write("( NOT ")?;
@@ -340,7 +336,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result {
         self.write("(")?;
         self.visit_expression(*extract.expr)?;
@@ -350,7 +346,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result {
         self.write("(")?;
         self.visit_expression(*extract.expr)?;
@@ -360,7 +356,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         Ok(())
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result {
         self.write("JSONB_TYPEOF")?;
         self.write("(")?;
@@ -720,7 +716,6 @@ mod tests {
         assert_eq!(expected_sql, sql);
     }
 
-    #[cfg(feature = "json")]
     #[test]
     fn equality_with_a_json_value() {
         let expected = expected_values(
@@ -735,7 +730,6 @@ mod tests {
         assert_eq!(expected.1, params);
     }
 
-    #[cfg(feature = "json")]
     #[test]
     fn equality_with_a_lhs_json_value() {
         // A bit artificial, but checks if the ::jsonb casting is done correctly on the right side as well.
@@ -752,7 +746,6 @@ mod tests {
         assert_eq!(expected.1, params);
     }
 
-    #[cfg(feature = "json")]
     #[test]
     fn difference_with_a_json_value() {
         let expected = expected_values(
@@ -768,7 +761,6 @@ mod tests {
         assert_eq!(expected.1, params);
     }
 
-    #[cfg(feature = "json")]
     #[test]
     fn difference_with_a_lhs_json_value() {
         let expected = expected_values(
@@ -898,7 +890,7 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "json")]
+
     fn test_raw_json() {
         let (sql, params) =
             Postgres::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap();
diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs
index 3ac0d0866dd1..838ead467571 100644
--- a/quaint/src/visitor/sqlite.rs
+++ b/quaint/src/visitor/sqlite.rs
@@ -103,7 +103,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> {
                 return Err(builder.build());
             }
 
-            #[cfg(feature = "json")]
+
             Value::Json(j) => match j {
                 Some(ref j) => {
                     let s = serde_json::to_string(j)?;
@@ -276,12 +276,12 @@ impl<'a> Visitor<'a> for Sqlite<'a> {
         })
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on SQLite")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_array_contains(
         &mut self,
         _left: Expression<'a>,
@@ -291,7 +291,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> {
         unimplemented!("JSON filtering is not yet supported on SQLite")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result {
         unimplemented!("JSON_TYPE is not yet supported on SQLite")
     }
@@ -316,17 +316,17 @@ impl<'a> Visitor<'a> for Sqlite<'a> {
         unimplemented!("Full-text search is not yet supported on SQLite")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_last_array_item(&mut self, _extract: JsonExtractLastArrayElem<'a>) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on SQLite")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_extract_first_array_item(&mut self, _extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on SQLite")
     }
 
-    #[cfg(all(feature = "json", any(feature = "postgresql", feature = "mysql")))]
+    #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result {
         unimplemented!("JSON filtering is not yet supported on SQLite")
     }
@@ -913,7 +913,7 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "json")]
+
     fn test_raw_json() {
         let (sql, params) = Sqlite::build(Select::default().value(serde_json::json!({ "foo": "bar" }).raw())).unwrap();
         assert_eq!("SELECT '{\"foo\":\"bar\"}'", sql);

From d4650dff6bb16e6fdfcb91eaf05a3c2ceb8c2efd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?=
Date: Thu, 14 Sep 2023 10:36:30 +0200
Subject: [PATCH 011/128] fix(qe). Metrics: fix metrics duplicates (#4234)

* Remove duplicates from metrics listing

* Instead of whitelisting prisma metrics to have an initial value, we are
  only whitelisting mobc metrics, and assigning an initial value to them.
  Then on the renaming phase (see `METRIC_RENAMES`) these metrics will be
  renamed from the mobc name to the internal prisma name, and no
  duplicates will appear.

Fixes https://github.com/prisma/prisma/issues/21069
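For illustration, the intended flow can be sketched as follows. This is a
minimal, hypothetical sketch in plain Rust, not the engine's actual types:
`metric_renames` and `render` stand in for the real `METRIC_RENAMES` table
and the exposition step in `query-engine/metrics/src/lib.rs`.

    use std::collections::HashMap;

    // Hypothetical stand-in for METRIC_RENAMES: maps mobc-internal metric
    // names to the prisma names under which they are exported.
    fn metric_renames() -> HashMap<&'static str, &'static str> {
        HashMap::from([
            ("mobc_pool_connections_open", "prisma_pool_connections_open"),
            ("mobc_pool_connections_opened_total", "prisma_pool_connections_opened_total"),
        ])
    }

    // Only mobc-named series get an initial value; the rename happens once,
    // at exposition time, so each metric renders under exactly one name and
    // no prisma/mobc duplicate pair can appear in the output.
    fn render(raw: HashMap<&'static str, f64>) -> HashMap<&'static str, f64> {
        let renames = metric_renames();
        raw.into_iter()
            .map(|(name, value)| (*renames.get(name).unwrap_or(&name), value))
            .collect()
    }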
---
 .../tests/metrics/smoke_tests.rs              | 28 +++++++++----------
 query-engine/metrics/src/lib.rs               | 17 ++++-------
 2 files changed, 19 insertions(+), 26 deletions(-)

diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
index 6b81dd2e3e87..13e0389f868c 100644
--- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
+++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
@@ -15,6 +15,7 @@ mod smoke_tests {
     }
 
     #[connector_test]
+    #[rustfmt::skip]
     async fn expected_metrics_rendered(r: Runner) -> TestResult<()> {
         let mut qe_cmd = query_engine_cmd(r.prisma_dml(), "57582");
         qe_cmd.arg("--enable-metrics");
@@ -56,21 +57,20 @@ mod smoke_tests {
                 .unwrap();
 
             // counters
-            assert!(metrics.contains("prisma_client_queries_total counter"));
-            assert!(metrics.contains("prisma_datasource_queries_total counter"));
-            assert!(metrics.contains("prisma_pool_connections_opened_total counter"));
-            assert!(metrics.contains("prisma_pool_connections_closed_total counter"));
+            assert_eq!(metrics.matches("prisma_client_queries_total counter").count(), 1);
+            assert_eq!(metrics.matches("prisma_datasource_queries_total counter").count(), 1);
+            assert_eq!(metrics.matches("prisma_pool_connections_opened_total counter").count(), 1);
+            assert_eq!(metrics.matches("prisma_pool_connections_closed_total counter").count(), 1);
             // gauges
-            assert!(metrics.contains("prisma_pool_connections_open gauge"));
-            assert!(metrics.contains("prisma_pool_connections_busy gauge"));
-            assert!(metrics.contains("prisma_pool_connections_idle gauge"));
-            assert!(metrics.contains("prisma_client_queries_active gauge"));
-            assert!(metrics.contains("prisma_client_queries_wait gauge"));
+            assert_eq!(metrics.matches("prisma_pool_connections_open gauge").count(), 1);
+            assert_eq!(metrics.matches("prisma_pool_connections_busy gauge").count(), 1);
+            assert_eq!(metrics.matches("prisma_pool_connections_idle gauge").count(), 1);
+            assert_eq!(metrics.matches("prisma_client_queries_active gauge").count(), 1);
+            assert_eq!(metrics.matches("prisma_client_queries_wait gauge").count(), 1);
             // histograms
-            assert!(metrics.contains("prisma_client_queries_duration_histogram_ms histogram"));
-            assert!(metrics.contains("prisma_client_queries_wait_histogram_ms histogram"));
-            assert!(metrics.contains("prisma_datasource_queries_duration_histogram_ms histogram"));
-        })
-        .await
+            assert_eq!(metrics.matches("prisma_client_queries_duration_histogram_ms histogram").count(), 1);
+            assert_eq!(metrics.matches("prisma_client_queries_wait_histogram_ms histogram").count(), 1);
+            assert_eq!(metrics.matches("prisma_datasource_queries_duration_histogram_ms histogram").count(), 1)
+        }).await
     }
 }
diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs
index 4fb3be90ad42..dea808de0685 100644
--- a/query-engine/metrics/src/lib.rs
+++ b/query-engine/metrics/src/lib.rs
@@ -81,15 +81,8 @@ const ACCEPT_LIST: &[&str] = &[
     MOBC_POOL_WAIT_DURATION,
     PRISMA_CLIENT_QUERIES_TOTAL,
     PRISMA_DATASOURCE_QUERIES_TOTAL,
-    PRISMA_POOL_CONNECTIONS_OPENED_TOTAL,
PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, - PRISMA_POOL_CONNECTIONS_OPEN, - PRISMA_POOL_CONNECTIONS_BUSY, - PRISMA_POOL_CONNECTIONS_IDLE, - PRISMA_CLIENT_QUERIES_WAIT, PRISMA_CLIENT_QUERIES_ACTIVE, PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, - PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS, PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, ]; @@ -148,8 +141,8 @@ pub fn describe_metrics() { absolute_counter!(PRISMA_CLIENT_QUERIES_TOTAL, 0); absolute_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL, 0); - absolute_counter!(PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, 0); - absolute_counter!(PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, 0); + absolute_counter!(MOBC_POOL_CONNECTIONS_OPENED_TOTAL, 0); + absolute_counter!(MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, 0); // gauges describe_gauge!( @@ -173,9 +166,9 @@ pub fn describe_metrics() { "Number of currently active Prisma Client queries" ); - gauge!(PRISMA_POOL_CONNECTIONS_OPEN, 0.0); - gauge!(PRISMA_POOL_CONNECTIONS_BUSY, 0.0); - gauge!(PRISMA_POOL_CONNECTIONS_IDLE, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_OPEN, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_BUSY, 0.0); + gauge!(MOBC_POOL_CONNECTIONS_IDLE, 0.0); gauge!(PRISMA_CLIENT_QUERIES_WAIT, 0.0); gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); From 3ef05ffb36cba7bf2d6b2a395abfe466195b6fbb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Thu, 14 Sep 2023 13:24:08 +0200 Subject: [PATCH 012/128] psl: simplify attribute list validation (#4237) Since type aliases are not a thing, `AttributesValidationState.attributes` does not need to be a Vec anymore (we were accumulating attributes defined on the model and on aliases), it can be an Option. This is simpler to reason about. --- psl/parser-database/src/context.rs | 24 +++++++++---------- psl/parser-database/src/context/attributes.rs | 7 +++--- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/psl/parser-database/src/context.rs b/psl/parser-database/src/context.rs index 54c06ddd9a42..450146953024 100644 --- a/psl/parser-database/src/context.rs +++ b/psl/parser-database/src/context.rs @@ -117,7 +117,7 @@ impl<'db> Context<'db> { /// - When you are done validating an attribute set, you must call /// `validate_visited_attributes()`. Otherwise, Context will helpfully panic. pub(super) fn visit_attributes(&mut self, ast_attributes: ast::AttributeContainer) { - if !self.attributes.attributes.is_empty() || !self.attributes.unused_attributes.is_empty() { + if self.attributes.attributes.is_some() || !self.attributes.unused_attributes.is_empty() { panic!( "`ctx.visit_attributes() called with {:?} while the Context is still validating previous attribute set on {:?}`", ast_attributes, @@ -125,9 +125,7 @@ impl<'db> Context<'db> { ); } - self.attributes.attributes.clear(); - self.attributes.unused_attributes.clear(); - self.attributes.extend_attributes(ast_attributes, self.ast); + self.attributes.set_attributes(ast_attributes, self.ast); } /// Look for an optional attribute with a name of the form @@ -139,8 +137,8 @@ impl<'db> Context<'db> { /// with a default that can be first, but with native types, arguments are /// purely positional. 
pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, ast::AttributeId)> {
-        let attrs =
-            iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, attr)| attr.name.name.contains('.'));
+        let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.ast)
+            .filter(|(_, attr)| attr.name.name.contains('.'));
         let mut native_type_attr = None;
         let diagnostics = &mut self.diagnostics;
@@ -173,7 +171,8 @@ impl<'db> Context<'db> {
     /// is defined.
     #[must_use]
     pub(crate) fn visit_optional_single_attr(&mut self, name: &'static str) -> bool {
-        let mut attrs = iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, a)| a.name.name == name);
+        let mut attrs =
+            iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name);
         let (first_idx, first) = match attrs.next() {
             Some(first) => first,
             None => return false,
@@ -182,7 +181,7 @@ impl<'db> Context<'db> {
 
         if attrs.next().is_some() {
             for (idx, attr) in
-                iter_attributes(&self.attributes.attributes, self.ast).filter(|(_, a)| a.name.name == name)
+                iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name)
             {
                 diagnostics.push_error(DatamodelError::new_duplicate_attribute_error(
                     &attr.name.name,
@@ -206,7 +205,7 @@ impl<'db> Context<'db> {
         let mut has_valid_attribute = false;
 
         while !has_valid_attribute {
-            let first_attr = iter_attributes(&self.attributes.attributes, self.ast)
+            let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.ast)
                 .filter(|(_, attr)| attr.name.name == name)
                 .find(|(attr_id, _)| self.attributes.unused_attributes.contains(attr_id));
             let (attr_id, attr) = if let Some(first_attr) = first_attr {
@@ -297,7 +296,8 @@ impl<'db> Context<'db> {
                 attribute.span,
             ))
         }
-        self.attributes.attributes.clear();
+
+        self.attributes.attributes = None;
         self.attributes.unused_attributes.clear();
     }
 
@@ -430,11 +430,11 @@ impl<'db> Context<'db> {
 
 // Implementation detail. Used for arguments validation.
 fn iter_attributes<'a, 'ast: 'a>(
-    attrs: &'a [ast::AttributeContainer],
+    attrs: Option<&'a ast::AttributeContainer>,
     ast: &'ast ast::SchemaAst,
 ) -> impl Iterator<Item = (ast::AttributeId, &'ast ast::Attribute)> + 'a {
     attrs
-        .iter()
+        .into_iter()
         .flat_map(move |container| ast[*container].iter().enumerate().map(|a| (a, *container)))
         .map(|((idx, attr), container)| (ast::AttributeId::new_in_container(container, idx), attr))
 }
diff --git a/psl/parser-database/src/context/attributes.rs b/psl/parser-database/src/context/attributes.rs
index 39655decf8b4..9f35f5cc3644 100644
--- a/psl/parser-database/src/context/attributes.rs
+++ b/psl/parser-database/src/context/attributes.rs
@@ -4,7 +4,7 @@ use crate::interner::StringId;
 #[derive(Default, Debug)]
 pub(super) struct AttributesValidationState {
     /// The attributes list being validated.
-    pub(super) attributes: Vec<ast::AttributeContainer>,
+    pub(super) attributes: Option<ast::AttributeContainer>,
     pub(super) unused_attributes: HashSet<ast::AttributeId>, // the _remaining_ attributes
 
     /// The attribute being validated.
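For context, the invariant this Vec-to-Option change encodes can be sketched in isolation. The types below are simplified, assumed stand-ins rather than the real parser-database definitions:

```rust
// Sketch only: with an Option, "nothing under validation" and "exactly one
// attribute container under validation" are the only representable states,
// whereas a Vec also allowed accumulating several containers.
#[derive(Default)]
struct ValidationState {
    attributes: Option<usize>, // stand-in for ast::AttributeContainer
}

impl ValidationState {
    fn set_attributes(&mut self, container: usize) {
        // Wholesale replacement: stale containers cannot pile up.
        self.attributes = Some(container);
    }

    fn validate_visited_attributes(&mut self) {
        // Return to the unambiguous idle state.
        self.attributes = None;
    }
}
```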
@@ -13,10 +13,11 @@ pub(super) struct AttributesValidationState { } impl AttributesValidationState { - pub(super) fn extend_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) { + pub(super) fn set_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) { let attribute_ids = (0..ast[attributes].len()).map(|idx| ast::AttributeId::new_in_container(attributes, idx)); + self.unused_attributes.clear(); self.unused_attributes.extend(attribute_ids); - self.attributes.push(attributes); + self.attributes = Some(attributes); } } From 79f2257e754a7450abd79d6e93b60f1b2f53203d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Fri, 15 Sep 2023 16:24:22 +0200 Subject: [PATCH 013/128] fix(qe): Prevent descriptions from mobc leaking into the presentation of metrics (#4239) * Prevent descriptions from mobc leaking into the presentation * Add a condition over the text, to match on the expected number of metrics * Fix library engine use of metrics initialization * Omit pool size counts * Comment on test * Do not depend on variable data, TYPE is always displayed when rendering the metric * Update query-engine/black-box-tests/tests/metrics/smoke_tests.rs Co-authored-by: Jan Piotrowski --------- Co-authored-by: Jan Piotrowski --- Cargo.lock | 1 + query-engine/black-box-tests/Cargo.toml | 1 + .../black-box-tests/tests/black_box_tests.rs | 2 + .../tests/metrics/smoke_tests.rs | 59 +++-- query-engine/metrics/src/common.rs | 32 ++- query-engine/metrics/src/lib.rs | 203 ++++++++---------- query-engine/metrics/src/registry.rs | 16 +- .../query-engine-node-api/src/engine.rs | 2 +- 8 files changed, 170 insertions(+), 146 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 66ab1a8b832d..1a4c1812e90c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,6 +323,7 @@ dependencies = [ "enumflags2", "indoc", "insta", + "query-engine-metrics", "query-engine-tests", "query-tests-setup", "reqwest", diff --git a/query-engine/black-box-tests/Cargo.toml b/query-engine/black-box-tests/Cargo.toml index 8bea1998d7c1..056ee2bcdb43 100644 --- a/query-engine/black-box-tests/Cargo.toml +++ b/query-engine/black-box-tests/Cargo.toml @@ -14,3 +14,4 @@ tokio.workspace = true user-facing-errors.workspace = true insta = "1.7.1" enumflags2 = "0.7" +query-engine-metrics = {path = "../metrics"} diff --git a/query-engine/black-box-tests/tests/black_box_tests.rs b/query-engine/black-box-tests/tests/black_box_tests.rs index 6c2028e1fe0f..d3e6c7065b45 100644 --- a/query-engine/black-box-tests/tests/black_box_tests.rs +++ b/query-engine/black-box-tests/tests/black_box_tests.rs @@ -4,3 +4,5 @@ mod helpers; mod metrics; mod protocols; + +use query_engine_metrics; diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs index 13e0389f868c..8542f753b78e 100644 --- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs +++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs @@ -55,22 +55,57 @@ mod smoke_tests { .text() .await .unwrap(); + + // I would have loved to use insta in here and check the snapshot but the order of the metrics is not guaranteed + // And I opted for the manual checking of invariant data that provided enough confidence instead // counters - assert_eq!(metrics.matches("prisma_client_queries_total counter").count(), 1); - assert_eq!(metrics.matches("prisma_datasource_queries_total counter").count(), 1); - assert_eq!(metrics.matches("prisma_pool_connections_opened_total 
counter").count(), 1); - assert_eq!(metrics.matches("prisma_pool_connections_closed_total counter").count(), 1); + assert_eq!(metrics.matches("# HELP prisma_client_queries_total The total number of Prisma Client queries executed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_total The total number of datasource queries executed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_closed_total The total number of pool connections closed").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_closed_total counter").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_opened_total The total number of pool connections opened").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_opened_total counter").count(), 1); + // gauges - assert_eq!(metrics.matches("prisma_pool_connections_open gauge").count(), 1); - assert_eq!(metrics.matches("prisma_pool_connections_busy gauge").count(), 1); - assert_eq!(metrics.matches("prisma_pool_connections_idle gauge").count(), 1); - assert_eq!(metrics.matches("prisma_client_queries_active gauge").count(), 1); - assert_eq!(metrics.matches("prisma_client_queries_wait gauge").count(), 1); + assert_eq!(metrics.matches("# HELP prisma_client_queries_active The number of currently active Prisma Client queries").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_active gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for an free connection").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_busy gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_idle The number of pool connections that are not busy running a query").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_idle gauge").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_pool_connections_open The number of pool connections currently open").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_pool_connections_open gauge").count(), 1); + // histograms - assert_eq!(metrics.matches("prisma_client_queries_duration_histogram_ms histogram").count(), 1); - assert_eq!(metrics.matches("prisma_client_queries_wait_histogram_ms histogram").count(), 1); - assert_eq!(metrics.matches("prisma_datasource_queries_duration_histogram_ms histogram").count(), 1) + assert_eq!(metrics.matches("# HELP prisma_client_queries_duration_histogram_ms The distribution of the time Prisma Client queries took to run end to end").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_duration_histogram_ms histogram").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_client_queries_wait_histogram_ms The distribution of the time all datasource queries spent waiting for a free connection").count(), 1); + assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait_histogram_ms histogram").count(), 1); + + assert_eq!(metrics.matches("# HELP prisma_datasource_queries_duration_histogram_ms The 
distribution of the time datasource queries took to run").count(), 1);
+            assert_eq!(metrics.matches("# TYPE prisma_datasource_queries_duration_histogram_ms histogram").count(), 1);
+
+            // Check that exactly as many metrics exist as are accepted
+            let accepted_metric_count = query_engine_metrics::ACCEPT_LIST.len();
+            let displayed_metric_count = metrics.matches("# TYPE").count();
+            let non_prisma_metric_count = displayed_metric_count - metrics.matches("# TYPE prisma").count();
+
+            assert_eq!(displayed_metric_count, accepted_metric_count);
+            assert_eq!(non_prisma_metric_count, 0);
+
         }).await
     }
 }
diff --git a/query-engine/metrics/src/common.rs b/query-engine/metrics/src/common.rs
index 76549a4bb2dc..c859e142b533 100644
--- a/query-engine/metrics/src/common.rs
+++ b/query-engine/metrics/src/common.rs
@@ -52,7 +52,30 @@ pub(crate) struct Metric {
 }
 
 impl Metric {
-    pub fn new(key: Key, description: String, value: MetricValue, global_labels: HashMap<String, String>) -> Self {
+    pub(crate) fn renamed(
+        key: Key,
+        descriptions: &HashMap<String, String>,
+        value: MetricValue,
+        global_labels: &HashMap<String, String>,
+    ) -> Self {
+        match crate::METRIC_RENAMES.get(key.name()) {
+            Some((new_key, new_description)) => Self::new(
+                Key::from_parts(new_key.to_string(), key.labels()),
+                new_description.to_string(),
+                value,
+                global_labels.clone(),
+            ),
+            None => {
+                let description = descriptions
+                    .get(key.name())
+                    .map(|s| s.to_string())
+                    .unwrap_or(String::new());
+                Self::new(key, description, value, global_labels.clone())
+            }
+        }
+    }
+
+    fn new(key: Key, description: String, value: MetricValue, global_labels: HashMap<String, String>) -> Self {
         let (name, labels) = key.into_parts();
 
         let mut labels_map: HashMap<String, String> = labels
@@ -62,13 +85,8 @@ impl Metric {
 
         labels_map.extend(global_labels);
 
-        let mut key = name.as_str();
-        if let Some(rename) = crate::METRIC_RENAMES.get(key) {
-            key = rename;
-        }
-
         Self {
-            key: key.to_string(),
+            key: name.as_str().to_string(),
             value,
             description,
             labels: labels_map,
diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs
index dea808de0685..7f34f84a8612 100644
--- a/query-engine/metrics/src/lib.rs
+++ b/query-engine/metrics/src/lib.rs
@@ -34,7 +34,8 @@ use once_cell::sync::Lazy;
 use recorder::*;
 pub use registry::MetricRegistry;
 use serde::Deserialize;
-use std::{collections::HashMap, sync::Once};
+use std::collections::HashMap;
+use std::sync::Once;
 
 pub extern crate metrics;
 pub use metrics::{
@@ -42,36 +43,31 @@ pub use metrics::{
     increment_counter, increment_gauge,
 };
 
-// Dependency metrics names emitted by the connector pool implementation (mobc) that will be renamed
-// using the `METRIC_RENAMES` map.
-pub const MOBC_POOL_CONNECTIONS_OPENED_TOTAL: &str = "mobc_pool_connections_opened_total";
-pub const MOBC_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "mobc_pool_connections_closed_total";
-pub const MOBC_POOL_CONNECTIONS_OPEN: &str = "mobc_pool_connections_open";
-pub const MOBC_POOL_CONNECTIONS_BUSY: &str = "mobc_pool_connections_busy";
-pub const MOBC_POOL_CONNECTIONS_IDLE: &str = "mobc_pool_connections_idle";
-pub const MOBC_POOL_WAIT_COUNT: &str = "mobc_client_queries_wait";
-pub const MOBC_POOL_WAIT_DURATION: &str = "mobc_client_queries_wait_histogram_ms";
-
-// External metrics names that we expose.
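A rough sketch of the lookup `Metric::renamed` performs above, assuming a rename map shaped like `METRIC_RENAMES` (a third-party key mapped to a `(prisma_name, description)` pair); this is illustrative, not the engine's actual code:

```rust
use std::collections::HashMap;

// Sketch only: resolve the externally visible name and description for a
// metric key. Renamed third-party (mobc) keys get the prisma-branded name
// and description; all other keys pass through with whatever description
// was registered for them.
fn resolve(
    key: &str,
    renames: &HashMap<&str, (&str, &str)>,
    descriptions: &HashMap<String, String>,
) -> (String, String) {
    match renames.get(key) {
        Some((name, description)) => (name.to_string(), description.to_string()),
        None => (key.to_string(), descriptions.get(key).cloned().unwrap_or_default()),
    }
}
```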
-// counters
-pub const PRISMA_CLIENT_QUERIES_TOTAL: &str = "prisma_client_queries_total";
-pub const PRISMA_DATASOURCE_QUERIES_TOTAL: &str = "prisma_datasource_queries_total";
-pub const PRISMA_POOL_CONNECTIONS_OPENED_TOTAL: &str = "prisma_pool_connections_opened_total";
-pub const PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "prisma_pool_connections_closed_total";
-// gauges
-pub const PRISMA_POOL_CONNECTIONS_OPEN: &str = "prisma_pool_connections_open";
-pub const PRISMA_POOL_CONNECTIONS_BUSY: &str = "prisma_pool_connections_busy";
-pub const PRISMA_POOL_CONNECTIONS_IDLE: &str = "prisma_pool_connections_idle";
-pub const PRISMA_CLIENT_QUERIES_WAIT: &str = "prisma_client_queries_wait";
-pub const PRISMA_CLIENT_QUERIES_ACTIVE: &str = "prisma_client_queries_active";
-// histograms
-pub const PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_client_queries_duration_histogram_ms";
-pub const PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS: &str = "prisma_client_queries_wait_histogram_ms";
-pub const PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_datasource_queries_duration_histogram_ms";
-
-// We need a list of acceptable metrics, we don't want to accidentally process metrics emitted by a
-// third party library
-const ACCEPT_LIST: &[&str] = &[
+// Metrics that we emit from the engines. Third-party metrics emitted by libraries, which we rename, are omitted here.
+pub const PRISMA_CLIENT_QUERIES_TOTAL: &str = "prisma_client_queries_total"; // counter
+pub const PRISMA_DATASOURCE_QUERIES_TOTAL: &str = "prisma_datasource_queries_total"; // counter
+pub const PRISMA_CLIENT_QUERIES_ACTIVE: &str = "prisma_client_queries_active"; // gauge
+pub const PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_client_queries_duration_histogram_ms"; // histogram
+pub const PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS: &str = "prisma_datasource_queries_duration_histogram_ms"; // histogram
+
+// Metrics emitted by the connector pool implementation (mobc) that will be renamed using the `METRIC_RENAMES` map.
+const MOBC_POOL_CONNECTIONS_OPENED_TOTAL: &str = "mobc_pool_connections_opened_total"; // counter
+const MOBC_POOL_CONNECTIONS_CLOSED_TOTAL: &str = "mobc_pool_connections_closed_total"; // counter
+const MOBC_POOL_CONNECTIONS_OPEN: &str = "mobc_pool_connections_open"; // gauge
+const MOBC_POOL_CONNECTIONS_BUSY: &str = "mobc_pool_connections_busy"; // gauge
+const MOBC_POOL_CONNECTIONS_IDLE: &str = "mobc_pool_connections_idle"; // gauge
+const MOBC_POOL_WAIT_COUNT: &str = "mobc_client_queries_wait"; // gauge
+const MOBC_POOL_WAIT_DURATION: &str = "mobc_client_queries_wait_histogram_ms"; // histogram
+
+/// Accept list: both first-party metrics (emitted by the query engine) and third-party metrics (emitted by mobc and renamed)
+pub const ACCEPT_LIST: &[&str] = &[
+    // first-party
+    PRISMA_CLIENT_QUERIES_TOTAL,
+    PRISMA_DATASOURCE_QUERIES_TOTAL,
+    PRISMA_CLIENT_QUERIES_ACTIVE,
+    PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS,
+    PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS,
+    // third-party, emitted by mobc
     MOBC_POOL_CONNECTIONS_OPENED_TOTAL,
     MOBC_POOL_CONNECTIONS_CLOSED_TOTAL,
     MOBC_POOL_CONNECTIONS_OPEN,
@@ -79,120 +75,97 @@ const ACCEPT_LIST: &[&str] = &[
     MOBC_POOL_CONNECTIONS_IDLE,
     MOBC_POOL_WAIT_COUNT,
     MOBC_POOL_WAIT_DURATION,
-    PRISMA_CLIENT_QUERIES_TOTAL,
-    PRISMA_DATASOURCE_QUERIES_TOTAL,
-    PRISMA_CLIENT_QUERIES_ACTIVE,
-    PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS,
-    PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS,
 ];
 
-// Some of the metrics we receive have their internal names, and we need to expose them under a different
-// name, this map translates from the internal names used by mobc to the external names we want to expose
-static METRIC_RENAMES: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
+/// Maps each accepted metric emitted by a third party (currently only the connection pool library
+/// mobc) to an internal, accepted metric name plus the description displayed to users. This is used
+/// to rebrand third-party metrics as accepted, Prisma-specific ones.
+#[rustfmt::skip]
+static METRIC_RENAMES: Lazy<HashMap<&'static str, (&'static str, &'static str)>> = Lazy::new(|| {
     HashMap::from([
-        (MOBC_POOL_CONNECTIONS_OPENED_TOTAL, PRISMA_POOL_CONNECTIONS_OPENED_TOTAL),
-        (MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL),
-        (MOBC_POOL_CONNECTIONS_OPEN, PRISMA_POOL_CONNECTIONS_OPEN),
-        (MOBC_POOL_CONNECTIONS_BUSY, PRISMA_POOL_CONNECTIONS_BUSY),
-        (MOBC_POOL_CONNECTIONS_IDLE, PRISMA_POOL_CONNECTIONS_IDLE),
-        (MOBC_POOL_WAIT_COUNT, PRISMA_CLIENT_QUERIES_WAIT),
-        (MOBC_POOL_WAIT_DURATION, PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS),
+        (MOBC_POOL_CONNECTIONS_OPENED_TOTAL, ("prisma_pool_connections_opened_total", "The total number of pool connections opened")),
+        (MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, ("prisma_pool_connections_closed_total", "The total number of pool connections closed")),
+        (MOBC_POOL_CONNECTIONS_OPEN, ("prisma_pool_connections_open", "The number of pool connections currently open")),
+        (MOBC_POOL_CONNECTIONS_BUSY, ("prisma_pool_connections_busy", "The number of pool connections currently executing datasource queries")),
+        (MOBC_POOL_CONNECTIONS_IDLE, ("prisma_pool_connections_idle", "The number of pool connections that are not busy running a query")),
+        (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for a free connection")),
+        (MOBC_POOL_WAIT_DURATION, ("prisma_client_queries_wait_histogram_ms", "The distribution of the time all datasource queries spent waiting for a free connection")),
     ])
 });
 
-// At the moment the histogram is only used for timings. So the bounds are hard coded here
-// The buckets are for ms
-pub(crate) const HISTOGRAM_BOUNDS: [f64; 10] = [0.0, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, 5000.0, 50000.0];
+pub fn setup() {
+    set_recorder();
+    initialize_metrics();
+}
 
-#[derive(PartialEq, Eq, Debug, Deserialize)]
-pub enum MetricFormat {
-    #[serde(alias = "json")]
-    Json,
-    #[serde(alias = "prometheus")]
-    Prometheus,
+static METRIC_RECORDER: Once = Once::new();
+
+fn set_recorder() {
+    METRIC_RECORDER.call_once(|| {
+        metrics::set_boxed_recorder(Box::new(MetricRecorder)).unwrap();
+    });
 }
 
-pub fn setup() {
-    set_recorder();
-    describe_metrics();
+/// Initialize metrics descriptions and values
+pub fn initialize_metrics() {
+    initialize_metrics_descriptions();
+    initialize_metrics_values();
 }
 
-// Describe all metric here so that every time for create
-// a new metric registry for a Query Instance the descriptions
-// will be in place
-pub fn describe_metrics() {
-    // counters
+/// Describe all first-party metrics that we record in prisma-engines. Metrics recorded by third parties,
+/// like mobc, are described by those third parties but ignored here and replaced by the descriptions in
+/// the METRIC_RENAMES map.
+fn initialize_metrics_descriptions() { describe_counter!( PRISMA_CLIENT_QUERIES_TOTAL, - "Total number of Prisma Client queries executed" + "The total number of Prisma Client queries executed" ); describe_counter!( PRISMA_DATASOURCE_QUERIES_TOTAL, - "Total number of Datasource Queries executed" + "The total number of datasource queries executed" ); - describe_counter!( - PRISMA_POOL_CONNECTIONS_OPENED_TOTAL, - "Total number of Pool Connections opened" + describe_gauge!( + PRISMA_CLIENT_QUERIES_ACTIVE, + "The number of currently active Prisma Client queries" ); - describe_counter!( - PRISMA_POOL_CONNECTIONS_CLOSED_TOTAL, - "Total number of Pool Connections closed" + describe_histogram!( + PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, + "The distribution of the time Prisma Client queries took to run end to end" + ); + describe_histogram!( + PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, + "The distribution of the time datasource queries took to run" ); +} +/// Initialize all metrics values (first and third-party) +/// +/// FIXME: https://github.com/prisma/prisma/issues/21070 +/// Histograms are excluded, as their initialization will alter the histogram values. +/// (i.e. histograms don't have a neutral value, like counters or gauges) +fn initialize_metrics_values() { absolute_counter!(PRISMA_CLIENT_QUERIES_TOTAL, 0); absolute_counter!(PRISMA_DATASOURCE_QUERIES_TOTAL, 0); + gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); absolute_counter!(MOBC_POOL_CONNECTIONS_OPENED_TOTAL, 0); absolute_counter!(MOBC_POOL_CONNECTIONS_CLOSED_TOTAL, 0); - - // gauges - describe_gauge!( - PRISMA_POOL_CONNECTIONS_OPEN, - "Number of currently open Pool Connections (able to execute a datasource query)" - ); - describe_gauge!( - PRISMA_POOL_CONNECTIONS_BUSY, - "Number of currently busy Pool Connections (executing a datasource query)" - ); - describe_gauge!( - PRISMA_POOL_CONNECTIONS_IDLE, - "Number of currently unused Pool Connections (waiting for the next datasource query to run)" - ); - describe_gauge!( - PRISMA_CLIENT_QUERIES_WAIT, - "Number of Prisma Client queries currently waiting for a connection" - ); - describe_gauge!( - PRISMA_CLIENT_QUERIES_ACTIVE, - "Number of currently active Prisma Client queries" - ); - gauge!(MOBC_POOL_CONNECTIONS_OPEN, 0.0); gauge!(MOBC_POOL_CONNECTIONS_BUSY, 0.0); gauge!(MOBC_POOL_CONNECTIONS_IDLE, 0.0); - gauge!(PRISMA_CLIENT_QUERIES_WAIT, 0.0); - gauge!(PRISMA_CLIENT_QUERIES_ACTIVE, 0.0); - - // histograms - describe_histogram!( - PRISMA_CLIENT_QUERIES_WAIT_HISTOGRAM_MS, - "Histogram of the wait time of all Prisma Client Queries in ms" - ); - describe_histogram!( - PRISMA_DATASOURCE_QUERIES_DURATION_HISTOGRAM_MS, - "Histogram of the duration of all executed Datasource Queries in ms" - ); - describe_histogram!( - PRISMA_CLIENT_QUERIES_DURATION_HISTOGRAM_MS, - "Histogram of the duration of all executed Prisma Client queries in ms" - ); + gauge!(MOBC_POOL_WAIT_COUNT, 0.0); } -static METRIC_RECORDER: Once = Once::new(); +// At the moment the histogram is only used for timings. 
So the bounds are hard coded here +// The buckets are for ms +pub(crate) const HISTOGRAM_BOUNDS: [f64; 10] = [0.0, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0, 5000.0, 50000.0]; -fn set_recorder() { - METRIC_RECORDER.call_once(|| { - metrics::set_boxed_recorder(Box::new(MetricRecorder)).unwrap(); - }); +#[derive(PartialEq, Eq, Debug, Deserialize)] +pub enum MetricFormat { + #[serde(alias = "json")] + Json, + #[serde(alias = "prometheus")] + Prometheus, } #[cfg(test)] diff --git a/query-engine/metrics/src/registry.rs b/query-engine/metrics/src/registry.rs index 3f4a892b7088..6530edbe8764 100644 --- a/query-engine/metrics/src/registry.rs +++ b/query-engine/metrics/src/registry.rs @@ -160,20 +160,16 @@ impl MetricRegistry { let mut counters: Vec = counter_handles .into_iter() .map(|(key, counter)| { - let key_name = key.name(); let value = counter.get_inner().load(Ordering::Acquire); - let description = descriptions.get(key_name).cloned().unwrap_or_default(); - Metric::new(key, description, MetricValue::Counter(value), global_labels.clone()) + Metric::renamed(key, &descriptions, MetricValue::Counter(value), &global_labels) }) .collect(); let mut gauges: Vec = gauge_handles .into_iter() .map(|(key, gauge)| { - let key_name = key.name(); - let description = descriptions.get(key_name).cloned().unwrap_or_default(); let value = f64::from_bits(gauge.get_inner().load(Ordering::Acquire)); - Metric::new(key, description, MetricValue::Gauge(value), global_labels.clone()) + Metric::renamed(key, &descriptions, MetricValue::Gauge(value), &global_labels) }) .collect(); @@ -185,13 +181,11 @@ impl MetricRegistry { histogram.record_many(s); }); - let key_name = key.name(); - let description = descriptions.get(key_name).cloned().unwrap_or_default(); - Metric::new( + Metric::renamed( key, - description, + &descriptions, MetricValue::Histogram(histogram.into()), - global_labels.clone(), + &global_labels, ) }) .collect(); diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 8b53f9dfc8c5..8d5f56ca7bfa 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -232,7 +232,7 @@ impl QueryEngine { if enable_metrics { napi_env.execute_tokio_future( async { - query_engine_metrics::describe_metrics(); + query_engine_metrics::initialize_metrics(); Ok(()) } .with_subscriber(logger.dispatcher()), From 237634ddcf9c9b981eea961f1c537fc9cba914eb Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Mon, 18 Sep 2023 18:04:06 +0200 Subject: [PATCH 014/128] perf: remove redundant read on upsert (#4248) --- .../src/query_graph_builder/write/upsert.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/query-engine/core/src/query_graph_builder/write/upsert.rs b/query-engine/core/src/query_graph_builder/write/upsert.rs index 69d362b09b70..0a01e43e73c0 100644 --- a/query-engine/core/src/query_graph_builder/write/upsert.rs +++ b/query-engine/core/src/query_graph_builder/write/upsert.rs @@ -156,6 +156,23 @@ pub(crate) fn upsert_record( } graph.create_edge(&if_node, &create_node, QueryGraphDependency::Else)?; + + // Pass-in the read parent record result to the update node RecordFilter to avoid a redundant read. 
+ graph.create_edge( + &read_parent_records_node, + &update_node, + QueryGraphDependency::ProjectedDataDependency( + model_id.clone(), + Box::new(move |mut update_node, parent_ids| { + if let Node::Query(Query::Write(WriteQuery::UpdateRecord(ref mut ur))) = update_node { + ur.set_selectors(parent_ids); + } + + Ok(update_node) + }), + ), + )?; + graph.create_edge( &update_node, &read_node_update, From 23d1a7db23c7565cffd429e0160953a447e3e4f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Tue, 19 Sep 2023 14:31:20 +0200 Subject: [PATCH 015/128] test(qe): driver adapters connector test kit (#4190) Co-authored-by: Sergey Tatarintsev Co-authored-by: Miguel Fernandez --- Cargo.lock | 2 + .../query-engine-tests/tests/new/metrics.rs | 4 +- .../queries/batch/transactional_batch.rs | 2 +- .../writes/top_level_mutations/create.rs | 15 - .../writes/top_level_mutations/create_many.rs | 17 -- .../writes/top_level_mutations/update.rs | 15 - .../writes/top_level_mutations/update_many.rs | 15 - .../query-tests-setup/Cargo.toml | 2 + .../query-tests-setup/src/connector_tag/js.rs | 14 + .../src/connector_tag/js/node_process.rs | 171 +++++++++++ .../src/connector_tag/mod.rs | 23 +- .../query-tests-setup/src/error.rs | 6 + .../query-tests-setup/src/lib.rs | 19 +- .../query-tests-setup/src/query_result.rs | 227 ++++++++++++++- .../src/runner/json_adapter/request.rs | 42 +-- .../query-tests-setup/src/runner/mod.rs | 266 +++++++++++++++--- .../sql-query-connector/src/database/js.rs | 2 +- query-engine/core/src/executor/mod.rs | 10 +- .../core/src/interactive_transactions/mod.rs | 3 +- query-engine/core/src/lib.rs | 5 +- .../js/adapter-neon/src/neon.ts | 2 +- .../driver-adapters/js/adapter-pg/src/pg.ts | 9 +- .../connector-test-kit-executor/package.json | 20 ++ .../src/engines/JsonProtocol.ts | 78 +++++ .../src/engines/Library.ts | 42 +++ .../src/engines/QueryEngine.ts | 89 ++++++ .../src/engines/Transaction.ts | 35 +++ .../connector-test-kit-executor/src/index.ts | 179 ++++++++++++ .../src/jsonRpc.ts | 28 ++ .../js/connector-test-kit-executor/src/qe.ts | 42 +++ query-engine/driver-adapters/js/package.json | 2 +- .../driver-adapters/js/pnpm-lock.yaml | 24 +- .../driver-adapters/js/pnpm-workspace.yaml | 3 +- .../request-handlers/src/load_executor.rs | 45 ++- query-engine/request-handlers/src/response.rs | 12 +- 35 files changed, 1287 insertions(+), 183 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs create mode 100644 query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/package.json create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts create mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts diff --git a/Cargo.lock b/Cargo.lock index 1a4c1812e90c..2de4d23d8516 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ 
-3688,6 +3688,7 @@ dependencies = [ "indexmap 1.9.3", "indoc", "itertools", + "jsonrpc-core", "nom", "once_cell", "parse-hyperlinks", @@ -3702,6 +3703,7 @@ dependencies = [ "request-handlers", "serde", "serde_json", + "sql-query-connector", "strip-ansi-escapes", "thiserror", "tokio", diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index 260fd58e61fd..77a56f46c34b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -9,7 +9,7 @@ mod metrics { use query_engine_tests::*; use serde_json::Value; - #[connector_test] + #[connector_test(exclude(Js))] async fn metrics_are_recorded(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 1 }) { id }}"#), @@ -40,7 +40,7 @@ mod metrics { Ok(()) } - #[connector_test] + #[connector_test(exclude(Js))] async fn metrics_tx_do_not_go_negative(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, None).await?; runner.set_active_tx(tx_id.clone()); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs index a3a7c7d34a9b..8c6e24db67ea 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/transactional_batch.rs @@ -72,7 +72,7 @@ mod transactional { let batch_results = runner.batch(queries, true, None).await?; let batch_request_idx = batch_results.errors().get(0).unwrap().batch_request_idx(); - assert_eq!(batch_request_idx, Some(1usize)); + assert_eq!(batch_request_idx, Some(1)); Ok(()) } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs index 55cdb5a85bed..1247b3e27bea 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs @@ -353,21 +353,6 @@ mod json_create { #[connector_test(capabilities(AdvancedJsonNullability))] async fn create_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. 
- if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - createOneTestModel(data: { id: 1, json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index f82e217bb670..35a044b1473d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -360,23 +360,6 @@ mod json_create_many { #[connector_test(capabilities(AdvancedJsonNullability))] async fn create_many_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - createManyTestModel(data: [ - { id: 1, json: null }, - ]) { - count - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs index c2ed7f92cb42..4fe0726a3cc5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs @@ -764,21 +764,6 @@ mod json_update { #[connector_test(capabilities(AdvancedJsonNullability))] async fn update_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. - if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - updateOneTestModel(where: { id: 1 }, data: { json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs index fd0068761a55..7e969e21cdce 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs @@ -386,21 +386,6 @@ mod json_update_many { #[connector_test(capabilities(AdvancedJsonNullability))] async fn update_json_errors(runner: Runner) -> TestResult<()> { - // On the JSON protocol, this succeeds because `null` is serialized as JSON. - // It doesn't matter since the client does _not_ allow to send null values, but only DbNull or JsonNull. 
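The assertions removed in these hunks encoded a protocol asymmetry: a bare `null` is itself valid JSON on the JSON protocol, so only the GraphQL protocol rejected it with error 2009. A sketch of the three-way distinction the client exposes instead (illustrative types, not Prisma's actual wire format):

```rust
// Illustrative only: the cases a nullable Json field must distinguish.
// A bare `null` is ambiguous between the last two, which is why the client
// requires an explicit marker (DbNull or JsonNull) rather than plain null.
enum JsonInput {
    DbNull,                   // store SQL NULL in the column
    JsonNull,                 // store the JSON value `null`
    Value(serde_json::Value), // store any other JSON value
}
```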
- if runner.protocol().is_graphql() { - assert_error!( - &runner, - r#"mutation { - updateManyTestModel(where: { id: 1 }, data: { json: null }) { - json - } - }"#, - 2009, - "A value is required but not set" - ); - } - assert_error!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml index be09bc26ac40..088a0d4b2d34 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-tests-setup/Cargo.toml @@ -11,6 +11,7 @@ qe-setup = { path = "../qe-setup" } request-handlers = { path = "../../request-handlers" } tokio.workspace = true query-core = { path = "../../core" } +sql-query-connector = { path = "../../connectors/sql-query-connector" } query-engine = { path = "../../query-engine"} psl.workspace = true user-facing-errors = { path = "../../../libs/user-facing-errors" } @@ -31,6 +32,7 @@ hyper = { version = "0.14", features = ["full"] } indexmap = { version = "1.0", features = ["serde-1"] } query-engine-metrics = {path = "../../metrics"} quaint.workspace = true +jsonrpc-core = "17" # Only this version is vetted, upgrade only after going through the code, # as this is a small crate with little user base. diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs new file mode 100644 index 000000000000..6e174808a38d --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs @@ -0,0 +1,14 @@ +mod node_process; + +use super::*; +use node_process::*; +use serde::de::DeserializeOwned; +use std::{collections::HashMap, sync::atomic::AtomicU64}; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; + +pub(crate) async fn executor_process_request( + method: &str, + params: serde_json::Value, +) -> Result> { + NODE_PROCESS.request(method, params).await +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs new file mode 100644 index 000000000000..189c39ec2300 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs @@ -0,0 +1,171 @@ +use super::*; +use once_cell::sync::Lazy; +use serde::de::DeserializeOwned; +use std::{fmt::Display, io::Write as _, sync::atomic::Ordering}; +use tokio::sync::{mpsc, oneshot}; + +type Result = std::result::Result>; + +#[derive(Debug)] +struct GenericError(String); + +impl Display for GenericError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl std::error::Error for GenericError {} + +pub(crate) struct ExecutorProcess { + task_handle: mpsc::Sender, + request_id_counter: AtomicU64, +} + +fn exit_with_message(status_code: i32, message: &str) -> ! { + let stdout = std::io::stdout(); + stdout.lock().write_all(message.as_bytes()).unwrap(); + std::process::exit(status_code) +} + +impl ExecutorProcess { + fn new() -> Result { + let (sender, receiver) = mpsc::channel::(300); + + std::thread::spawn(|| match start_rpc_thread(receiver) { + Ok(()) => (), + Err(err) => { + exit_with_message(1, &err.to_string()); + } + }); + + Ok(ExecutorProcess { + task_handle: sender, + request_id_counter: Default::default(), + }) + } + + /// Convenient façade. Allocates more than necessary, but this is only for testing. 
+ #[tracing::instrument(skip(self))] + pub(crate) async fn request(&self, method: &str, params: serde_json::Value) -> Result { + let (sender, receiver) = oneshot::channel(); + let params = if let serde_json::Value::Object(params) = params { + params + } else { + panic!("params aren't an object") + }; + let method_call = jsonrpc_core::MethodCall { + jsonrpc: Some(jsonrpc_core::Version::V2), + method: method.to_owned(), + params: jsonrpc_core::Params::Map(params), + id: jsonrpc_core::Id::Num(self.request_id_counter.fetch_add(1, Ordering::Relaxed)), + }; + + self.task_handle.send((method_call, sender)).await?; + let raw_response = receiver.await?; + tracing::debug!(%raw_response); + let response = serde_json::from_value(raw_response)?; + Ok(response) + } +} + +pub(super) static NODE_PROCESS: Lazy = + Lazy::new(|| match std::thread::spawn(ExecutorProcess::new).join() { + Ok(Ok(process)) => process, + Ok(Err(err)) => exit_with_message(1, &format!("Failed to start node process. Details: {err}")), + Err(err) => { + let err = err.downcast_ref::().map(ToOwned::to_owned).unwrap_or_default(); + exit_with_message(1, &format!("Panic while trying to start node process.\nDetails: {err}")) + } + }); + +type ReqImpl = (jsonrpc_core::MethodCall, oneshot::Sender); + +fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { + use std::process::Stdio; + use tokio::process::Command; + + let env_var = match crate::EXTERNAL_TEST_EXECUTOR.as_ref() { + Some(env_var) => env_var, + None => exit_with_message(1, "start_rpc_thread() error: NODE_TEST_EXECUTOR env var is not defined"), + }; + + tokio::runtime::Builder::new_current_thread() + .enable_io() + .build() + .unwrap() + .block_on(async move { + eprintln!("Spawning test executor process at `{env_var}`"); + let process = match Command::new(env_var) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()) + .spawn() + { + Ok(process) => process, + Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process.\nDetails: {err}\n")), + }; + + let mut stdout = BufReader::new(process.stdout.unwrap()).lines(); + let mut stdin = process.stdin.unwrap(); + let mut pending_requests: HashMap> = + HashMap::new(); + + loop { + tokio::select! { + line = stdout.next_line() => { + match line { + // Two error modes in here: the external process can response with + // something that is not a jsonrpc response (basically any normal logging + // output), or it can respond with a jsonrpc response that represents a + // failure. + Ok(Some(line)) => // new response + { + match serde_json::from_str::(&line) { + Ok(response) => { + let sender = pending_requests.remove(response.id()).unwrap(); + match response { + jsonrpc_core::Output::Success(success) => { + sender.send(success.result).unwrap(); + } + jsonrpc_core::Output::Failure(err) => { + panic!("error response from jsonrpc: {err:?}") + } + } + } + Err(err) => { + tracing::error!(%err, "error when decoding response from child node process. 
Response was: `{}`", &line); + continue + } + }; + } + Ok(None) => // end of the stream + { + exit_with_message(1, "child node process stdout closed") + } + Err(err) => // log it + { + tracing::error!(%err, "Error when reading from child node process"); + } + } + } + request = receiver.recv() => { + match request { + None => // channel closed + { + exit_with_message(1, "The json-rpc client channel was closed"); + } + Some((request, response_sender)) => { + pending_requests.insert(request.id.clone(), response_sender); + let mut req = serde_json::to_vec(&request).unwrap(); + req.push(b'\n'); + stdin.write_all(&req).await.unwrap(); + } + } + } + } + } + }); + + Ok(()) +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index 035acedb5696..cf320b4fbfca 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -1,4 +1,5 @@ mod cockroachdb; +mod js; mod mongodb; mod mysql; mod postgres; @@ -6,16 +7,18 @@ mod sql_server; mod sqlite; mod vitess; -pub use mysql::*; +pub use mysql::MySqlVersion; pub(crate) use cockroachdb::*; +pub(crate) use js::*; pub(crate) use mongodb::*; +pub(crate) use mysql::*; pub(crate) use postgres::*; pub(crate) use sql_server::*; pub(crate) use sqlite::*; pub(crate) use vitess::*; -use crate::{datamodel_rendering::DatamodelRenderer, BoxFuture, TestError, CONFIG}; +use crate::{datamodel_rendering::DatamodelRenderer, BoxFuture, TestError, CONFIG, EXTERNAL_TEST_EXECUTOR}; use psl::datamodel_connector::ConnectorCapabilities; use std::{convert::TryFrom, fmt}; @@ -24,7 +27,7 @@ pub trait ConnectorTagInterface { /// The name of the datamodel provider for this connector. /// Must match valid datamodel provider strings. - fn datamodel_provider(&self) -> &'static str; + fn datamodel_provider(&self) -> &str; /// Returns the renderer to be used for templating the datamodel (the models portion). fn datamodel_renderer(&self) -> Box; @@ -35,7 +38,7 @@ pub trait ConnectorTagInterface { /// Defines where relational constraints are handled: /// - "prisma" is handled in the Query Engine core /// - "foreignKeys" lets the database handle them - fn relation_mode(&self) -> &'static str { + fn relation_mode(&self) -> &str { "foreignKeys" } } @@ -299,10 +302,14 @@ pub(crate) fn should_run( .any(|only| ConnectorVersion::try_from(*only).unwrap().matches_pattern(&version)); } - if exclude - .iter() - .any(|excl| ConnectorVersion::try_from(*excl).unwrap().matches_pattern(&version)) - { + if EXTERNAL_TEST_EXECUTOR.is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { + println!("Excluded test execution for JS driver adapters. Skipping test"); + return false; + }; + + if exclude.iter().any(|excl| { + ConnectorVersion::try_from(*excl).map_or(false, |connector_version| connector_version.matches_pattern(&version)) + }) { println!("Connector excluded. 
Skipping test."); return false; } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs index 79fb457015be..041c63f9dd45 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/error.rs @@ -6,6 +6,9 @@ use thiserror::Error; #[allow(clippy::large_enum_variant)] #[derive(Debug, Error)] pub enum TestError { + #[error("Handler Error: {0}")] + RequestHandlerError(request_handlers::HandlerError), + #[error("Parse error: {0}")] ParseError(String), @@ -26,6 +29,9 @@ pub enum TestError { #[error("Raw execute error: {0}")] RawExecute(QuaintError), + + #[error("External process error: {0}")] + External(#[from] Box), } impl TestError { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index 0cdf3d1d3ef6..f1a53f6b2f53 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -42,8 +42,16 @@ pub static ENV_LOG_LEVEL: Lazy = Lazy::new(|| std::env::var("LOG_LEVEL") pub static ENGINE_PROTOCOL: Lazy = Lazy::new(|| std::env::var("PRISMA_ENGINE_PROTOCOL").unwrap_or_else(|_| "graphql".to_owned())); +// TODO: rename env var to EXTERNAL_TEST_EXECUTOR +static EXTERNAL_TEST_EXECUTOR: Lazy> = Lazy::new(|| std::env::var("NODE_TEST_EXECUTOR").ok()); + /// Teardown of a test setup. -async fn teardown_project(datamodel: &str, db_schemas: &[&str]) -> TestResult<()> { +async fn teardown_project(datamodel: &str, db_schemas: &[&str], schema_id: Option) -> TestResult<()> { + if let Some(schema_id) = schema_id { + let params = serde_json::json!({ "schemaId": schema_id }); + crate::executor_process_request::("teardown", params).await?; + } + Ok(qe_setup::teardown(datamodel, db_schemas).await?) 
} @@ -167,7 +175,9 @@ fn run_relation_link_test_impl( test_fn(&runner, &dm).await.unwrap(); - teardown_project(&datamodel, Default::default()).await.unwrap(); + teardown_project(&datamodel, Default::default(), runner.schema_id()) + .await + .unwrap(); } .with_subscriber(test_tracing_subscriber( ENV_LOG_LEVEL.to_string(), @@ -275,10 +285,13 @@ fn run_connector_test_impl( ) .await .unwrap(); + let schema_id = runner.schema_id(); test_fn(runner).await.unwrap(); - crate::teardown_project(&datamodel, db_schemas).await.unwrap(); + crate::teardown_project(&datamodel, db_schemas, schema_id) + .await + .unwrap(); } .with_subscriber(test_tracing_subscriber( ENV_LOG_LEVEL.to_string(), diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs index 83855fde1c59..d45f4ae04c7f 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs @@ -1,15 +1,66 @@ +use query_core::constants::custom_types; use request_handlers::{GQLError, PrismaResponse}; +use serde::{Deserialize, Serialize}; +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct SimpleGqlErrorResponse { + #[serde(skip_serializing_if = "Vec::is_empty")] + errors: Vec, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct SimpleGqlResponse { + #[serde(skip_serializing_if = "SimpleGqlResponse::data_is_empty")] + #[serde(default)] + data: serde_json::Value, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + errors: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default)] + extensions: Option, +} + +impl SimpleGqlResponse { + fn data_is_empty(data: &serde_json::Value) -> bool { + match data { + serde_json::Value::Object(o) => o.is_empty(), + serde_json::Value::Null => true, + _ => false, + } + } +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +struct SimpleGqlBatchResponse { + batch_result: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + errors: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + extensions: Option, +} -#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(untagged)] +enum Response { + Error(SimpleGqlErrorResponse), + Multi(SimpleGqlBatchResponse), + Single(SimpleGqlResponse), +} + +#[derive(Debug, Deserialize, PartialEq)] +#[serde(transparent)] pub struct QueryResult { - response: PrismaResponse, + response: Response, } impl QueryResult { pub fn failed(&self) -> bool { match self.response { - PrismaResponse::Single(ref s) => s.errors().next().is_some(), - PrismaResponse::Multi(ref m) => m.errors().next().is_some(), + Response::Error(ref s) => !s.errors.is_empty(), + Response::Single(ref s) => !s.errors.is_empty(), + Response::Multi(ref m) => !(m.errors.is_empty() && m.batch_result.iter().all(|res| res.errors.is_empty())), } } @@ -70,8 +121,13 @@ impl QueryResult { pub fn errors(&self) -> Vec<&GQLError> { match self.response { - PrismaResponse::Single(ref s) => s.errors().collect(), - PrismaResponse::Multi(ref m) => m.errors().collect(), + Response::Error(ref s) => s.errors.iter().collect(), + Response::Single(ref s) => s.errors.iter().collect(), + Response::Multi(ref m) => m + .errors + .iter() + .chain(m.batch_result.iter().flat_map(|res| res.errors.iter())) + .collect(), } } @@ -82,6 +138,20 @@ impl QueryResult { pub fn to_string_pretty(&self) -> String { 
serde_json::to_string_pretty(&self.response).unwrap() } + + /// Transform a JSON protocol response to a GraphQL protocol response, by removing the type + /// tags. + pub(crate) fn detag(&mut self) { + match &mut self.response { + Response::Error(_) => (), + Response::Single(res) => detag_value(&mut res.data), + Response::Multi(res) => { + for res in &mut res.batch_result { + detag_value(&mut res.data) + } + } + } + } } impl ToString for QueryResult { @@ -92,6 +162,149 @@ impl ToString for QueryResult { impl From for QueryResult { fn from(response: PrismaResponse) -> Self { - Self { response } + match response { + PrismaResponse::Single(res) => QueryResult { + response: Response::Single(SimpleGqlResponse { + data: serde_json::to_value(res.data).unwrap(), + errors: res.errors, + extensions: (!res.extensions.is_empty()).then(|| serde_json::to_value(&res.extensions).unwrap()), + }), + }, + PrismaResponse::Multi(reses) => QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: reses + .batch_result + .into_iter() + .map(|res| SimpleGqlResponse { + data: serde_json::to_value(&res.data).unwrap(), + errors: res.errors, + extensions: (!res.extensions.is_empty()) + .then(|| serde_json::to_value(&res.extensions).unwrap()), + }) + .collect(), + errors: reses.errors, + extensions: (!reses.extensions.is_empty()) + .then(|| serde_json::to_value(&reses.extensions).unwrap()), + }), + }, + } + } +} + +fn detag_value(val: &mut serde_json::Value) { + match val { + serde_json::Value::Object(obj) => { + if obj.len() == 2 && obj.contains_key(custom_types::TYPE) && obj.contains_key(custom_types::VALUE) { + let mut new_val = obj.remove(custom_types::VALUE).unwrap(); + detag_value(&mut new_val); + *val = new_val; + } else { + for elem in obj.values_mut() { + detag_value(elem); + } + } + } + serde_json::Value::Array(arr) => { + for elem in arr { + detag_value(elem) + } + } + _ => (), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn test_deserializing_successful_batch_response() { + let response = "{\"batchResult\":[{\"data\":{\"findUniqueTestModelOrThrow\":{\"id\":1}}},{\"data\":{\"findUniqueTestModelOrThrow\":{\"id\":2}}}]}"; + let result: QueryResult = serde_json::from_str(response).unwrap(); + + let expected = QueryResult { + response: Response::Multi(SimpleGqlBatchResponse { + batch_result: vec![ + SimpleGqlResponse { + data: json!({ + "findUniqueTestModelOrThrow": { + "id": 1, + }, + }), + errors: vec![], + extensions: None, + }, + SimpleGqlResponse { + data: json!({ + "findUniqueTestModelOrThrow": { + "id": 2, + }, + }), + errors: vec![], + extensions: None, + }, + ], + errors: vec![], + extensions: None, + }), + }; + assert_eq!(result, expected); + } + + #[test] + fn test_deserializing_error_batch_response() { + let response = r###" +{ + "batchResult":[ + { + "data":{ + "findUniqueTestModelOrThrow":{ + "id":2 + } + } + }, + { + "errors":[ + { + "error":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.", + "user_facing_error":{ + "is_panic":false, + "message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.", + "meta":{ + "cause":"Expected a record, found none." 
+                        },
+                        "error_code":"P2025"
+                    }
+                }
+            ]
+        }
+    ]
+}"###;
+        let result: QueryResult = serde_json::from_str(response).unwrap();
+
+        let expected = QueryResult {
+            response: Response::Multi(SimpleGqlBatchResponse {
+                batch_result: vec![
+                    SimpleGqlResponse {
+                        data: json!({"findUniqueTestModelOrThrow": {"id": 2}}),
+                        errors: vec![],
+                        extensions: None,
+                    },
+                    SimpleGqlResponse {
+                        data: serde_json::Value::Null,
+                        errors: vec![GQLError::from_user_facing_error(user_facing_errors::KnownError {
+                            message: "An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.".to_string(),
+                            meta: json!({"cause": "Expected a record, found none."}),
+                            error_code: std::borrow::Cow::from("P2025"),
+                        }.into())],
+                        extensions: None,
+                    },
+                ],
+                errors: vec![],
+                extensions: None,
+            }),
+        };
+        assert_eq!(result, expected);
     }
 }
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs
index 0486c291e8ba..0eee2d9e6cb6 100644
--- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs
+++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/json_adapter/request.rs
@@ -1,4 +1,4 @@
-use crate::TestResult;
+use crate::{TestError, TestResult};
 use indexmap::IndexMap;
 use itertools::Itertools;
 use prisma_models::PrismaValue;
@@ -18,24 +18,28 @@ pub struct JsonRequest;

 impl JsonRequest {
     /// Translates a GraphQL query to a JSON query. This is used to keep the same test-suite running on both protocols.
     pub fn from_graphql(gql: &str, query_schema: &QuerySchema) -> TestResult<JsonSingleQuery> {
-        let operation = GraphQLProtocolAdapter::convert_query_to_operation(gql, None).unwrap();
-        let operation_name = operation.name();
-        let schema_field = query_schema
-            .find_query_field(operation_name)
-            .unwrap_or_else(|| query_schema.find_mutation_field(operation_name).unwrap());
-        let model_name = schema_field
-            .model()
-            .map(|m| query_schema.internal_data_model.walk(m).name().to_owned());
-        let query_tag = schema_field.query_tag().unwrap().to_owned();
-        let selection = operation.into_selection();
-
-        let output = JsonSingleQuery {
-            model_name,
-            action: Action::new(query_tag),
-            query: graphql_selection_to_json_field_query(selection, &schema_field),
-        };
-
-        Ok(output)
+        match GraphQLProtocolAdapter::convert_query_to_operation(gql, None) {
+            Ok(operation) => {
+                let operation_name = operation.name();
+                let schema_field = query_schema
+                    .find_query_field(operation_name)
+                    .unwrap_or_else(|| query_schema.find_mutation_field(operation_name).unwrap());
+                let model_name = schema_field
+                    .model()
+                    .map(|m| query_schema.internal_data_model.walk(m).name().to_owned());
+                let query_tag = schema_field.query_tag().unwrap().to_owned();
+                let selection = operation.into_selection();
+
+                let output = JsonSingleQuery {
+                    model_name,
+                    action: Action::new(query_tag),
+                    query: graphql_selection_to_json_field_query(selection, &schema_field),
+                };
+
+                Ok(output)
+            }
+            Err(err) => Err(TestError::RequestHandlerError(err)),
+        }
     }
 }
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs
index e9fce19c2c15..b0367a9628c6 100644
--- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs
+++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs
@@ -1,8 +1,12 @@
 mod json_adapter;

 pub use
json_adapter::*;
+use serde::Deserialize;

-use crate::{ConnectorTag, ConnectorVersion, QueryResult, TestLogCapture, TestResult, ENGINE_PROTOCOL};
+use crate::{
+    executor_process_request, ConnectorTag, ConnectorVersion, QueryResult, TestError, TestLogCapture, TestResult,
+    ENGINE_PROTOCOL,
+};
 use colored::Colorize;
 use query_core::{
     protocol::EngineProtocol,
@@ -11,18 +15,76 @@
 };
 use query_engine_metrics::MetricRegistry;
 use request_handlers::{
-    load_executor, BatchTransactionOption, ConnectorMode, GraphqlBody, JsonBatchQuery, JsonBody, JsonSingleQuery,
-    MultiQuery, RequestBody, RequestHandler,
+    BatchTransactionOption, ConnectorMode, GraphqlBody, JsonBatchQuery, JsonBody, JsonSingleQuery, MultiQuery,
+    RequestBody, RequestHandler,
+};
+use serde_json::json;
+use std::{
+    env,
+    sync::{atomic::AtomicUsize, Arc},
 };
-use std::{env, sync::Arc};

 pub type TxResult = Result<(), user_facing_errors::Error>;

 pub(crate) type Executor = Box<dyn QueryExecutor + Send + Sync>;

+#[derive(Deserialize, Debug)]
+struct Empty {}
+
+#[derive(Deserialize, Debug)]
+#[serde(untagged)]
+enum TransactionEndResponse {
+    Error(user_facing_errors::Error),
+    Ok(Empty),
+}
+
+impl From<TransactionEndResponse> for TxResult {
+    fn from(value: TransactionEndResponse) -> Self {
+        match value {
+            TransactionEndResponse::Ok(_) => Ok(()),
+            TransactionEndResponse::Error(error) => Err(error),
+        }
+    }
+}
+
+pub enum RunnerExecutor {
+    // Builtin is a runner that uses the query engine in-process, issuing queries against a
+    // `core::InterpretingExecutor` that uses the particular connector under test in the test suite.
+    Builtin(Executor),
+
+    // External is a runner that uses an external process that responds to queries piped to its STDIN
+    // in JsonRPC format. In particular this is used to test the query engine against a node process
+    // running a library engine configured to use a JavaScript driver adapter to connect to a database.
+    //
+    // In this struct variant, usize represents the index of the schema used for the test suite to
+    // execute queries against. When the suite starts, a message with the schema and the id is sent to
+    // the external process, which will create a new instance of the library engine configured to
+    // access that schema.
+    //
+    // Every time a query is sent to the external process, it's provided the id of the schema, so the
+    // process knows how to associate the query to the instance of the library engine that will dispatch
+    // it.
+    External(usize),
+}
+
+impl RunnerExecutor {
+    async fn new_external(url: &str, schema: &str) -> TestResult<Self> {
+        static COUNTER: AtomicUsize = AtomicUsize::new(0);
+        let id = COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
+
+        executor_process_request(
+            "initializeSchema",
+            json!({ "schema": schema, "schemaId": id, "url": url }),
+        )
+        .await?;
+
+        Ok(RunnerExecutor::External(id))
+    }
+}
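To make the JsonRPC protocol described above concrete, here is a sketch of the kind of exchange the test kit has with the external process, written as TypeScript literals. The method names and parameter keys mirror the handlers in connector-test-kit-executor further down; the schema, URL, and ids are invented for illustration:

// Hypothetical JSON-RPC 2.0 messages, one per line on the executor's STDIN.
const initializeSchema = {
  jsonrpc: '2.0',
  id: 1,
  method: 'initializeSchema',
  // schemaId associates later requests with the library engine instance created here.
  params: { schemaId: 0, url: 'postgres://user:pass@localhost:5432/tests', schema: '/* Prisma datamodel */' },
}

const query = {
  jsonrpc: '2.0',
  id: 2,
  method: 'query',
  // txId stays null unless the query runs inside an interactive transaction.
  params: { schemaId: 0, txId: null, query: { modelName: 'User', action: 'findMany', query: { selection: { $scalars: true } } } },
}

console.log(JSON.stringify(initializeSchema))
console.log(JSON.stringify(query))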
+
 /// Direct engine runner.
 pub struct Runner {
-    executor: Executor,
+    executor: RunnerExecutor,
     query_schema: QuerySchemaRef,
     version: ConnectorVersion,
     connector_tag: ConnectorTag,
@@ -34,6 +96,13 @@
 }

 impl Runner {
+    pub(crate) fn schema_id(&self) -> Option<usize> {
+        match self.executor {
+            RunnerExecutor::Builtin(_) => None,
+            RunnerExecutor::External(schema_id) => Some(schema_id),
+        }
+    }
+
     pub fn prisma_dml(&self) -> &str {
         self.query_schema.internal_data_model.schema.db.source()
     }
@@ -49,18 +118,22 @@
         qe_setup::setup(&datamodel, db_schemas).await?;

         let protocol = EngineProtocol::from(&ENGINE_PROTOCOL.to_string());
-        let schema = psl::parse_schema(datamodel).unwrap();
+        let schema = psl::parse_schema(&datamodel).unwrap();
         let data_source = schema.configuration.datasources.first().unwrap();
         let url = data_source.load_url(|key| env::var(key).ok()).unwrap();

-        let connector_mode = ConnectorMode::Rust;
-        let executor = load_executor(
-            connector_mode,
-            data_source,
-            schema.configuration.preview_features(),
-            &url,
-        )
-        .await?;
+        let executor = match crate::EXTERNAL_TEST_EXECUTOR.as_ref() {
+            Some(_) => RunnerExecutor::new_external(&url, &datamodel).await?,
+            None => RunnerExecutor::Builtin(
+                request_handlers::load_executor(
+                    ConnectorMode::Rust,
+                    data_source,
+                    schema.configuration.preview_features(),
+                    &url,
+                )
+                .await?,
+            ),
+        };

         let query_schema: QuerySchemaRef = Arc::new(schema::build(Arc::new(schema), true));

         Ok(Self {
@@ -82,9 +155,33 @@
     {
         let query = query.into();

+        let executor = match &self.executor {
+            RunnerExecutor::Builtin(e) => e,
+            RunnerExecutor::External(schema_id) => match JsonRequest::from_graphql(&query, self.query_schema()) {
+                Ok(json_query) => {
+                    let response_str: String =
+                        executor_process_request("query", json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) })).await?;
+                    let mut response: QueryResult = serde_json::from_str(&response_str).unwrap();
+                    response.detag();
+                    return Ok(response);
+                }
+                // Conversion from GraphQL to JSON might fail, and in that case we should consider the error
+                // (a Handler error) as an error response.
+                Err(TestError::RequestHandlerError(err)) => {
+                    let gql_err = request_handlers::GQLError::from_handler_error(err);
+                    let gql_res = request_handlers::GQLResponse::from(gql_err);
+                    let prisma_res = request_handlers::PrismaResponse::Single(gql_res);
+                    let mut response = QueryResult::from(prisma_res);
+                    response.detag();
+                    return Ok(response);
+                }
+                Err(err) => return Err(err),
+            },
+        };
+
         tracing::debug!("Querying: {}", query.clone().green());

-        let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol);
+        let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol);

         let request_body = match self.protocol {
             EngineProtocol::Json => {
@@ -127,7 +224,20 @@

         println!("{}", query.bright_green());

-        let handler = RequestHandler::new(&*self.executor, &self.query_schema, EngineProtocol::Json);
+        let executor = match &self.executor {
+            RunnerExecutor::Builtin(e) => e,
+            RunnerExecutor::External(_) => {
+                let response_str: String = executor_process_request(
+                    "query",
+                    json!({ "query": query, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }),
+                )
+                .await?;
+                let response: QueryResult = serde_json::from_str(&response_str).unwrap();
+                return Ok(response);
+            }
+        };
+
+        let handler = RequestHandler::new(&**executor, &self.query_schema, EngineProtocol::Json);

         let serialized_query: JsonSingleQuery = serde_json::from_str(&query).unwrap();
         let request_body = RequestBody::Json(JsonBody::Single(serialized_query));
@@ -164,7 +274,12 @@
         transaction: bool,
         isolation_level: Option<String>,
     ) -> TestResult<QueryResult> {
-        let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol);
+        let executor = match &self.executor {
+            RunnerExecutor::External(_) => todo!(),
+            RunnerExecutor::Builtin(e) => e,
+        };
+
+        let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol);
         let body = RequestBody::Json(JsonBody::Batch(JsonBatchQuery {
             batch: queries
                 .into_iter()
@@ -184,7 +299,32 @@
         transaction: bool,
         isolation_level: Option<String>,
     ) -> TestResult<QueryResult> {
-        let handler = RequestHandler::new(&*self.executor, &self.query_schema, self.protocol);
+        let executor = match &self.executor {
+            RunnerExecutor::External(schema_id) => {
+                // Translate the GraphQL query to JSON
+                let batch = queries
+                    .into_iter()
+                    .map(|query| JsonRequest::from_graphql(&query, self.query_schema()))
+                    .collect::<TestResult<Vec<_>>>()
+                    .unwrap();
+                let transaction = match transaction {
+                    true => Some(BatchTransactionOption { isolation_level }),
+                    false => None,
+                };
+                let json_query = JsonBody::Batch(JsonBatchQuery { batch, transaction });
+                let response_str: String = executor_process_request(
+                    "query",
+                    json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) })
+                ).await?;
+
+                let mut response: QueryResult = serde_json::from_str(&response_str).unwrap();
+                response.detag();
+                return Ok(response);
+            }
+            RunnerExecutor::Builtin(e) => e,
+        };
+
+        let handler = RequestHandler::new(&**executor, &self.query_schema, self.protocol);
         let body = match self.protocol {
             EngineProtocol::Json => {
                 // Translate the GraphQL query to JSON
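The external arm above serializes the whole batch into a single JSON body before handing it to the executor process. Under the `JsonBatchQuery` shape defined later in JsonProtocol.ts, a hypothetical two-query batch wrapped in a transaction would look roughly like this (the model, arguments, and isolation level are invented for illustration):

const jsonBatch = {
  batch: [
    { modelName: 'User', action: 'findUnique', query: { arguments: { where: { id: 1 } }, selection: { $scalars: true } } },
    { modelName: 'User', action: 'findUnique', query: { arguments: { where: { id: 2 } }, selection: { $scalars: true } } },
  ],
  // Omitted entirely when `transaction` is false, mirroring Option/None on the Rust side.
  transaction: { isolationLevel: 'ReadCommitted' },
}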
@@ -227,31 +367,74 @@
         isolation_level: Option<String>,
     ) -> TestResult<TxId> {
         let tx_opts = TransactionOptions::new(max_acquisition_millis, valid_for_millis, isolation_level);
-
-        let id = self
-            .executor
-            .start_tx(self.query_schema.clone(), self.protocol, tx_opts)
-            .await?;
-        Ok(id)
+        match &self.executor {
+            RunnerExecutor::Builtin(executor) => {
+                let id = executor
+                    .start_tx(self.query_schema.clone(), self.protocol, tx_opts)
+                    .await?;
+                Ok(id)
+            }
+            RunnerExecutor::External(schema_id) => {
+                #[derive(Deserialize, Debug)]
+                #[serde(untagged)]
+                enum StartTransactionResponse {
+                    Ok { id: String },
+                    Error(user_facing_errors::Error),
+                }
+                let response: StartTransactionResponse =
+                    executor_process_request("startTx", json!({ "schemaId": schema_id, "options": tx_opts })).await?;
+
+                match response {
+                    StartTransactionResponse::Ok { id } => Ok(id.into()),
+                    StartTransactionResponse::Error(err) => {
+                        Err(crate::TestError::InteractiveTransactionError(err.message().into()))
+                    }
+                }
+            }
+        }
     }

     pub async fn commit_tx(&self, tx_id: TxId) -> TestResult<TxResult> {
-        let res = self.executor.commit_tx(tx_id).await;
+        match &self.executor {
+            RunnerExecutor::Builtin(executor) => {
+                let res = executor.commit_tx(tx_id).await;
+
+                if let Err(error) = res {
+                    Ok(Err(error.into()))
+                } else {
+                    Ok(Ok(()))
+                }
+            }
+            RunnerExecutor::External(schema_id) => {
+                let response: TransactionEndResponse =
+                    executor_process_request("commitTx", json!({ "schemaId": schema_id, "txId": tx_id.to_string() }))
+                        .await?;

-        if let Err(error) = res {
-            Ok(Err(error.into()))
-        } else {
-            Ok(Ok(()))
+                Ok(response.into())
+            }
         }
     }

     pub async fn rollback_tx(&self, tx_id: TxId) -> TestResult<TxResult> {
-        let res = self.executor.rollback_tx(tx_id).await;
-
-        if let Err(error) = res {
-            Ok(Err(error.into()))
-        } else {
-            Ok(Ok(()))
+        match &self.executor {
+            RunnerExecutor::Builtin(executor) => {
+                let res = executor.rollback_tx(tx_id).await;
+
+                if let Err(error) = res {
+                    Ok(Err(error.into()))
+                } else {
+                    Ok(Ok(()))
+                }
+            }
+            RunnerExecutor::External(schema_id) => {
+                let response: TransactionEndResponse = executor_process_request(
+                    "rollbackTx",
+                    json!({ "schemaId": schema_id, "txId": tx_id.to_string() }),
+                )
+                .await?;
+
+                Ok(response.into())
+            }
         }
     }

@@ -276,7 +459,18 @@
     }

     pub async fn get_logs(&mut self) -> Vec<String> {
-        self.log_capture.get_logs().await
+        let mut logs = self.log_capture.get_logs().await;
+        match &self.executor {
+            RunnerExecutor::Builtin(_) => logs,
+            RunnerExecutor::External(schema_id) => {
+                let mut external_logs: Vec<String> =
+                    executor_process_request("getLogs", json!({ "schemaId": schema_id }))
+                        .await
+                        .unwrap();
+                logs.append(&mut external_logs);
+                logs
+            }
+        }
     }

     pub fn connector_version(&self) -> &ConnectorVersion {
diff --git a/query-engine/connectors/sql-query-connector/src/database/js.rs b/query-engine/connectors/sql-query-connector/src/database/js.rs
index 5b22653647f8..0d4714871e59 100644
--- a/query-engine/connectors/sql-query-connector/src/database/js.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/js.rs
@@ -102,7 +102,7 @@ impl Connector for Js {
 // declaration, so finally I couldn't come up with anything better than wrapping a QuaintQueryable
 // in this object, and implementing TransactionCapable (and quaint::Queryable) explicitly for it.
#[derive(Clone)] -struct DriverAdapter { +pub struct DriverAdapter { connector: Arc, } diff --git a/query-engine/core/src/executor/mod.rs b/query-engine/core/src/executor/mod.rs index 35ed20ab0c55..ddbb7dfc8429 100644 --- a/query-engine/core/src/executor/mod.rs +++ b/query-engine/core/src/executor/mod.rs @@ -21,7 +21,7 @@ use crate::{ }; use async_trait::async_trait; use connector::Connector; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use tracing::Dispatch; #[async_trait] @@ -57,14 +57,14 @@ pub trait QueryExecutor: TransactionManager { fn primary_connector(&self) -> &(dyn Connector + Send + Sync); } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct TransactionOptions { /// Maximum wait time for tx acquisition in milliseconds. - #[serde(rename(deserialize = "max_wait"))] + #[serde(rename = "max_wait")] pub max_acquisition_millis: u64, /// Time in milliseconds after which the transaction rolls back automatically. - #[serde(rename(deserialize = "timeout"))] + #[serde(rename = "timeout")] pub valid_for_millis: u64, /// Isolation level to use for the transaction. @@ -72,7 +72,7 @@ pub struct TransactionOptions { /// An optional pre-defined transaction id. Some value might be provided in case we want to generate /// a new id at the beginning of the transaction - #[serde(skip_deserializing)] + #[serde(skip)] pub new_tx_id: Option, } diff --git a/query-engine/core/src/interactive_transactions/mod.rs b/query-engine/core/src/interactive_transactions/mod.rs index 79eba2bb82e5..ce125e8fa17e 100644 --- a/query-engine/core/src/interactive_transactions/mod.rs +++ b/query-engine/core/src/interactive_transactions/mod.rs @@ -1,5 +1,6 @@ use crate::CoreError; use connector::Transaction; +use serde::Deserialize; use std::fmt::Display; use tokio::time::{Duration, Instant}; @@ -37,7 +38,7 @@ pub(crate) use messages::*; /// the TransactionActorManager can reply with a helpful error message which explains that no operation can be performed on a closed transaction /// rather than an error message stating that the transaction does not exist. 
-#[derive(Debug, Clone, Hash, Eq, PartialEq)] +#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize)] pub struct TxId(String); const MINIMUM_TX_ID_LENGTH: usize = 24; diff --git a/query-engine/core/src/lib.rs b/query-engine/core/src/lib.rs index fb6806e44501..7970c96139b7 100644 --- a/query-engine/core/src/lib.rs +++ b/query-engine/core/src/lib.rs @@ -18,7 +18,10 @@ pub use self::{ query_document::*, telemetry::*, }; -pub use connector::{error::ConnectorError, Connector}; +pub use connector::{ + error::{ConnectorError, ErrorKind as ConnectorErrorKind}, + Connector, +}; mod error; mod interactive_transactions; diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index ba17da3036b6..0839523e131d 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -57,7 +57,7 @@ class NeonWsQueryable extends NeonQ const { sql, args: values } = query try { - return await this.client.query(sql, values) + return await this.client.query({ text: sql, values, rowMode: 'array'}) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index a6a4ba4b58d9..2f0fac5d2249 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -22,13 +22,14 @@ class PgQueryable const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows: results } = await this.performIO(query) + const { fields, rows } = await this.performIO(query) const columns = fields.map((field) => field.name) + const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)); const resultSet: ResultSet = { columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), - rows: results.map((result) => columns.map((column) => result[column])), + columnTypes, + rows, } return { ok: true, value: resultSet } @@ -58,7 +59,7 @@ class PgQueryable const { sql, args: values } = query try { - const result = await this.client.query(sql, values) + const result = await this.client.query({ text: sql, values, rowMode: 'array' }) return result } catch (e) { const error = e as Error diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json new file mode 100644 index 000000000000..fc04c8fabd02 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -0,0 +1,20 @@ +{ + "name": "connector-test-kit-executor", + "version": "1.0.0", + "description": "", + "main": "dist/index.js", + "scripts": { + "build": "tsup ./src/index.ts --format cjs,esm --dts", + "lint": "tsc -p ./tsconfig.build.json" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@jkomyno/prisma-adapter-pg": "workspace:*", + "@jkomyno/prisma-driver-adapter-utils": "workspace:*", + "pg": "^8.11.3", + "@types/pg": "^8.10.2" + } +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts new file mode 100644 index 000000000000..bd491db289a3 --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts @@ -0,0 +1,78 @@ 
+import * as Transaction from './Transaction'
+
+export type JsonQuery = {
+  modelName?: string
+  action: JsonQueryAction
+  query: JsonFieldSelection
+}
+
+export type JsonBatchQuery = {
+  batch: JsonQuery[]
+  transaction?: { isolationLevel?: Transaction.IsolationLevel }
+}
+
+export type JsonQueryAction =
+  | 'findUnique'
+  | 'findUniqueOrThrow'
+  | 'findFirst'
+  | 'findFirstOrThrow'
+  | 'findMany'
+  | 'createOne'
+  | 'createMany'
+  | 'updateOne'
+  | 'updateMany'
+  | 'deleteOne'
+  | 'deleteMany'
+  | 'upsertOne'
+  | 'aggregate'
+  | 'groupBy'
+  | 'executeRaw'
+  | 'queryRaw'
+  | 'runCommandRaw'
+  | 'findRaw'
+  | 'aggregateRaw'
+
+export type JsonFieldSelection = {
+  arguments?: Record<string, JsonArgumentValue>
+  selection: JsonSelectionSet
+}
+
+export type JsonSelectionSet = {
+  $scalars?: boolean
+  $composites?: boolean
+} & {
+  [fieldName: string]: boolean | JsonFieldSelection
+}
+
+export type JsonArgumentValue =
+  | number
+  | string
+  | boolean
+  | null
+  | JsonTaggedValue
+  | JsonArgumentValue[]
+  | { [key: string]: JsonArgumentValue }
+
+export type DateTaggedValue = { $type: 'DateTime'; value: string }
+export type DecimalTaggedValue = { $type: 'Decimal'; value: string }
+export type BytesTaggedValue = { $type: 'Bytes'; value: string }
+export type BigIntTaggedValue = { $type: 'BigInt'; value: string }
+export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } }
+export type EnumTaggedValue = { $type: 'Enum'; value: string }
+export type JsonTaggedValue = { $type: 'Json'; value: string }
+
+export type JsonInputTaggedValue =
+  | DateTaggedValue
+  | DecimalTaggedValue
+  | BytesTaggedValue
+  | BigIntTaggedValue
+  | FieldRefTaggedValue
+  | JsonTaggedValue
+  | EnumTaggedValue
+
+export type JsonOutputTaggedValue =
+  | DateTaggedValue
+  | DecimalTaggedValue
+  | BytesTaggedValue
+  | BigIntTaggedValue
+  | JsonTaggedValue
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts
new file mode 100644
index 000000000000..5a5e59b563ad
--- /dev/null
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts
@@ -0,0 +1,42 @@
+import type { Connector } from '@jkomyno/prisma-pg-js-connector'
+import type { QueryEngineConfig } from './QueryEngine'
+
+export type QueryEngineInstance = {
+  connect(headers: string): Promise<void>
+  disconnect(headers: string): Promise<void>
+  /**
+   * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest`
+   * @param headersStr JSON.stringified `QueryEngineRequestHeaders`
+   */
+  query(requestStr: string, headersStr: string, transactionId?: string): Promise<string>
+  sdlSchema(): Promise<string>
+  dmmf(traceparent: string): Promise<string>
+  startTransaction(options: string, traceHeaders: string): Promise<string>
+  commitTransaction(id: string, traceHeaders: string): Promise<string>
+  rollbackTransaction(id: string, traceHeaders: string): Promise<string>
+  metrics(options: string): Promise<string>
+}
+
+export interface QueryEngineConstructor {
+  new(config: QueryEngineConfig, logger: (log: string) => void, nodejsFnCtx?: Connector): QueryEngineInstance
+}
+
+export interface LibraryLoader {
+  loadLibrary(): Promise<Library>
+}
+
+// Main
+export type Library = {
+  QueryEngine: QueryEngineConstructor
+
+  version: () => {
+    // The commit hash of the engine
+    commit: string
+    // Currently 0.1.0 (Set in Cargo.toml)
+    version: string
+  }
+  /**
+   * This returns a string representation of `DMMF.Document`
+   */
+  dmmf: (datamodel: string) => Promise<string>
+}
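Since the Rust test kit's `detag()` (in query_result.rs above) strips exactly these `{ $type, value }` wrappers, a TypeScript mirror of that helper may make the unwrapping rule clearer. This is an illustrative sketch, not part of the patch:

function detagValue(val: unknown): unknown {
  if (Array.isArray(val)) return val.map(detagValue)
  if (val !== null && typeof val === 'object') {
    const obj = val as Record<string, unknown>
    // An object with exactly the two keys $type and value is a tagged scalar: unwrap it recursively.
    if (Object.keys(obj).length === 2 && '$type' in obj && 'value' in obj) return detagValue(obj['value'])
    return Object.fromEntries(Object.entries(obj).map(([k, v]) => [k, detagValue(v)]))
  }
  return val
}

// Example: { $type: 'Decimal', value: '10.25' } detags to '10.25'.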
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts
new file mode 100644
index 000000000000..416da634fc91
--- /dev/null
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts
@@ -0,0 +1,89 @@
+import { JsonBatchQuery, JsonQuery } from './JsonProtocol'
+import * as Transaction from './Transaction'
+
+// Events
+export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent
+
+export type QueryEngineLogEvent = {
+  level: string
+  module_path: string
+  message: string
+  span?: boolean
+}
+
+export type QueryEngineQueryEvent = {
+  level: 'info'
+  module_path: string
+  query: string
+  item_type: 'query'
+  params: string
+  duration_ms: string
+  result: string
+}
+
+export type QueryEnginePanicEvent = {
+  level: 'error'
+  module_path: string
+  message: 'PANIC'
+  reason: string
+  file: string
+  line: string
+  column: string
+}
+
+
+export type GraphQLQuery = {
+  query: string
+  variables: object
+}
+
+export type EngineProtocol = 'graphql' | 'json'
+export type EngineQuery = GraphQLQuery | JsonQuery
+
+export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[]
+
+export type QueryEngineConfig = {
+  // TODO rename datamodel here and other places
+  datamodel: string
+  configDir: string
+  logQueries: boolean
+  ignoreEnvVarErrors: boolean
+  datasourceOverrides?: Record<string, string>
+  env: Record<string, string>
+  logLevel?: string
+  engineProtocol: EngineProtocol
+}
+
+// Errors
+export type SyncRustError = {
+  is_panic: boolean
+  message: string
+  meta: {
+    full_error: string
+  }
+  error_code: string
+}
+
+export type RustRequestError = {
+  is_panic: boolean
+  message: string
+  backtrace: string
+}
+
+export type QueryEngineResult<T> = {
+  data: T
+  elapsed: number
+}
+
+export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery
+
+export type QueryEngineBatchGraphQLRequest = {
+  batch: QueryEngineRequest[]
+  transaction?: boolean
+  isolationLevel?: Transaction.IsolationLevel
+}
+
+export type QueryEngineRequest = {
+  query: string
+  variables: Object
+}
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts
new file mode 100644
index 000000000000..1c5786cc66da
--- /dev/null
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts
@@ -0,0 +1,35 @@
+export enum IsolationLevel {
+  ReadUncommitted = 'ReadUncommitted',
+  ReadCommitted = 'ReadCommitted',
+  RepeatableRead = 'RepeatableRead',
+  Snapshot = 'Snapshot',
+  Serializable = 'Serializable',
+}
+
+/**
+ * maxWait ?= 2000
+ * timeout ?= 5000
+ */
+export type Options = {
+  maxWait?: number
+  timeout?: number
+  isolationLevel?: IsolationLevel
+}
+
+export type InteractiveTransactionInfo<Payload = unknown> = {
+  /**
+   * Transaction ID returned by the query engine.
+   */
+  id: string
+
+  /**
+   * Arbitrary payload the meaning of which depends on the `Engine` implementation.
+   * For example, `DataProxyEngine` needs to associate different API endpoints with transactions.
+   * In `LibraryEngine` and `BinaryEngine` it is currently not used.
+   */
+  payload: Payload
+}
+
+export type TransactionHeaders = {
+  traceparent?: string
+}
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
new file mode 100644
index 000000000000..2a6432c7a7a0
--- /dev/null
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
@@ -0,0 +1,179 @@
+import pgDriver from 'pg'
+import * as pg from '@jkomyno/prisma-adapter-pg'
+import * as qe from './qe'
+import * as engines from './engines/Library'
+import * as readline from 'node:readline'
+import * as jsonRpc from './jsonRpc'
+import {bindAdapter, ErrorCapturingDriverAdapter} from "@jkomyno/prisma-driver-adapter-utils";
+
+async function main(): Promise<void> {
+  const iface = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+    terminal: false,
+  });
+
+  iface.on('line', async (line) => {
+    try {
+      const request: jsonRpc.Request = JSON.parse(line); // todo: validate
+      console.error(`Got a request: ${line}`)
+      try {
+        const response = await handleRequest(request.method, request.params)
+        respondOk(request.id, response)
+      } catch (err) {
+        console.error("[nodejs] Error from request handler: ", err)
+        respondErr(request.id, {
+          code: 1,
+          message: err.toString(),
+        })
+      }
+    } catch (_) {
+      // skip non-JSON line
+    }
+
+  });
+}
+
+const schemas: Record<number, engines.QueryEngineInstance> = {}
+const adapters: Record<number, ErrorCapturingDriverAdapter> = {}
+const queryLogs: Record<number, string[]> = []
+
+async function handleRequest(method: string, params: unknown): Promise<unknown> {
+  switch (method) {
+    case 'initializeSchema': {
+      interface InitializeSchemaParams {
+        schema: string
+        schemaId: string
+        url: string
+      }
+
+      const castParams = params as InitializeSchemaParams;
+      const logs = queryLogs[castParams.schemaId] = [] as string[]
+      const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => {
+        logs.push(log)
+      });
+      await engine.connect("")
+      schemas[castParams.schemaId] = engine
+      adapters[castParams.schemaId] = adapter
+      return null
+    }
+    case 'query': {
+      interface QueryPayload {
+        query: string
+        schemaId: number
+        txId?: string
+      }
+
+      console.error("Got `query`", params)
+      const castParams = params as QueryPayload;
+      const engine = schemas[castParams.schemaId]
+      const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId)
+
+      const parsedResult = JSON.parse(result)
+      if (parsedResult.errors) {
+        const error = parsedResult.errors[0]?.user_facing_error
+        if (error.error_code === 'P2036') {
+          const jsError = adapters[castParams.schemaId].errorRegistry.consumeError(error.meta.id)
+          if (!jsError) {
+            console.error(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`)
+          } else {
+            console.error("[nodejs] got error response from the engine caused by the driver: ", jsError)
+          }
+        }
+      }
+
+      console.error("[nodejs] got response from engine: ", result)
+
+      // returning unparsed string: otherwise, some information gets lost during this round-trip.
+      // In particular, floating-point numbers without a decimal part turn into integers.
+      return result
+    }
+
+    case 'startTx': {
+      interface StartTxPayload {
+        schemaId: number,
+        options: unknown
+      }
+      console.error("Got `startTx", params)
+      const { schemaId, options } = params as StartTxPayload
+      const result = await schemas[schemaId].startTransaction(JSON.stringify(options), "")
+      return JSON.parse(result)
+
+
+
+    }
+
+    case 'commitTx': {
+      interface CommitTxPayload {
+        schemaId: number,
+        txId: string,
+      }
+      console.error("Got `commitTx", params)
+      const { schemaId, txId } = params as CommitTxPayload
+      const result = await schemas[schemaId].commitTransaction(txId, '{}')
+      return JSON.parse(result)
+    }
+
+    case 'rollbackTx': {
+      interface RollbackTxPayload {
+        schemaId: number,
+        txId: string,
+      }
+      console.error("Got `rollbackTx", params)
+      const { schemaId, txId } = params as RollbackTxPayload
+      const result = await schemas[schemaId].rollbackTransaction(txId, '{}')
+      return JSON.parse(result)
+    }
+    case 'teardown': {
+      interface TeardownPayload {
+        schemaId: number
+      }
+
+      const castParams = params as TeardownPayload;
+      await schemas[castParams.schemaId].disconnect("")
+      delete schemas[castParams.schemaId]
+      delete queryLogs[castParams.schemaId]
+      return {}
+
+    }
+
+    case 'getLogs': {
+      interface GetLogsPayload {
+        schemaId: number
+      }
+      const castParams = params as GetLogsPayload
+      return queryLogs[castParams.schemaId] ?? []
+    }
+    default: {
+      throw new Error(`Unknown method: \`${method}\``)
+    }
+  }
+}
+
+function respondErr(requestId: number, error: jsonRpc.RpcError) {
+  const msg: jsonRpc.ErrResponse = {
+    jsonrpc: '2.0',
+    id: requestId,
+    error,
+  }
+  console.log(JSON.stringify(msg))
+}
+
+function respondOk(requestId: number, payload: unknown) {
+  const msg: jsonRpc.OkResponse = {
+    jsonrpc: '2.0',
+    id: requestId,
+    result: payload
+
+  };
+  console.log(JSON.stringify(msg))
+}
+
+async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[engines.QueryEngineInstance, ErrorCapturingDriverAdapter]> {
+  const pool = new pgDriver.Pool({ connectionString: url })
+  const adapter = bindAdapter(new pg.PrismaPg(pool))
+  const engineInstance = qe.initQueryEngine(adapter, prismaSchema, logCallback)
+  return [engineInstance, adapter];
+}
+
+main().catch(console.error)
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts
new file mode 100644
index 000000000000..ec734e7b543f
--- /dev/null
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts
@@ -0,0 +1,28 @@
+export interface Request {
+  jsonrpc: '2.0'
+  method: string
+  params?: Object,
+  id: number
+}
+
+export type Response = OkResponse | ErrResponse
+
+export interface OkResponse {
+  jsonrpc: '2.0'
+  result: unknown
+  error?: never
+  id: number
+}
+
+export interface ErrResponse {
+  jsonrpc: '2.0'
+  error: RpcError
+  result?: never
+  id: number
+}
+
+export interface RpcError {
+  code: number
+  message: string
+  data?: unknown
+}
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts
new file mode 100644
index 000000000000..3f86aa028c77
--- /dev/null
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts
@@ -0,0 +1,42 @@
+import * as pg from '@jkomyno/prisma-adapter-pg'
+import {bindAdapter, ErrorCapturingDriverAdapter} from
'@jkomyno/prisma-driver-adapter-utils' +import * as lib from './engines/Library' +import * as os from 'node:os' +import * as path from 'node:path' + +export type QueryLogCallback = (log: string) => void + +export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: string, queryLogCallback: QueryLogCallback): lib.QueryEngineInstance { + // I assume nobody will run this on Windows ¯\_(ツ)_/¯ + const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' + const dirname = path.dirname(new URL(import.meta.url).pathname) + + const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + + const libqueryEngine = { exports: {} as unknown as lib.Library } + // @ts-ignore + process.dlopen(libqueryEngine, libQueryEnginePath) + + const QueryEngine = libqueryEngine.exports.QueryEngine + + const queryEngineOptions = { + datamodel, + configDir: '.', + engineProtocol: 'json' as const, + logLevel: process.env["RUST_LOG"] as any, + logQueries: true, + env: process.env, + ignoreEnvVarErrors: false, + } + + const logCallback = (event: any) => { + const parsed = JSON.parse(event) + if (parsed.is_query) { + queryLogCallback(parsed.query) + } + console.error("[nodejs] ", parsed) + } + const engine = new QueryEngine(queryEngineOptions, logCallback, adapter) + + return engine +} diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json index 6864220623d4..1fc20228fe6f 100644 --- a/query-engine/driver-adapters/js/package.json +++ b/query-engine/driver-adapters/js/package.json @@ -5,7 +5,7 @@ "description": "", "engines": { "node": ">=16.13", - "pnpm": ">=8.6.7 <9" + "pnpm": ">=8.6.6 <9" }, "license": "Apache-2.0", "scripts": { diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 4b4225882ba1..0a348f862d51 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -51,6 +51,21 @@ importers: specifier: ^1.11.0 version: 1.11.0 + connector-test-kit-executor: + dependencies: + '@jkomyno/prisma-adapter-pg': + specifier: workspace:* + version: link:../adapter-pg + '@jkomyno/prisma-driver-adapter-utils': + specifier: workspace:* + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + driver-adapter-utils: dependencies: debug: @@ -436,7 +451,6 @@ packages: '@types/node': 20.5.9 pg-protocol: 1.6.0 pg-types: 4.0.1 - dev: true /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} @@ -843,7 +857,6 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - dev: true /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -891,7 +904,6 @@ packages: /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - dev: true /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -924,7 +936,6 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 - dev: true /pg@8.11.3: resolution: {integrity: 
sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -983,7 +994,6 @@ packages: /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} - dev: true /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} @@ -994,7 +1004,6 @@ packages: engines: {node: '>= 6'} dependencies: obuf: 1.1.2 - dev: true /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} @@ -1003,7 +1012,6 @@ packages: /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} - dev: true /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} @@ -1014,11 +1022,9 @@ packages: /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} - dev: true /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - dev: true /prisma@5.3.0-integration-feat-driver-adapters-in-client.3: resolution: {integrity: sha512-M9FQjLmJL7g4GnHwcsuf2WPqE3/B3k/laBkaq5XCxJcBMjoipNIGW0ZlZKY9t+TdJ14asGrv4+7o7mAmKLZqrw==} diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml index 6a17ebd231fd..a7ffe6b09c5f 100644 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ b/query-engine/driver-adapters/js/pnpm-workspace.yaml @@ -1,6 +1,7 @@ packages: - './adapter-neon' - - './adapter-planetscale' - './adapter-pg' + - './adapter-planetscale' + - './connector-test-kit-executor' - './driver-adapter-utils' - './smoke-test-js' diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs index 6d4dec482285..652ad3108f0d 100644 --- a/query-engine/request-handlers/src/load_executor.rs +++ b/query-engine/request-handlers/src/load_executor.rs @@ -2,6 +2,7 @@ use psl::{builtin_connectors::*, Datasource, PreviewFeatures}; use query_core::{executor::InterpretingExecutor, Connector, QueryExecutor}; use sql_query_connector::*; use std::collections::HashMap; +use std::env; use tracing::trace; use url::Url; @@ -17,24 +18,38 @@ pub async fn load( features: PreviewFeatures, url: &str, ) -> query_core::Result> { - if connector_mode == ConnectorMode::Js { - #[cfg(feature = "driver-adapters")] - return driver_adapter(source, url, features).await; - } + match connector_mode { + ConnectorMode::Js => { + #[cfg(not(feature = "driver-adapters"))] + panic!("Driver adapters are not enabled, but connector mode is set to JS"); + + #[cfg(feature = "driver-adapters")] + driver_adapter(source, url, features).await + } + + ConnectorMode::Rust => { + if let Ok(value) = env::var("PRISMA_DISABLE_QUAINT_EXECUTORS") { + let disable = value.to_uppercase(); + if disable == "TRUE" || disable == "1" { + panic!("Quaint executors are disabled, as per env var PRISMA_DISABLE_QUAINT_EXECUTORS."); + } + } - match source.active_provider { - p if SQLITE.is_provider(p) => sqlite(source, url, features).await, - p if MYSQL.is_provider(p) => mysql(source, url, 
features).await,
-        p if POSTGRES.is_provider(p) => postgres(source, url, features).await,
-        p if MSSQL.is_provider(p) => mssql(source, url, features).await,
-        p if COCKROACH.is_provider(p) => postgres(source, url, features).await,
+            match source.active_provider {
+                p if SQLITE.is_provider(p) => sqlite(source, url, features).await,
+                p if MYSQL.is_provider(p) => mysql(source, url, features).await,
+                p if POSTGRES.is_provider(p) => postgres(source, url, features).await,
+                p if MSSQL.is_provider(p) => mssql(source, url, features).await,
+                p if COCKROACH.is_provider(p) => postgres(source, url, features).await,

-        #[cfg(feature = "mongodb")]
-        p if MONGODB.is_provider(p) => mongodb(source, url, features).await,
+                #[cfg(feature = "mongodb")]
+                p if MONGODB.is_provider(p) => mongodb(source, url, features).await,

-        x => Err(query_core::CoreError::ConfigurationError(format!(
-            "Unsupported connector type: {x}"
-        ))),
+                x => Err(query_core::CoreError::ConfigurationError(format!(
+                    "Unsupported connector type: {x}"
+                ))),
+            }
+        }
     }
 }
diff --git a/query-engine/request-handlers/src/response.rs b/query-engine/request-handlers/src/response.rs
index af99835813e8..a196daade4be 100644
--- a/query-engine/request-handlers/src/response.rs
+++ b/query-engine/request-handlers/src/response.rs
@@ -9,26 +9,26 @@ use crate::HandlerError;
 #[derive(Debug, serde::Serialize, Default, PartialEq)]
 pub struct GQLResponse {
     #[serde(skip_serializing_if = "IndexMap::is_empty")]
-    data: Map<String, Item>,
+    pub data: Map<String, Item>,

     #[serde(skip_serializing_if = "Vec::is_empty")]
-    errors: Vec<GQLError>,
+    pub errors: Vec<GQLError>,

     #[serde(skip_serializing_if = "IndexMap::is_empty")]
-    extensions: Map<String, serde_json::Value>,
+    pub extensions: Map<String, serde_json::Value>,
 }

 #[derive(Debug, serde::Serialize, Default, PartialEq)]
 #[serde(rename_all = "camelCase")]
 pub struct GQLBatchResponse {
     #[serde(skip_serializing_if = "Vec::is_empty")]
-    batch_result: Vec<GQLResponse>,
+    pub batch_result: Vec<GQLResponse>,

     #[serde(skip_serializing_if = "Vec::is_empty")]
-    errors: Vec<GQLError>,
+    pub errors: Vec<GQLError>,

     #[serde(skip_serializing_if = "IndexMap::is_empty")]
-    extensions: Map<String, serde_json::Value>,
+    pub extensions: Map<String, serde_json::Value>,
 }

 #[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq)]

From 6784900dc9768ad7b51e4e5c656e904941376ff9 Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Tue, 19 Sep 2023 15:28:15 +0200
Subject: [PATCH 016/128] driver-adapters: allow sqlite flavour (#4255)

* Allow driver adapters for sqlite family

* Bump driver-adapter-utils version
---
 .../driver-adapters/js/driver-adapter-utils/package.json | 2 +-
 .../driver-adapters/js/driver-adapter-utils/src/types.ts | 2 +-
 query-engine/driver-adapters/src/queryable.rs             | 1 +
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json
index 524d59e551fc..a1e1174c8c91 100644
--- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json
+++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@jkomyno/prisma-driver-adapter-utils",
-  "version": "0.2.1",
+  "version": "0.3.0",
   "description": "Internal set of utilities and types for Prisma's driver adapters.",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
index 826bc67acea7..0dc06f33dc9f 100644
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
+++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
@@ -46,7 +46,7 @@ export type Result<T> = {
 }

 export interface Queryable {
-  readonly flavour: 'mysql' | 'postgres'
+  readonly flavour: 'mysql' | 'postgres' | 'sqlite'

   /**
    * Execute a query given as SQL, interpolating the given parameters,
diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs
index 2a1d6755f684..5dbb549e677e 100644
--- a/query-engine/driver-adapters/src/queryable.rs
+++ b/query-engine/driver-adapters/src/queryable.rs
@@ -46,6 +46,7 @@ impl JsBaseQueryable {
         match self.flavour {
             Flavour::Mysql => visitor::Mysql::build(q),
             Flavour::Postgres => visitor::Postgres::build(q),
+            Flavour::Sqlite => visitor::Sqlite::build(q),
             _ => unimplemented!("Unsupported flavour for JS connector {:?}", self.flavour),
         }
     }

From 6769d01a2e5e7fd6e34b8bd7155c91e14e1a672a Mon Sep 17 00:00:00 2001
From: Faizan Qazi
Date: Wed, 20 Sep 2023 02:48:41 -0400
Subject: [PATCH 017/128] perf: optimize get_columns for CRDB (#4251)

Currently, get_columns on CockroachDB can end up using a partial
index on pg_class, which can internally scan the pg_class index
multiple times. Since get_columns only cares about tables, it can use
an extra predicate to avoid unnecessary overhead.

Fixes: #4250
---
 schema-engine/sql-schema-describer/src/postgres.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/schema-engine/sql-schema-describer/src/postgres.rs b/schema-engine/sql-schema-describer/src/postgres.rs
index 10a6bd76cb53..8b6db47651ce 100644
--- a/schema-engine/sql-schema-describer/src/postgres.rs
+++ b/schema-engine/sql-schema-describer/src/postgres.rs
@@ -886,6 +886,7 @@ impl<'a> SqlSchemaDescriber<'a> {
                     FROM pg_class
                     JOIN pg_namespace on pg_namespace.oid = pg_class.relnamespace
                     AND pg_namespace.nspname = ANY ( $1 )
+                    WHERE reltype > 0
                 ) as oid on oid.oid = att.attrelid
                 AND relname = info.table_name
                 AND namespace = info.table_schema

From 984fef8f242a28b1705c046815aa1184cf5b9186 Mon Sep 17 00:00:00 2001
From: Jan Piotrowski
Date: Wed, 20 Sep 2023 08:49:19 +0200
Subject: [PATCH 018/128] fix(driver-adapters): Add studio command to smoke-tests (#4220)

---
 query-engine/driver-adapters/js/smoke-test-js/README.md    | 5 ++---
 query-engine/driver-adapters/js/smoke-test-js/package.json | 5 +++++
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md
index e9c79ef930c8..d0424d5eacc9 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/README.md
+++ b/query-engine/driver-adapters/js/smoke-test-js/README.md
@@ -28,8 +28,8 @@ Anywhere in the repository:
 If you don't have a connection string yet:

 - [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/)
-- Create a new database on [PlanetScale](https://planetscale.com/)
-- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`.
+- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc).
+- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`.
In the current directory: - Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data. @@ -43,7 +43,6 @@ In the current directory: If you don't have a connection string yet: - [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). -- Create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). - Paste the connection string to `JS_NEON_DATABASE_URL`. In the current directory: diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 3f05fa5b69d9..bece48d69796 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -7,9 +7,12 @@ "scripts": { "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", + "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", + "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", + "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/neon.ws.test.ts", "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/neon.http.test.ts", "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.ws.test.ts", @@ -17,10 +20,12 @@ "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", + "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/pg.test.ts", "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/client/pg.test.ts", "pg": "pnpm pg:libquery && pnpm pg:client", "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", + "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/planetscale.test.ts", "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/client/planetscale.test.ts", "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client" From d2a5227ae93ec715bc7bb2aa173bbf897bf84232 Mon Sep 
17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 20 Sep 2023 11:20:57 +0200 Subject: [PATCH 019/128] Ensure test-kit executor is not published (#4258) --- .../js/connector-test-kit-executor/package.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index fc04c8fabd02..f6bf317367dd 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -3,6 +3,7 @@ "version": "1.0.0", "description": "", "main": "dist/index.js", + "private": true, "scripts": { "build": "tsup ./src/index.ts --format cjs,esm --dts", "lint": "tsc -p ./tsconfig.build.json" @@ -12,9 +13,9 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "@jkomyno/prisma-adapter-pg": "workspace:*", - "@jkomyno/prisma-driver-adapter-utils": "workspace:*", - "pg": "^8.11.3", - "@types/pg": "^8.10.2" + "@jkomyno/prisma-adapter-pg": "workspace:*", + "@jkomyno/prisma-driver-adapter-utils": "workspace:*", + "pg": "^8.11.3", + "@types/pg": "^8.10.2" } } From 783aff397fa040782b07b36c4e8868022d62eb54 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 20 Sep 2023 12:01:39 +0200 Subject: [PATCH 020/128] 0.3.1 of adapter packages (#4259) --- query-engine/driver-adapters/js/adapter-neon/package.json | 2 +- query-engine/driver-adapters/js/adapter-pg/package.json | 2 +- .../driver-adapters/js/adapter-planetscale/package.json | 2 +- .../driver-adapters/js/driver-adapter-utils/package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 52ee08f97be2..975df65ce3dc 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@jkomyno/prisma-adapter-neon", - "version": "0.2.1", + "version": "0.3.1", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 3055976cb51b..b6b85df0a01e 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@jkomyno/prisma-adapter-pg", - "version": "0.2.1", + "version": "0.3.1", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index bdda6c0a5c99..45037532c0c9 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@jkomyno/prisma-adapter-planetscale", - "version": "0.2.1", + "version": "0.3.1", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index a1e1174c8c91..8221f293e9f6 100644 --- 
a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@jkomyno/prisma-driver-adapter-utils", - "version": "0.3.0", + "version": "0.3.1", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", From e0be60b2def92bf0594c974a13e66ea644fe71d4 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Wed, 20 Sep 2023 12:25:11 +0200 Subject: [PATCH 021/128] feat(driver-adapters): renamed npm scope to "@prisma/" (#4252) --- .../js/adapter-neon/package.json | 4 +-- .../js/adapter-neon/src/conversion.ts | 2 +- .../js/adapter-neon/src/neon.ts | 4 +-- .../js/adapter-pg/package.json | 4 +-- .../js/adapter-pg/src/conversion.ts | 2 +- .../driver-adapters/js/adapter-pg/src/pg.ts | 4 +-- .../js/adapter-planetscale/package.json | 4 +-- .../js/adapter-planetscale/src/conversion.ts | 2 +- .../js/adapter-planetscale/src/planetscale.ts | 4 +-- .../js/driver-adapter-utils/package.json | 2 +- .../driver-adapters/js/pnpm-lock.yaml | 30 +++++++++---------- .../js/smoke-test-js/README.md | 2 +- .../js/smoke-test-js/package.json | 10 +++---- .../js/smoke-test-js/src/client/client.ts | 2 +- .../src/client/neon.http.test.ts | 2 +- .../smoke-test-js/src/client/neon.ws.test.ts | 2 +- .../js/smoke-test-js/src/client/pg.test.ts | 2 +- .../src/client/planetscale.test.ts | 2 +- .../src/engines/types/Library.ts | 2 +- .../js/smoke-test-js/src/libquery/libquery.ts | 2 +- .../src/libquery/neon.http.test.ts | 4 +-- .../src/libquery/neon.ws.test.ts | 4 +-- .../js/smoke-test-js/src/libquery/pg.test.ts | 4 +-- .../src/libquery/planetscale.test.ts | 4 +-- .../js/smoke-test-js/src/libquery/util.ts | 2 +- 25 files changed, 53 insertions(+), 53 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 975df65ce3dc..584906aa8734 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,5 +1,5 @@ { - "name": "@jkomyno/prisma-adapter-neon", + "name": "@prisma/adapter-neon", "version": "0.3.1", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", @@ -18,7 +18,7 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*" }, "devDependencies": { "@neondatabase/serverless": "^0.6.0" diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index ea91f57eefdf..c05ad1f65108 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' import { types } from '@neondatabase/serverless' const NeonColumnType = types.builtins diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index 0839523e131d..541879b7d4b8 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -1,6 +1,6 @@ import type neon from '@neondatabase/serverless' -import { 
Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' +import { Debug } from '@prisma/driver-adapter-utils' +import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@prisma/driver-adapter-utils' import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:neon') diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index b6b85df0a01e..ff91a82dddac 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,5 +1,5 @@ { - "name": "@jkomyno/prisma-adapter-pg", + "name": "@prisma/adapter-pg", "version": "0.3.1", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", @@ -18,7 +18,7 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*" }, "devDependencies": { "pg": "^8.11.3", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index fc9ad43e9f0e..8943ae2d16b2 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' import { types } from 'pg' const PgColumnType = types.builtins diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index 2f0fac5d2249..bc111e84d8a1 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -1,6 +1,6 @@ import type pg from 'pg' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, Query, Queryable, Result, ResultSet, Transaction, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' +import { Debug } from '@prisma/driver-adapter-utils' +import type { DriverAdapter, Query, Queryable, Result, ResultSet, Transaction, TransactionOptions } from '@prisma/driver-adapter-utils' import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:pg') diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index 45037532c0c9..862c0f63c7fb 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,5 +1,5 @@ { - "name": "@jkomyno/prisma-adapter-planetscale", + "name": "@prisma/adapter-planetscale", "version": "0.3.1", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", @@ -18,7 +18,7 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@jkomyno/prisma-driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*" }, "devDependencies": { "@planetscale/database": "^1.11.0" diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts index 2c79afdddd64..1c46538806b2 
100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@jkomyno/prisma-driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' // See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 export type PlanetScaleColumnType diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index 8bd2610336b5..979302649faa 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,6 +1,6 @@ import type planetScale from '@planetscale/database' -import { Debug } from '@jkomyno/prisma-driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@jkomyno/prisma-driver-adapter-utils' +import { Debug } from '@prisma/driver-adapter-utils' +import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@prisma/driver-adapter-utils' import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' import { createDeferred, Deferred } from './deferred' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 8221f293e9f6..545effc0c465 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,5 +1,5 @@ { - "name": "@jkomyno/prisma-driver-adapter-utils", + "name": "@prisma/driver-adapter-utils", "version": "0.3.1", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 0a348f862d51..33b32e8e17d1 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -20,7 +20,7 @@ importers: adapter-neon: dependencies: - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils devDependencies: @@ -30,7 +30,7 @@ importers: adapter-pg: dependencies: - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils devDependencies: @@ -43,7 +43,7 @@ importers: adapter-planetscale: dependencies: - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils devDependencies: @@ -78,27 +78,27 @@ importers: smoke-test-js: dependencies: - '@jkomyno/prisma-adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@jkomyno/prisma-adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@jkomyno/prisma-adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@jkomyno/prisma-driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils '@neondatabase/serverless': specifier: ^0.6.0 version: 0.6.0 '@planetscale/database': specifier: ^1.11.0 version: 1.11.0 + '@prisma/adapter-neon': + specifier: workspace:* + 
version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: workspace:* + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: workspace:* + version: link:../adapter-planetscale '@prisma/client': specifier: 5.3.0-integration-feat-driver-adapters-in-client.3 version: 5.3.0-integration-feat-driver-adapters-in-client.3(prisma@5.3.0-integration-feat-driver-adapters-in-client.3) + '@prisma/driver-adapter-utils': + specifier: workspace:* + version: link:../driver-adapter-utils pg: specifier: ^8.11.3 version: 8.11.3 diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md index d0424d5eacc9..f719a7189282 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ b/query-engine/driver-adapters/js/smoke-test-js/README.md @@ -1,4 +1,4 @@ -# @prisma/smoke-test-js +# @prisma/driver-adapters-smoke-tests-js This is a playground for testing the `libquery` client with the experimental Node.js drivers. It contains a subset of `@prisma/client`, plus some handy executable smoke tests: diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index bece48d69796..5de722eabcca 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -1,5 +1,5 @@ { - "name": "@jkomyno/smoke-test-js", + "name": "@prisma/driver-adapters-smoke-tests-js", "private": true, "type": "module", "version": "0.0.0", @@ -35,10 +35,10 @@ "license": "Apache-2.0", "sideEffects": true, "dependencies": { - "@jkomyno/prisma-adapter-neon": "workspace:*", - "@jkomyno/prisma-adapter-planetscale": "workspace:*", - "@jkomyno/prisma-adapter-pg": "workspace:*", - "@jkomyno/prisma-driver-adapter-utils": "workspace:*", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", "@neondatabase/serverless": "^0.6.0", "@planetscale/database": "^1.11.0", "@prisma/client": "5.3.0-integration-feat-driver-adapters-in-client.3", diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index 7a9400fbd291..ca434271c8de 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -1,7 +1,7 @@ import { describe, it } from 'node:test' import assert from 'node:assert' import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { DriverAdapter } from '@prisma/driver-adapter-utils' export async function smokeTestClient(driverAdapter: DriverAdapter) { const provider = driverAdapter.flavour diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts index e2de75384b33..137bb0ed9835 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts @@ -1,6 +1,6 @@ import { describe } from 'node:test' import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' +import { PrismaNeonHTTP } from '@prisma/adapter-neon' import { smokeTestClient } from './client' describe('neon with 
@prisma/client', async () => { diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts index fddc42eeadeb..c6e85eca0891 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts @@ -1,6 +1,6 @@ import { describe } from 'node:test' import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' +import { PrismaNeon } from '@prisma/adapter-neon' import { WebSocket } from 'undici' import { smokeTestClient } from './client' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts index a6652d714370..7394e5f55e89 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts @@ -1,6 +1,6 @@ import { describe } from 'node:test' import pg from 'pg' -import { PrismaPg } from '@jkomyno/prisma-adapter-pg' +import { PrismaPg } from '@prisma/adapter-pg' import { smokeTestClient } from './client' describe('pg with @prisma/client', async () => { diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts index 07a9809b8c07..e82e209247f4 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts @@ -1,5 +1,5 @@ import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' import { describe } from 'node:test' import { smokeTestClient } from './client' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts index a8f1c28bb64c..a25b3dd26728 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts @@ -1,4 +1,4 @@ -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineConfig } from './QueryEngine' export type QueryEngineInstance = { diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 324ca62d12dd..f91a72c0383e 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -1,6 +1,6 @@ import { describe, it, before, after } from 'node:test' import assert from 'node:assert' -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' import { initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts 
b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index 24e12fe631cd..b27e69747365 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -1,5 +1,5 @@ -import { PrismaNeonHTTP } from '@jkomyno/prisma-adapter-neon' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { PrismaNeonHTTP } from '@prisma/adapter-neon' +import { bindAdapter } from '@prisma/driver-adapter-utils' import { neon } from '@neondatabase/serverless' import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts index 3510a5c2709c..c51150920812 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts @@ -1,5 +1,5 @@ -import { PrismaNeon } from '@jkomyno/prisma-adapter-neon' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { PrismaNeon } from '@prisma/adapter-neon' +import { bindAdapter } from '@prisma/driver-adapter-utils' import { WebSocket } from 'undici' import { Pool, neonConfig } from '@neondatabase/serverless' import { describe } from 'node:test' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts index 0cb19343e7fb..ca4f297bb1f2 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts @@ -1,6 +1,6 @@ import pg from 'pg' -import { PrismaPg } from '@jkomyno/prisma-adapter-pg' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { PrismaPg } from '@prisma/adapter-pg' +import { bindAdapter } from '@prisma/driver-adapter-utils' import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts index 85b9b722c9c3..2d3137fbe500 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts @@ -1,6 +1,6 @@ import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@jkomyno/prisma-adapter-planetscale' -import { bindAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' +import { bindAdapter } from '@prisma/driver-adapter-utils' import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts index 187d8b86c7f4..d028f6929303 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts @@ -1,7 +1,7 @@ import path from 'node:path' import os from 'node:os' import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@jkomyno/prisma-driver-adapter-utils' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import { Library, 
QueryEngineInstance } from '../engines/types/Library' export function initQueryEngine(driver: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string): QueryEngineInstance { From f5a416b0e8c3b9ec1f10b6d6eeeb5e67797a9147 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 20 Sep 2023 12:33:11 +0200 Subject: [PATCH 022/128] driver-adapters: fixup dependencies of connector-test-kit-executor (#4260) Update dependencies of `connector-test-kit-executor` after renaming the packages in https://github.com/prisma/prisma-engines/pull/4252. --- .../js/connector-test-kit-executor/package.json | 4 ++-- query-engine/driver-adapters/js/pnpm-lock.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index f6bf317367dd..38cb115ace79 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -13,8 +13,8 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "@jkomyno/prisma-adapter-pg": "workspace:*", - "@jkomyno/prisma-driver-adapter-utils": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "@types/pg": "^8.10.2" } diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 33b32e8e17d1..123db305f805 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -53,10 +53,10 @@ importers: connector-test-kit-executor: dependencies: - '@jkomyno/prisma-adapter-pg': + '@prisma/adapter-pg': specifier: workspace:* version: link:../adapter-pg - '@jkomyno/prisma-driver-adapter-utils': + '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils '@types/pg': From d3525d18dcd38d5b0b8d3fe693050f74b86fe21d Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 20 Sep 2023 12:56:07 +0200 Subject: [PATCH 023/128] driver-adapters: fixup remnants of renamed packages (#4261) Fixup things that were missed after the last and even one of the previous renames. --- query-engine/driver-adapters/js/README.md | 8 ++++---- .../js/connector-test-kit-executor/src/engines/Library.ts | 4 ++-- .../js/connector-test-kit-executor/src/index.ts | 4 ++-- .../js/connector-test-kit-executor/src/qe.ts | 3 +-- 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md index d4198f4c31f3..e5e64c60dfc8 100644 --- a/query-engine/driver-adapters/js/README.md +++ b/query-engine/driver-adapters/js/README.md @@ -1,17 +1,17 @@ # Prisma Driver Adapters This TypeScript monorepo contains the following packages: -- `@jkomyno/prisma-driver-adapter-utils` (later: `@prisma/driver-adapter-utils`) +- `@prisma/driver-adapter-utils` - Internal set of utilities and types for Prisma's driver adapters. 
-- `@jkomyno/prisma-adapter-neon` (later: `@prisma/adapter-neon`) +- `@prisma/adapter-neon` - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - It uses `provider = "postgres"` - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@jkomyno/prisma-adapter-planetscale` (later: `@prisma/adapter-planetscale`) +- `@prisma/adapter-planetscale` - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - It uses `provider = "mysql"` - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@jkomyno/prisma-adapter-pg` (later: `@prisma/adapter-pg`) +- `@prisma/adapter-pg` - Prisma's Driver Adapter that wraps the `pg` driver - It uses `provider = "postgres"` - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts index 5a5e59b563ad..b0e0b06abc49 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts @@ -1,4 +1,4 @@ -import type { Connector } from '@jkomyno/prisma-pg-js-connector' +import type { DriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineConfig } from './QueryEngine' export type QueryEngineInstance = { @@ -18,7 +18,7 @@ export type QueryEngineInstance = { } export interface QueryEngineConstructor { - new(config: QueryEngineConfig, logger: (log: string) => void, nodejsFnCtx?: Connector): QueryEngineInstance + new(config: QueryEngineConfig, logger: (log: string) => void, nodejsFnCtx?: DriverAdapter): QueryEngineInstance } export interface LibraryLoader { diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index 2a6432c7a7a0..7188615a0609 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -1,10 +1,10 @@ import pgDriver from 'pg' -import * as pg from '@jkomyno/prisma-adapter-pg' +import * as pg from '@prisma/adapter-pg' import * as qe from './qe' import * as engines from './engines/Library' import * as readline from 'node:readline' import * as jsonRpc from './jsonRpc' -import {bindAdapter, ErrorCapturingDriverAdapter} from "@jkomyno/prisma-driver-adapter-utils"; +import {bindAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; async function main(): Promise { const iface = readline.createInterface({ diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts index 3f86aa028c77..af32bd0bdb8b 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts @@ -1,5 +1,4 @@ -import * as pg from '@jkomyno/prisma-adapter-pg' -import {bindAdapter, ErrorCapturingDriverAdapter} from '@jkomyno/prisma-driver-adapter-utils' +import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import * as lib from './engines/Library' import * as os from 'node:os' import * as path from 'node:path' From e0844c11b731a9d90600e927ee5548f24b179ed3 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 20 Sep 2023 15:06:04 +0200 Subject: [PATCH 024/128] qe: Fix 
undefined values in neon adapter (#4262) When we switched to array mode, we adapted the PG driver correctly, but `neon` only partially. As a result, row values were returned as `undefined` and deserialization failed on the Rust side. --- .../js/adapter-neon/src/neon.ts | 29 +++++++++++-------- .../src/libquery/neon.http.test.ts | 4 +-- .../src/libquery/neon.ws.test.ts | 2 +- 3 files changed, 20 insertions(+), 15 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index 541879b7d4b8..e3d44878b1d9 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -1,14 +1,22 @@ import type neon from '@neondatabase/serverless' import { Debug } from '@prisma/driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@prisma/driver-adapter-utils' +import type { + DriverAdapter, + ResultSet, + Query, + Queryable, + Transaction, + Result, + TransactionOptions, +} from '@prisma/driver-adapter-utils' import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:neon') -type ARRAY_MODE_DISABLED = false +type ARRAY_MODE_ENABLED = true type FULL_RESULTS_ENABLED = true -type PerformIOResult = neon.QueryResult | neon.FullQueryResults +type PerformIOResult = neon.QueryResult | neon.FullQueryResults /** * Base class for http client, ws client and ws transaction */ abstract class NeonQueryable implements Queryable { const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows: results } = await this.performIO(query) + const { fields, rows } = await this.performIO(query) - const columns = fields.map(field => field.name) + const columns = fields.map((field) => field.name) const resultSet: ResultSet = { columnNames: columns, - columnTypes: fields.map(field => fieldToColumnType(field.dataTypeID)), - rows: results.map(result => columns.map(column => result[column])), + columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), + rows, } return { ok: true, value: resultSet } @@ -57,7 +65,7 @@ class NeonWsQueryable extends NeonQ const { sql, args: values } = query try { - return await this.client.query({ text: sql, values, rowMode: 'array'}) + return await this.client.query({ text: sql, values, rowMode: 'array' }) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) @@ -115,10 +123,7 @@ export class PrismaNeon extends NeonWsQueryable implements DriverAdap } export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction< - ARRAY_MODE_DISABLED, - FULL_RESULTS_ENABLED - >) { + constructor(private client: neon.NeonQueryFunction) { super() } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index b27e69747365..afdcb3cfb55b 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -8,12 +8,12 @@ describe('neon (HTTP)', () => { const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` const neonConnection = neon(connectionString, { - arrayMode: false, + arrayMode: true, fullResults: true, }) const adapter = new PrismaNeonHTTP(neonConnection) const driverAdapter = bindAdapter(adapter) - + 
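To make the array-mode change above concrete, here is a small standalone sketch (not part of the patch; the sample fields and rows are invented for illustration) of the two row shapes involved. In object mode each row is keyed by column name and must be remapped into positional arrays, which is exactly the `results.map(...)` line the patch deletes; in array mode the driver already returns rows in the positional shape the engine's `ResultSet` expects:

```ts
type Field = { name: string }

// Object mode: rows keyed by column name (what the HTTP client returned before).
const objectRows = [{ id: 1, email: 'a@b.c' }]
// Array mode: positional rows aligned with `fields` (what the engine expects).
const arrayRows = [[1, 'a@b.c']]

// The remapping the deleted code performed for object-mode rows:
function toArrayRows(fields: Field[], rows: Record<string, unknown>[]): unknown[][] {
  const columns = fields.map((field) => field.name)
  return rows.map((row) => columns.map((column) => row[column]))
}

console.log(toArrayRows([{ name: 'id' }, { name: 'email' }], objectRows)) // [[1, 'a@b.c']], same shape as arrayRows
```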
smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts index c51150920812..017cb8f1c31d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts @@ -13,6 +13,6 @@ describe('neon (WebSocket)', () => { const pool = new Pool({ connectionString }) const adapter = new PrismaNeon(pool) const driverAdapter = bindAdapter(adapter) - + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') }) From 3771b9b6e3405e1aaa630d6fcae7eac8560b59bd Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 20 Sep 2023 15:48:06 +0200 Subject: [PATCH 025/128] js-connectors: Fixes for array mode in JS drivers (#4263) - Switch planetscale driver to array mode too - For neon-http, pass `arrayMode` and `fullResult` options on all invocations, making us agnostic to the settings used in constructor. --- .../js/adapter-neon/src/neon.ts | 8 ++- .../js/adapter-planetscale/src/planetscale.ts | 59 +++++++++++-------- .../src/libquery/neon.http.test.ts | 5 +- 3 files changed, 40 insertions(+), 32 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index e3d44878b1d9..da47c9192094 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -14,7 +14,6 @@ import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:neon') type ARRAY_MODE_ENABLED = true -type FULL_RESULTS_ENABLED = true type PerformIOResult = neon.QueryResult | neon.FullQueryResults @@ -123,13 +122,16 @@ export class PrismaNeon extends NeonWsQueryable implements DriverAdap } export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction) { + constructor(private client: neon.NeonQueryFunction) { super() } override async performIO(query: Query): Promise { const { sql, args: values } = query - return await this.client(sql, values) + return await this.client(sql, values, { + arrayMode: true, + fullResults: true, + }) } startTransaction(): Promise> { diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index 979302649faa..b1d640398004 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,6 +1,14 @@ import type planetScale from '@planetscale/database' import { Debug } from '@prisma/driver-adapter-utils' -import type { DriverAdapter, ResultSet, Query, Queryable, Transaction, Result, TransactionOptions } from '@prisma/driver-adapter-utils' +import type { + DriverAdapter, + ResultSet, + Query, + Queryable, + Transaction, + Result, + TransactionOptions, +} from '@prisma/driver-adapter-utils' import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' import { createDeferred, Deferred } from './deferred' @@ -17,11 +25,9 @@ class RollbackError extends Error { } } - class PlanetScaleQueryable implements Queryable { readonly flavour = 'mysql' - constructor(protected client: ClientT) { - } + constructor(protected client: ClientT) {} /** * Execute a query given as SQL, 
interpolating the given parameters. @@ -30,13 +36,13 @@ class PlanetScaleQueryable field.name) + const columns = fields.map((field) => field.name) const resultSet: ResultSet = { columnNames: columns, - columnTypes: fields.map(field => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: results.map(result => columns.map(column => result[column])), + columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), + rows: rows as ResultSet['rows'], lastInsertId, } @@ -65,7 +71,9 @@ class PlanetScaleQueryable implements DriverAdapter { @@ -115,21 +122,23 @@ export class PrismaPlanetScale extends PlanetScaleQueryable>((resolve, reject) => { - const txResultPromise = this.client.transaction(async tx => { - const [txDeferred, deferredPromise] = createDeferred() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve({ ok: true, value: txWrapper }) - return deferredPromise - }).catch(error => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) + const txResultPromise = this.client + .transaction(async (tx) => { + const [txDeferred, deferredPromise] = createDeferred() + const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) + + resolve({ ok: true, value: txWrapper }) + return deferredPromise + }) + .catch((error) => { + // Rollback error is ignored (so that tx.rollback() won't crash) + // any other error is legit and is re-thrown + if (!(error instanceof RollbackError)) { + return reject(error) + } + + return undefined + }) }) } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index afdcb3cfb55b..dc839405b21f 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -7,10 +7,7 @@ import { smokeTestLibquery } from './libquery' describe('neon (HTTP)', () => { const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` - const neonConnection = neon(connectionString, { - arrayMode: true, - fullResults: true, - }) + const neonConnection = neon(connectionString) const adapter = new PrismaNeonHTTP(neonConnection) const driverAdapter = bindAdapter(adapter) From a6fe22af4895558462fd647ef0469bc033d1c80d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Wed, 20 Sep 2023 15:49:34 +0200 Subject: [PATCH 026/128] test(qe) driver adapters tests GitHub action (#4257) --- .../query-engine-driver-adapters.yml | 97 +++++++++++++++++++ .../src/connector_tag/js/node_process.rs | 2 +- .../script/start_node.sh | 2 + .../connector-test-kit-executor/src/index.ts | 4 +- 4 files changed, 102 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/query-engine-driver-adapters.yml create mode 100755 query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml new file mode 100644 index 000000000000..9a5af5f6d03e --- /dev/null +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -0,0 +1,97 @@ +name: Driver Adapters +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 
'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust-query-engine-tests: + name: "Test `${{ matrix.database.driver_adapter }}` on node v${{ matrix.node_version }}" + + strategy: + fail-fast: false + matrix: + database: + - name: "postgres13" + connector: "postgres" + version: "13" + driver_adapter: "pg" + node_version: ["18"] + + env: + LOG_LEVEL: "info" + LOG_QUERIES: "y" + RUST_LOG: "info" + RUST_LOG_FORMAT: "devel" + RUST_BACKTRACE: "1" + CLICOLOR_FORCE: "1" + CLOSED_TX_CLEANUP: "2" + SIMPLE_TEST_MODE: "1" + QUERY_BATCH_SIZE: "10" + TEST_CONNECTOR: ${{ matrix.database.connector }} + TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} + TEST_DRIVER_ADAPTER: ${{ matrix.database.driver_adapter }} + + runs-on: buildjet-16vcpu-ubuntu-2004 + steps: + - uses: actions/checkout@v4 + + - name: "Setup Node.js" + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node_version }} + + - name: "Setup pnpm" + uses: pnpm/action-setup@v2 + with: + version: 8 + + - name: "Get pnpm store directory" + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV + + - uses: actions/cache@v3 + name: "Setup pnpm cache" + with: + path: ${{ env.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm-store- + + - name: "Login to Docker Hub" + uses: docker/login-action@v2 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: "Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})" + run: make start-${{ matrix.database.name }} + + - uses: dtolnay/rust-toolchain@stable + + - name: "Build query-engine-node-api with driver support" + run: cargo build -p query-engine-node-api + + - name: "Install and build driver adapter JS dependencies" + run: cd query-engine/driver-adapters/js && pnpm i && pnpm build + + - name: "Run tests" + run: cargo test --package query-engine-tests -- --test-threads=1 + env: + CLICOLOR_FORCE: 1 + WORKSPACE_ROOT: ${{ github.workspace }} + NODE_TEST_EXECUTOR: "${{ github.workspace }}/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" + diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs index 189c39ec2300..3b58ad527925 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs @@ -103,7 +103,7 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { .spawn() { Ok(process) => process, - Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process.\nDetails: {err}\n")), + Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process: `{env_var}`. 
Details: {err}\n")), }; let mut stdout = BufReader::new(process.stdout.unwrap()).lines(); diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh new file mode 100755 index 000000000000..000f3bd1d45c --- /dev/null +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +node "$(dirname "${BASH_SOURCE[0]}")/../dist/index.mjs" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index 7188615a0609..3648276c37f3 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -27,8 +27,8 @@ async function main(): Promise { message: err.toString(), }) } - } catch (_) { - // skip non-JSON line + } catch (err) { + console.error("Received non-json line: ", line); } }); From a8d82f7d09e39f6f93b9a0675adca8c331c2ef3f Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 20 Sep 2023 15:58:31 +0200 Subject: [PATCH 027/128] js-drivers: Bump packages to 0.3.2 (#4264) --- query-engine/driver-adapters/js/adapter-neon/package.json | 2 +- query-engine/driver-adapters/js/adapter-pg/package.json | 2 +- .../driver-adapters/js/adapter-planetscale/package.json | 2 +- .../driver-adapters/js/driver-adapter-utils/package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 584906aa8734..8d5dbc3ed3f3 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.3.1", + "version": "0.3.2", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index ff91a82dddac..f3b2d03c6da4 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.3.1", + "version": "0.3.2", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index 862c0f63c7fb..f2a43486d4e6 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-planetscale", - "version": "0.3.1", + "version": "0.3.2", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 545effc0c465..24cd503329f9 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": 
"@prisma/driver-adapter-utils", - "version": "0.3.1", + "version": "0.3.2", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", From 08728127fb82f91fe575a912f8938013f09a1464 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 21 Sep 2023 14:05:06 +0200 Subject: [PATCH 028/128] test(qe): driver adapters: parameterize test kit to test any driver adapter that we support (#4265) This PR changes the executor for driver adapters to instantiate a different driver adapter based on environment configuration. From the chunk of documentation added to the connector kit README: #### Running tests through driver adapters The query engine is able to delegate query execution to javascript through [driver adapters](query-engine/driver-adapters/js/README.md). This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs drivers the code that actually communicates with the databases. To run tests through a driver adapters, you should also configure the following environment variables: * `NODE_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_URL_OVERRIDE`: it overrides the schema URL for the database to use one understood by the driver adapter (ex. neon, planetscale) Example: ```shell export NODE_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" export DRIVER_ADAPTER=neon export DRIVER_ADAPTER_URL_OVERRIDE ="postgres://USER:PASSWORD@DATABASExxxx" ```` Closes https://github.com/prisma/team-orm/issues/364 --- .envrc | 6 ++ .../query-engine-driver-adapters.yml | 15 ++-- query-engine/connector-test-kit-rs/README.md | 28 +++++++- .../query-tests-setup/src/connector_tag/js.rs | 6 +- .../{node_process.rs => external_process.rs} | 7 +- .../query-tests-setup/src/lib.rs | 3 +- .../connector-test-kit-executor/package.json | 5 +- .../connector-test-kit-executor/src/index.ts | 71 +++++++++++++++---- .../js/connector-test-kit-executor/src/qe.ts | 10 ++- .../driver-adapters/js/pnpm-lock.yaml | 9 +++ .../src/client/neon.http.test.ts | 2 +- .../smoke-test-js/src/client/neon.ws.test.ts | 2 +- .../js/smoke-test-js/src/client/pg.test.ts | 4 +- .../src/client/planetscale.test.ts | 4 +- .../src/libquery/neon.http.test.ts | 2 +- .../src/libquery/neon.ws.test.ts | 2 +- .../js/smoke-test-js/src/libquery/pg.test.ts | 4 +- .../src/libquery/planetscale.test.ts | 6 +- .../query-engine-node-api/src/engine.rs | 6 +- 19 files changed, 141 insertions(+), 51 deletions(-) rename query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/{node_process.rs => external_process.rs} (97%) diff --git a/.envrc b/.envrc index 29a3b25822d5..64aea05c728e 100644 --- a/.envrc +++ b/.envrc @@ -22,6 +22,12 @@ export QE_LOG_LEVEL=debug # Set it to "trace" to enable query-graph debugging lo # export PRISMA_RENDER_DOT_FILE=1 # Uncomment to enable rendering a dot file of the Query Graph from an executed query. # export FMT_SQL=1 # Uncomment it to enable logging formatted SQL queries +### Uncomment to run driver adapters tests. 
See query-engine-driver-adapters.yml workflow for how tests run in CI. +# export EXTERNAL_TEST_EXECUTOR="$(pwd)/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +# export DRIVER_ADAPTER=pg # Set to pg, neon or planetscale +# export PRISMA_DISABLE_QUAINT_EXECUTORS=1 # Disable quaint executors for driver adapters +# export DRIVER_ADAPTER_URL_OVERRIDE ="postgres://USER:PASSWORD@DATABASExxxx" # Override the database url for the driver adapter tests + # Mongo image requires additional wait time on arm arch for some reason. if uname -a | grep -q 'arm64'; then export INIT_WAIT_SEC="10" diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 9a5af5f6d03e..ad7b51fc2beb 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -28,20 +28,24 @@ jobs: version: "13" driver_adapter: "pg" node_version: ["18"] - env: LOG_LEVEL: "info" LOG_QUERIES: "y" RUST_LOG: "info" RUST_LOG_FORMAT: "devel" RUST_BACKTRACE: "1" + PRISMA_DISABLE_QUAINT_EXECUTORS: "1" CLICOLOR_FORCE: "1" CLOSED_TX_CLEANUP: "2" SIMPLE_TEST_MODE: "1" QUERY_BATCH_SIZE: "10" TEST_CONNECTOR: ${{ matrix.database.connector }} TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} - TEST_DRIVER_ADAPTER: ${{ matrix.database.driver_adapter }} + WORKSPACE_ROOT: ${{ github.workspace }} + # Driver adapter testing specific env vars + EXTERNAL_TEST_EXECUTOR: "${{ github.workspace }}/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" + DRIVER_ADAPTER: ${{ matrix.database.driver_adapter }} + DRIVER_ADAPTER_URL_OVERRIDE: ${{ matrix.database.driver_adapter_url }} runs-on: buildjet-16vcpu-ubuntu-2004 steps: @@ -77,7 +81,7 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: "Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})" + - name: "Start ${{ matrix.database.name }}" run: make start-${{ matrix.database.name }} - uses: dtolnay/rust-toolchain@stable @@ -90,8 +94,5 @@ jobs: - name: "Run tests" run: cargo test --package query-engine-tests -- --test-threads=1 - env: - CLICOLOR_FORCE: 1 - WORKSPACE_ROOT: ${{ github.workspace }} - NODE_TEST_EXECUTOR: "${{ github.workspace }}/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" + diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index e6821193a628..3cbaadb63523 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -1,6 +1,5 @@ # Query Engine Test Kit - A Full Guide -The test kit is a (currently incomplete) port of the Scala test kit, located in `../connector-test-kit`. -It's fully focused on integration testing the query engine through request-response assertions. +The test kit is focused on integration testing the query engine through request-response assertions. ## Test organization @@ -35,8 +34,10 @@ Contains the main bulk of logic to make tests run, which is mostly invisible to Tests are executed in the context of *one* _connector_ (with version) and _runner_. Some tests may only be specified to run for a subset of connectors or versions, in which case they will be skipped. Testing all connectors at once is not supported, however, for example, CI will run all the different connectors and versions concurrently in separate runs. 
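As a companion to the `DRIVER_ADAPTER` and `EXTERNAL_TEST_EXECUTOR` variables introduced above, here is a condensed sketch of how the test executor resolves an adapter from the environment. It is simplified from the `connector-test-kit-executor/src/index.ts` changes later in this series; the factory map is left empty here, whereas the real file registers `pg` and `neon` factories:

```ts
import type { DriverAdapter } from '@prisma/driver-adapter-utils'

// Map of adapter name -> factory; the real executor registers pg and neon here.
const SUPPORTED_ADAPTERS: Record<string, (url: string) => Promise<DriverAdapter>> = {}

async function adapterFromEnv(url: string): Promise<DriverAdapter> {
  const adapter = process.env.DRIVER_ADAPTER ?? ''
  if (adapter === '') {
    throw new Error('DRIVER_ADAPTER is not defined or empty.')
  }
  if (!(adapter in SUPPORTED_ADAPTERS)) {
    throw new Error(`Unsupported driver adapter: ${adapter}`)
  }
  return SUPPORTED_ADAPTERS[adapter](url)
}
```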
### Configuration + Tests must be configured to run. There's a set of env vars that is always useful to have and an optional one. Always useful to have: + ```shell export WORKSPACE_ROOT=/path/to/engines/repository/root ``` @@ -54,6 +55,7 @@ As previously stated, the above can be omitted in favor of the `.test_config` co "version": "10" } ``` + The config file must be either in the current working folder from which you invoke a test run or in `$WORKSPACE_ROOT`. It's recommended to use the file-based config as it's easier to switch between providers with an open IDE (reloading env vars would usually require reloading the IDE). The workspace root makefile contains a series of convenience commands to set up different connector test configs, e.g. `make dev-postgres10` sets up the correct test config file for the tests to pick up. @@ -62,7 +64,29 @@ On the note of docker containers: Most connectors require an endpoint to run aga If you choose to set up the databases yourself, please note that the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) set up the user, password and database for the test user. +#### Running tests through driver adapters + +The query engine is able to delegate query execution to JavaScript through [driver adapters](query-engine/driver-adapters/js/README.md). +This means that instead of drivers being implemented in Rust, the code that actually communicates with the databases is a layer of adapters over Node.js drivers. + +To run tests through a driver adapter, you should also configure the following environment variables: + +* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries. This is a node process running +a program that reads the queries to run from STDIN and returns responses to STDOUT. The connector kit follows a protocol over JSON-RPC for this communication. +* `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. +* `DRIVER_ADAPTER_URL_OVERRIDE`: overrides the schema URL for the database with one understood by the driver adapter (e.g. neon, planetscale) + + +Example: + +```shell +export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +export DRIVER_ADAPTER=neon +export DRIVER_ADAPTER_URL_OVERRIDE="postgres://USER:PASSWORD@DATABASExxxx" +``` + ### Running + Note that by default tests run concurrently. - VSCode should automatically detect tests and display `run test`. 
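The JSON-RPC protocol mentioned in the README above is visible in the executor's request handler later in this series (`startTx`, `commitTx`, `rollbackTx`, `teardown`, `getLogs`, among others). Below is a minimal sketch of the message shapes, assuming a standard JSON-RPC 2.0 envelope; the exact framing lives in the executor's `jsonRpc` module, which these patches do not show:

```ts
// Assumed shapes: the method list is taken from the executor's handler; the
// envelope fields follow JSON-RPC 2.0 conventions and may differ in detail.
interface RpcRequest {
  jsonrpc: '2.0'
  id: number
  method: 'startTx' | 'commitTx' | 'rollbackTx' | 'teardown' | 'getLogs'
  params: Record<string, unknown>
}

interface RpcOkResponse {
  jsonrpc: '2.0'
  id: number
  result: unknown
}

// Example: committing a transaction on the first initialized schema.
const commit: RpcRequest = {
  jsonrpc: '2.0',
  id: 1,
  method: 'commitTx',
  params: { schemaId: 0, txId: 'tx-1' },
}

console.log(JSON.stringify(commit))
```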
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs index 6e174808a38d..2ec8513baeda 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js.rs @@ -1,7 +1,7 @@ -mod node_process; +mod external_process; use super::*; -use node_process::*; +use external_process::*; use serde::de::DeserializeOwned; use std::{collections::HashMap, sync::atomic::AtomicU64}; use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; @@ -10,5 +10,5 @@ pub(crate) async fn executor_process_request( method: &str, params: serde_json::Value, ) -> Result> { - NODE_PROCESS.request(method, params).await + EXTERNAL_PROCESS.request(method, params).await } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs similarity index 97% rename from query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs rename to query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs index 3b58ad527925..332eb3ea50d2 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/node_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -69,7 +69,7 @@ impl ExecutorProcess { } } -pub(super) static NODE_PROCESS: Lazy = +pub(super) static EXTERNAL_PROCESS: Lazy = Lazy::new(|| match std::thread::spawn(ExecutorProcess::new).join() { Ok(Ok(process)) => process, Ok(Err(err)) => exit_with_message(1, &format!("Failed to start node process. Details: {err}")), @@ -87,7 +87,10 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { let env_var = match crate::EXTERNAL_TEST_EXECUTOR.as_ref() { Some(env_var) => env_var, - None => exit_with_message(1, "start_rpc_thread() error: NODE_TEST_EXECUTOR env var is not defined"), + None => exit_with_message( + 1, + "start_rpc_thread() error: EXTERNAL_TEST_EXECUTOR env var is not defined", + ), }; tokio::runtime::Builder::new_current_thread() diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index f1a53f6b2f53..99bd486f51d3 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -42,8 +42,7 @@ pub static ENV_LOG_LEVEL: Lazy = Lazy::new(|| std::env::var("LOG_LEVEL") pub static ENGINE_PROTOCOL: Lazy = Lazy::new(|| std::env::var("PRISMA_ENGINE_PROTOCOL").unwrap_or_else(|_| "graphql".to_owned())); -// TODO: rename env var to EXTERNAL_TEST_EXECUTOR -static EXTERNAL_TEST_EXECUTOR: Lazy> = Lazy::new(|| std::env::var("NODE_TEST_EXECUTOR").ok()); +static EXTERNAL_TEST_EXECUTOR: Lazy> = Lazy::new(|| std::env::var("EXTERNAL_TEST_EXECUTOR").ok()); /// Teardown of a test setup. 
async fn teardown_project(datamodel: &str, db_schemas: &[&str], schema_id: Option) -> TestResult<()> { diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index 38cb115ace79..1dc1315afc83 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -13,9 +13,12 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/driver-adapter-utils": "workspace:*", + "@types/pg": "^8.10.2", "pg": "^8.11.3", - "@types/pg": "^8.10.2" + "undici": "^5.23.0" } } diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index 3648276c37f3..78e1b8954ae5 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -1,10 +1,22 @@ -import pgDriver from 'pg' -import * as pg from '@prisma/adapter-pg' import * as qe from './qe' import * as engines from './engines/Library' import * as readline from 'node:readline' import * as jsonRpc from './jsonRpc' -import {bindAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; + +// pg dependencies +import pgDriver from 'pg' +import * as prismaPg from '@prisma/adapter-pg' + +// neon dependencies +import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' +import { WebSocket } from 'undici' +import * as prismaNeon from '@prisma/adapter-neon' +neonConfig.webSocketConstructor = WebSocket + +import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; + +const SUPPORTED_ADAPTERS: Record Promise> + = {pg: pgAdapter, neon: neonAdapter}; async function main(): Promise { const iface = readline.createInterface({ @@ -94,13 +106,11 @@ async function handleRequest(method: string, params: unknown): Promise schemaId: number, options: unknown } + console.error("Got `startTx", params) - const { schemaId, options } = params as StartTxPayload + const {schemaId, options} = params as StartTxPayload const result = await schemas[schemaId].startTransaction(JSON.stringify(options), "") return JSON.parse(result) - - - } case 'commitTx': { @@ -108,8 +118,9 @@ async function handleRequest(method: string, params: unknown): Promise schemaId: number, txId: string, } + console.error("Got `commitTx", params) - const { schemaId, txId } = params as CommitTxPayload + const {schemaId, txId} = params as CommitTxPayload const result = await schemas[schemaId].commitTransaction(txId, '{}') return JSON.parse(result) } @@ -119,8 +130,9 @@ async function handleRequest(method: string, params: unknown): Promise schemaId: number, txId: string, } + console.error("Got `rollbackTx", params) - const { schemaId, txId } = params as RollbackTxPayload + const {schemaId, txId} = params as RollbackTxPayload const result = await schemas[schemaId].rollbackTransaction(txId, '{}') return JSON.parse(result) } @@ -132,15 +144,15 @@ async function handleRequest(method: string, params: unknown): Promise const castParams = params as TeardownPayload; await schemas[castParams.schemaId].disconnect("") delete schemas[castParams.schemaId] + delete adapters[castParams.schemaId] delete queryLogs[castParams.schemaId] 
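// the teardown handler acknowledges with an empty payload once the engine, adapter and query logs have been released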
return {} - } - case 'getLogs': { interface GetLogsPayload { schemaId: number } + const castParams = params as GetLogsPayload return queryLogs[castParams.schemaId] ?? [] } @@ -170,10 +182,39 @@ function respondOk(requestId: number, payload: unknown) { } async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[engines.QueryEngineInstance, ErrorCapturingDriverAdapter]> { - const pool = new pgDriver.Pool({ connectionString: url }) - const adapter = bindAdapter(new pg.PrismaPg(pool)) - const engineInstance = qe.initQueryEngine(adapter, prismaSchema, logCallback) - return [engineInstance, adapter]; + const adapter = await adapterFromEnv(url) as DriverAdapter + const errorCapturingAdapter = bindAdapter(adapter) + const engineInstance = qe.initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback) + return [engineInstance, errorCapturingAdapter]; +} + +async function adapterFromEnv(url: string): Promise<DriverAdapter> { + const adapter = process.env.DRIVER_ADAPTER ?? '' + + if (adapter == '') { + throw new Error("DRIVER_ADAPTER is not defined or empty.") + } + + if (!(adapter in SUPPORTED_ADAPTERS)) { + throw new Error(`Unsupported driver adapter: ${adapter}`) + } + + return await SUPPORTED_ADAPTERS[adapter](url); +} + +async function pgAdapter(url: string): Promise<DriverAdapter> { + const pool = new pgDriver.Pool({connectionString: url}) + return new prismaPg.PrismaPg(pool) +} + +async function neonAdapter(_: string): Promise<DriverAdapter> { + const connectionString = process.env.DRIVER_ADAPTER_URL_OVERRIDE ?? '' + if (connectionString == '') { + throw new Error("DRIVER_ADAPTER_URL_OVERRIDE is not defined or empty, but it's required for the neon adapter."); + } + + const pool = new NeonPool({ connectionString }) + return new prismaNeon.PrismaNeon(pool) } main().catch(console.error) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts index af32bd0bdb8b..a8256bf08b3f 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts @@ -28,14 +28,18 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: ignoreEnvVarErrors: false, } + const logCallback = (event: any) => { const parsed = JSON.parse(event) if (parsed.is_query) { queryLogCallback(parsed.query) } - console.error("[nodejs] ", parsed) + + const level = process.env.LOG_LEVEL ??
'' + if (level.toLowerCase() == 'debug') { + console.error("[nodejs] ", parsed) + } } - const engine = new QueryEngine(queryEngineOptions, logCallback, adapter) - return engine + return new QueryEngine(queryEngineOptions, logCallback, adapter) } diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 123db305f805..f919d4a1a066 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -53,6 +53,12 @@ importers: connector-test-kit-executor: dependencies: + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@prisma/adapter-neon': + specifier: workspace:* + version: link:../adapter-neon '@prisma/adapter-pg': specifier: workspace:* version: link:../adapter-pg @@ -65,6 +71,9 @@ importers: pg: specifier: ^8.11.3 version: 8.11.3 + undici: + specifier: ^5.23.0 + version: 5.23.0 driver-adapter-utils: dependencies: diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts index 137bb0ed9835..44cb1fde98a8 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts @@ -4,7 +4,7 @@ import { PrismaNeonHTTP } from '@prisma/adapter-neon' import { smokeTestClient } from './client' describe('neon with @prisma/client', async () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` + const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' const connection = neon(connectionString, { arrayMode: false, diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts index c6e85eca0891..37b0a9088bb7 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts @@ -7,7 +7,7 @@ import { smokeTestClient } from './client' neonConfig.webSocketConstructor = WebSocket describe('neon with @prisma/client', async () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` + const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' const pool = new Pool({ connectionString }) const adapter = new PrismaNeon(pool) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts index 7394e5f55e89..99048ad3d95f 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts @@ -4,10 +4,10 @@ import { PrismaPg } from '@prisma/adapter-pg' import { smokeTestClient } from './client' describe('pg with @prisma/client', async () => { - const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` + const connectionString = process.env.JS_PG_DATABASE_URL ?? 
'' const pool = new pg.Pool({ connectionString }) const adapter = new PrismaPg(pool) - + smokeTestClient(adapter) }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts index e82e209247f4..3c22b7aa3062 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts @@ -4,10 +4,10 @@ import { describe } from 'node:test' import { smokeTestClient } from './client' describe('planetscale with @prisma/client', async () => { - const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` + const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' const connnection = connect({ url: connectionString }) const adapter = new PrismaPlanetScale(connnection) - + smokeTestClient(adapter) }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index dc839405b21f..ac165d29f584 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -5,7 +5,7 @@ import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' describe('neon (HTTP)', () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` + const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' const neonConnection = neon(connectionString) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts index 017cb8f1c31d..54765f5961ba 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts @@ -8,7 +8,7 @@ import { smokeTestLibquery } from './libquery' neonConfig.webSocketConstructor = WebSocket describe('neon (WebSocket)', () => { - const connectionString = `${process.env.JS_NEON_DATABASE_URL as string}` + const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' const pool = new Pool({ connectionString }) const adapter = new PrismaNeon(pool) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts index ca4f297bb1f2..9b79e7284be8 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts @@ -5,11 +5,11 @@ import { describe } from 'node:test' import { smokeTestLibquery } from './libquery' describe('pg', () => { - const connectionString = `${process.env.JS_PG_DATABASE_URL as string}` + const connectionString = process.env.JS_PG_DATABASE_URL ?? 
'' const pool = new pg.Pool({ connectionString }) const adapter = new PrismaPg(pool) const driverAdapter = bindAdapter(adapter) - + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts index 2d3137fbe500..bb7c81805adc 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts @@ -2,14 +2,14 @@ import { connect } from '@planetscale/database' import { PrismaPlanetScale } from '@prisma/adapter-planetscale' import { bindAdapter } from '@prisma/driver-adapter-utils' import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' +import { smokeTestLibquery } from './libquery' describe('planetscale', () => { - const connectionString = `${process.env.JS_PLANETSCALE_DATABASE_URL as string}` + const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' const connnection = connect({ url: connectionString }) const adapter = new PrismaPlanetScale(connnection) const driverAdapter = bindAdapter(adapter) - + smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') }) diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 8d5f56ca7bfa..e376cf16a049 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -147,7 +147,7 @@ impl QueryEngine { napi_env: Env, options: JsUnknown, callback: JsFunction, - maybe_driver: Option<JsObject>, + maybe_adapter: Option<JsObject>, ) -> napi::Result<Self> { let mut log_callback = callback.create_threadsafe_function(0usize, |ctx: ThreadSafeCallContext<String>| { Ok(vec![ctx.env.create_string(&ctx.value)?]) @@ -181,8 +181,8 @@ impl QueryEngine { ); } else { #[cfg(feature = "driver-adapters")] - if let Some(driver) = maybe_driver { - let js_queryable = driver_adapters::from_napi(driver); + if let Some(adapter) = maybe_adapter { + let js_queryable = driver_adapters::from_napi(adapter); sql_connector::activate_driver_adapter(Arc::new(js_queryable)); connector_mode = ConnectorMode::Js; From 5a6164f8b0bd5c52a147a60be5dd9fa94e154f15 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Thu, 21 Sep 2023 17:40:00 +0200 Subject: [PATCH 029/128] driver-adapters: Ensure error propagation works for `startTransaction` (#4267) * driver-adapters: Ensure error propagation works for `startTransaction` `startTransaction` was not correctly wrapped when converting `Adapter` to `ErrorCapturingAdapter`. As a result, a `GenericError` message was returned in case of a JS error. This PR fixes the problem. Since it is hard to test those kinds of errors with real drivers, a new test suite for error propagation is introduced. It uses a fake postgres adapter that throws on every call.
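For illustration, the wrapping now looks roughly like this (a simplified sketch of the `binder.ts` change; see the diff below for the exact code):

```ts
// Route the adapter callback through wrapAsync, which catches a thrown JS
// error, registers it in the error registry, and returns it as a structured
// `{ ok: false, error }` result instead of a generic engine error.
const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter))

const boundStartTransaction = async (...args) => {
  const result = await startTransaction(...args)
  // On success, the returned transaction is wrapped as well, so errors thrown
  // by queryRaw/executeRaw/commit/rollback inside the transaction are captured too.
  if (result.ok) {
    return { ok: true, value: bindTransaction(errorRegistry, result.value) }
  }
  return result
}
```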
* Add test for executeRaw --- .../js/driver-adapter-utils/src/binder.ts | 25 +- .../js/smoke-test-js/package.json | 1 + .../prisma/postgres/schema.prisma | 5 + .../smoke-test-js/src/libquery/errors.test.ts | 96 ++ .../js/smoke-test-js/src/libquery/libquery.ts | 847 ++++++++---------- .../js/smoke-test-js/src/libquery/util.ts | 29 +- 6 files changed, 546 insertions(+), 457 deletions(-) create mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts index 9d399056f9a1..0c251fe298a1 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts @@ -1,4 +1,11 @@ -import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord, Result } from './types' +import type { + ErrorCapturingDriverAdapter, + DriverAdapter, + Transaction, + ErrorRegistry, + ErrorRecord, + Result, +} from './types' class ErrorRegistryInternal implements ErrorRegistry { private registeredErrors: ErrorRecord[] = [] @@ -22,36 +29,40 @@ class ErrorRegistryInternal implements ErrorRegistry { export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { const errorRegistry = new ErrorRegistryInternal() + const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter)) return { errorRegistry, queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), flavour: adapter.flavour, startTransaction: async (...args) => { - const result = await adapter.startTransaction(...args) + const result = await startTransaction(...args) if (result.ok) { return { ok: true, value: bindTransaction(errorRegistry, result.value) } } return result }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)) + close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), } } // *.bind(transaction) is required to preserve the `this` context of functions whose // execution is delegated to napi.rs. 
const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => { - return ({ + return { flavour: transaction.flavour, options: transaction.options, queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)), executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), - }); + } } -function wrapAsync<A extends unknown[], R>(registry: ErrorRegistryInternal, fn: (...args: A) => Promise<Result<R>>): (...args: A) => Promise<Result<R>> { +function wrapAsync<A extends unknown[], R>( + registry: ErrorRegistryInternal, + fn: (...args: A) => Promise<Result<R>>, +): (...args: A) => Promise<Result<R>> { return async (...args) => { try { return await fn(...args) @@ -60,4 +71,4 @@ function wrapAsync<A extends unknown[], R>(registry: ErrorRegistryInternal, fn: return { ok: false, error: { kind: 'GenericJsError', id } } } } -} \ No newline at end of file +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 5de722eabcca..37cecd082393 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -24,6 +24,7 @@ "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/pg.test.ts", "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/client/pg.test.ts", "pg": "pnpm pg:libquery && pnpm pg:client", + "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/errors.test.ts", "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/planetscale.test.ts", diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma index c2564af557e4..4c92945ea852 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma @@ -98,3 +98,8 @@ model Product { properties Json properties_null Json?
} + +model User { + id String @id @default(uuid()) + email String +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts new file mode 100644 index 000000000000..c917f35fd7b5 --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts @@ -0,0 +1,96 @@ +import { bindAdapter } from '@prisma/driver-adapter-utils' +import test, { after, before, describe } from 'node:test' +import { createQueryFn, initQueryEngine, throwAdapterError } from './util' +import assert from 'node:assert' + +const fakeAdapter = bindAdapter({ + flavour: 'postgres', + startTransaction() { + throw new Error('Error in startTransaction') + }, + + queryRaw() { + throw new Error('Error in queryRaw') + }, + + executeRaw() { + throw new Error('Error in executeRaw') + }, + close() { + return Promise.resolve({ ok: true, value: undefined }) + }, +}) + +const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma') +const doQuery = createQueryFn(engine, fakeAdapter) + +const startTransaction = async () => { + const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } + const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}')) + if (res['error_code']) { + throwAdapterError(res, fakeAdapter) + } +} + +describe('errors propagation', () => { + before(async () => { + await engine.connect('{}') + }) + after(async () => { + await engine.disconnect('{}') + }) + + test('works for queries', async () => { + await assert.rejects( + doQuery({ + modelName: 'Product', + action: 'findMany', + query: { + arguments: {}, + selection: { + $scalars: true, + }, + }, + }), + /Error in queryRaw/, + ) + }) + + test('works for executeRaw', async () => { + await assert.rejects( + doQuery({ + action: 'executeRaw', + query: { + arguments: { + query: 'SELECT 1', + parameters: '[]', + }, + selection: { + $scalars: true, + }, + }, + }), + /Error in executeRaw/, + ) + }) + + test('works with implicit transaction', async () => { + await assert.rejects( + doQuery({ + modelName: 'Product', + action: 'deleteMany', + query: { + arguments: {}, + selection: { + $scalars: true, + }, + }, + }), + /Error in startTransaction/, + ) + }) + + test('works with explicit transaction', async () => { + await assert.rejects(startTransaction(), /Error in startTransaction/) + }) +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index f91a72c0383e..44d07abb9a4e 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -2,28 +2,14 @@ import { describe, it, before, after } from 'node:test' import assert from 'node:assert' import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' -import { initQueryEngine } from './util' +import { createQueryFn, initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { const engine = initQueryEngine(adapter, prismaSchemaRelativePath) const flavour = adapter.flavour - const doQuery = async (query: JsonQuery, tx_id?: string) => { - const result = await engine.query(JSON.stringify(query), 'trace', tx_id) - const 
parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) - } - throw jsError.error - } - } - return parsedResult - } + const doQuery = createQueryFn(engine, adapter) describe('using libquery with Driver Adapters', () => { before(async () => { @@ -43,9 +29,9 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc selection: { $scalars: true }, arguments: { query: 'NOT A VALID SQL, THIS WILL FAIL', - parameters: '[]' - } - } + parameters: '[]', + }, + }, }) }) }) @@ -56,149 +42,134 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc baz: 1, }) - const created = await doQuery( - { - "action": "createOne", - "modelName": "Product", - "query": { - "arguments": { - "data": { - "properties": json, - "properties_null": null - } + const created = await doQuery({ + action: 'createOne', + modelName: 'Product', + query: { + arguments: { + data: { + properties: json, + properties_null: null, }, - "selection": { - "properties": true - } - } - }) + }, + selection: { + properties: true, + }, + }, + }) assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') console.log('[nodejs] created', JSON.stringify(created, null, 2)) - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "Product", - "query": { - "selection": { - "id": true, - "properties": true, - "properties_null": true - } - } - } - ) + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'Product', + query: { + selection: { + id: true, + properties: true, + properties_null: true, + }, + }, + }) console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - await doQuery( - { - "action": "deleteMany", - "modelName": "Product", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await doQuery({ + action: 'deleteMany', + modelName: 'Product', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) }) it('create with autoincrement', async () => { - await doQuery( - { - "modelName": "Author", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await doQuery({ + modelName: 'Author', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) - const author = await doQuery( - { - "modelName": "Author", - "action": "createOne", - "query": { - "arguments": { - "data": { - "firstName": "Firstname from autoincrement", - "lastName": "Lastname from autoincrement", - "age": 99 - } + const author = await doQuery({ + modelName: 'Author', + action: 'createOne', + query: { + arguments: { + data: { + firstName: 'Firstname from autoincrement', + lastName: 'Lastname from autoincrement', + age: 99, }, - "selection": { - "id": true, - "firstName": true, - "lastName": true - } - } - } - ) + }, + selection: { + id: true, + firstName: true, + lastName: true, + }, + }, + }) console.log('[nodejs] author', JSON.stringify(author, null, 2)) }) it('create non scalar types', async () => { - const create = await doQuery( - { - "action": "createOne", - "modelName": "type_test_2", - "query": { - "arguments": { - "data": {} - }, - "selection": { 
- "id": true, - "datetime_column": true, - "datetime_column_null": true - } - } - } - ) + const create = await doQuery({ + action: 'createOne', + modelName: 'type_test_2', + query: { + arguments: { + data: {}, + }, + selection: { + id: true, + datetime_column: true, + datetime_column_null: true, + }, + }, + }) console.log('[nodejs] create', JSON.stringify(create, null, 2)) - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "type_test_2", - "query": { - "selection": { - "id": true, - "datetime_column": true, - "datetime_column_null": true - }, - "arguments": { - "where": {} - } - } - } - ) + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test_2', + query: { + selection: { + id: true, + datetime_column: true, + datetime_column_null: true, + }, + arguments: { + where: {}, + }, + }, + }) console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - await doQuery( - { - "action": "deleteMany", - "modelName": "type_test_2", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await doQuery({ + action: 'deleteMany', + modelName: 'type_test_2', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) }) it('create/delete parent and child', async () => { @@ -209,40 +180,36 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' // ] - await doQuery( - { - "modelName": "Child", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await doQuery({ + modelName: 'Child', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) // Queries: [ // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' // ] - await doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) /* Create a parent with some new children, within a transaction */ @@ -252,36 +219,34 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? 
OFFSET ?', // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' // ] - await doQuery( - { - "modelName": "Parent", - "action": "createOne", - "query": { - "arguments": { - "data": { - "p": "p1", - "p_1": "1", - "p_2": "2", - "childOpt": { - "create": { - "c": "c1", - "c_1": "foo", - "c_2": "bar" - } - } - } + await doQuery({ + modelName: 'Parent', + action: 'createOne', + query: { + arguments: { + data: { + p: 'p1', + p_1: '1', + p_2: '2', + childOpt: { + create: { + c: 'c1', + c_1: 'foo', + c_2: 'bar', + }, + }, }, - "selection": { - "p": true, - "childOpt": { - "selection": { - "c": true - } - } - } - } - } - ) + }, + selection: { + p: true, + childOpt: { + selection: { + c: true, + }, + }, + }, + }, + }) /* Delete the parent */ @@ -292,41 +257,39 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' // ] - await doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": { - "p": "p1" - } + await doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: { + p: 'p1', }, - "selection": { - "count": true - } - } - } - ) + }, + selection: { + count: true, + }, + }, + }) }) it('create explicit transaction', async () => { const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') const tx_id = JSON.parse(startResponse).id - + console.log('[nodejs] transaction id', tx_id) await doQuery( { - "action": "findMany", - "modelName": "Author", - "query": { - "selection": { "$scalars": true } - } + action: 'findMany', + modelName: 'Author', + query: { + selection: { $scalars: true }, + }, }, - tx_id + tx_id, ) - + const commitResponse = await engine.commitTransaction(tx_id, 'trace') console.log('[nodejs] commited', commitResponse) }) @@ -334,68 +297,65 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc describe('read scalar and non scalar types', () => { if (['mysql'].includes(flavour)) { it('mysql', async () => { - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "tinyint_column": true, - "smallint_column": true, - "mediumint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true, - "binary_column": true, - "varbinary_column": true, - "blob_column": true - } - } - }) - + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + tinyint_column: true, + smallint_column: true, + mediumint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + 
enum_column: true, + binary_column: true, + varbinary_column: true, + blob_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) }) } else if (['postgres'].includes(flavour)) { it('postgres', async () => { - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "smallint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + smallint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) }) } else { throw new Error(`Missing test for flavour ${flavour}`) @@ -411,7 +371,6 @@ class SmokeTest { this.flavour = connector.flavour } - async testFindManyTypeTest() { await this.testFindManyTypeTestMySQL() await this.testFindManyTypeTestPostgres() @@ -422,36 +381,35 @@ class SmokeTest { return } - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "tinyint_column": true, - "smallint_column": true, - "mediumint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - "time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true, - "binary_column": true, - "varbinary_column": true, - "blob_column": true - } - } - }) + const resultSet = await this.doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + tinyint_column: true, + smallint_column: true, + mediumint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + binary_column: true, + varbinary_column: true, + blob_column: true, + }, + }, + }) console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) @@ -463,73 +421,67 @@ class SmokeTest { return } - const resultSet = await this.doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "smallint_column": true, - "int_column": true, - "bigint_column": true, - "float_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "char_column": true, - "varchar_column": true, - "text_column": true, - "date_column": true, - 
"time_column": true, - "datetime_column": true, - "timestamp_column": true, - "json_column": true, - "enum_column": true - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) + const resultSet = await this.doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + smallint_column: true, + int_column: true, + bigint_column: true, + float_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + char_column: true, + varchar_column: true, + text_column: true, + date_column: true, + time_column: true, + datetime_column: true, + timestamp_column: true, + json_column: true, + enum_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) return resultSet } async createAutoIncrement() { - await this.doQuery( - { - "modelName": "Author", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await this.doQuery({ + modelName: 'Author', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) - const author = await this.doQuery( - { - "modelName": "Author", - "action": "createOne", - "query": { - "arguments": { - "data": { - "firstName": "Firstname from autoincrement", - "lastName": "Lastname from autoincrement", - "age": 99 - } + const author = await this.doQuery({ + modelName: 'Author', + action: 'createOne', + query: { + arguments: { + data: { + firstName: 'Firstname from autoincrement', + lastName: 'Lastname from autoincrement', + age: 99, }, - "selection": { - "id": true, - "firstName": true, - "lastName": true - } - } - } - ) + }, + selection: { + id: true, + firstName: true, + lastName: true, + }, + }, + }) console.log('[nodejs] author', JSON.stringify(author, null, 2)) } @@ -541,40 +493,36 @@ class SmokeTest { // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' // ] - await this.doQuery( - { - "modelName": "Child", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await this.doQuery({ + modelName: 'Child', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) // Queries: [ // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' // ] - await this.doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": {} - }, - "selection": { - "count": true - } - } - } - ) + await this.doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: {}, + }, + selection: { + count: true, + }, + }, + }) /* Create a parent with some new children, within a transaction */ @@ -584,36 +532,34 @@ class SmokeTest { // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? 
OFFSET ?', // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' // ] - await this.doQuery( - { - "modelName": "Parent", - "action": "createOne", - "query": { - "arguments": { - "data": { - "p": "p1", - "p_1": "1", - "p_2": "2", - "childOpt": { - "create": { - "c": "c1", - "c_1": "foo", - "c_2": "bar" - } - } - } + await this.doQuery({ + modelName: 'Parent', + action: 'createOne', + query: { + arguments: { + data: { + p: 'p1', + p_1: '1', + p_2: '2', + childOpt: { + create: { + c: 'c1', + c_1: 'foo', + c_2: 'bar', + }, + }, }, - "selection": { - "p": true, - "childOpt": { - "selection": { - "c": true - } - } - } - } - } - ) + }, + selection: { + p: true, + childOpt: { + selection: { + c: true, + }, + }, + }, + }, + }) /* Delete the parent */ @@ -624,40 +570,41 @@ class SmokeTest { // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' // ] - const resultDeleteMany = await this.doQuery( - { - "modelName": "Parent", - "action": "deleteMany", - "query": { - "arguments": { - "where": { - "p": "p1" - } + const resultDeleteMany = await this.doQuery({ + modelName: 'Parent', + action: 'deleteMany', + query: { + arguments: { + where: { + p: 'p1', }, - "selection": { - "count": true - } - } - } - ) + }, + selection: { + count: true, + }, + }, + }) console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) } async testTransaction() { - const startResponse = await this.engine.startTransaction(JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), 'trace') + const startResponse = await this.engine.startTransaction( + JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), + 'trace', + ) const tx_id = JSON.parse(startResponse).id console.log('[nodejs] transaction id', tx_id) await this.doQuery( { - "action": "findMany", - "modelName": "Author", - "query": { - "selection": { "$scalars": true } - } + action: 'findMany', + modelName: 'Author', + query: { + selection: { $scalars: true }, + }, }, - tx_id + tx_id, ) const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') @@ -672,7 +619,9 @@ class SmokeTest { if (error.error_code === 'P2036') { const jsError = this.connector.errorRegistry.consumeError(error.meta.id) if (!jsError) { - throw new Error(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) + throw new Error( + `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, + ) } throw jsError.error } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts index d028f6929303..ad453ae9313b 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts @@ -3,8 +3,12 @@ import os from 'node:os' import fs from 'node:fs' import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import { Library, QueryEngineInstance } from '../engines/types/Library' +import { JsonQuery } from '../engines/types/JsonProtocol' -export function initQueryEngine(driver: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string): QueryEngineInstance { +export function initQueryEngine( + driver: ErrorCapturingDriverAdapter, + prismaSchemaRelativePath: string, +): QueryEngineInstance { // I assume nobody will run this on Windows ¯\_(ツ)_/¯ const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) @@ -38,3 +42,26 @@ export function initQueryEngine(driver: ErrorCapturingDriverAdapter, prismaSchem return engine } + +export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { + return async function doQuery(query: JsonQuery, tx_id?: string) { + const result = await engine.query(JSON.stringify(query), 'trace', tx_id) + const parsedResult = JSON.parse(result) + if (parsedResult.errors) { + throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter) + } + return parsedResult + } +} + +export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) { + if (error.error_code === 'P2036') { + const jsError = adapter.errorRegistry.consumeError(error.meta.id) + if (!jsError) { + throw new Error( + `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, + ) + } + throw jsError.error + } +} From 32692fd2ce90d456c093eb8eae68511575243419 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 21 Sep 2023 17:57:29 +0200 Subject: [PATCH 030/128] driver-adapters: bump package versions for publishing (#4270) --- query-engine/driver-adapters/js/adapter-neon/package.json | 2 +- query-engine/driver-adapters/js/adapter-pg/package.json | 2 +- .../driver-adapters/js/adapter-planetscale/package.json | 2 +- .../driver-adapters/js/driver-adapter-utils/package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 8d5dbc3ed3f3..78d188891748 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.3.2", + "version": "0.3.3", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index f3b2d03c6da4..dc3e73929609 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.3.2", + "version": "0.3.3", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index f2a43486d4e6..aedfde584c37 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-planetscale", - "version": "0.3.2", + "version": "0.3.3", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 24cd503329f9..9bb375dff34c 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.3.2", + "version": "0.3.3", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", From 818b9fcddf4998f2e6ef048f9fdf54822bde3053 Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Fri, 22 Sep 2023 14:17:22 +0200 Subject: [PATCH 031/128] perf: remove enum variant query (#4269) --- Cargo.lock | 10 +++++----- quaint/src/connector/postgres/conversion.rs | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2de4d23d8516..803c2ccda82c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3152,7 +3152,7 @@ dependencies = [ [[package]] name = "postgres-native-tls" version = "0.5.0" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = 
"git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "native-tls", "tokio", @@ -3163,7 +3163,7 @@ dependencies = [ [[package]] name = "postgres-protocol" version = "0.6.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "base64 0.13.1", "byteorder", @@ -3180,7 +3180,7 @@ dependencies = [ [[package]] name = "postgres-types" version = "0.2.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "bit-vec", "bytes", @@ -5162,7 +5162,7 @@ dependencies = [ [[package]] name = "tokio-postgres" version = "0.7.7" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#429e76047f28e64761ad63bc6cc9335c3d3337b5" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" dependencies = [ "async-trait", "byteorder", @@ -5500,7 +5500,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", - "rand 0.8.5", + "rand 0.7.3", "static_assertions", ] diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs index 8ceddcd3d704..4760f436fdcd 100644 --- a/quaint/src/connector/postgres/conversion.rs +++ b/quaint/src/connector/postgres/conversion.rs @@ -519,7 +519,7 @@ impl GetRow for PostgresRow { None => Value::Array(None), }, ref x => match x.kind() { - Kind::Enum(_) => match row.try_get(i)? { + Kind::Enum => match row.try_get(i)? { Some(val) => { let val: EnumString = val; @@ -528,7 +528,7 @@ impl GetRow for PostgresRow { None => Value::Enum(None), }, Kind::Array(inner) => match inner.kind() { - Kind::Enum(_) => match row.try_get(i)? { + Kind::Enum => match row.try_get(i)? { Some(val) => { let val: Vec> = val; let variants = val.into_iter().map(|x| Value::Enum(x.map(|x| x.value.into()))); From c78977b06aa20b736283c9589a177d4c0dfd459e Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Fri, 22 Sep 2023 15:44:31 +0200 Subject: [PATCH 032/128] js-adapters: Better result type (#4274) Adds convenience functions for creating result types: `err` and `ok`, similar to Rust ones. Adds `map` and `flatMap` methods on `Result` type for easier way to work with a `Result` on a happy path. 
Split from #4213 --- .../js/adapter-neon/src/neon.ts | 16 ++++---- .../driver-adapters/js/adapter-pg/src/pg.ts | 37 +++++++++-------- .../js/adapter-planetscale/src/planetscale.ts | 14 +++---- .../js/driver-adapter-utils/src/binder.ts | 17 ++------ .../js/driver-adapter-utils/src/index.ts | 1 + .../js/driver-adapter-utils/src/result.ts | 41 +++++++++++++++++++ .../js/driver-adapter-utils/src/types.ts | 19 +++------ 7 files changed, 88 insertions(+), 57 deletions(-) create mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index da47c9192094..0c915e954dba 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -1,5 +1,5 @@ import type neon from '@neondatabase/serverless' -import { Debug } from '@prisma/driver-adapter-utils' +import { Debug, ok, err } from '@prisma/driver-adapter-utils' import type { DriverAdapter, ResultSet, @@ -36,7 +36,7 @@ abstract class NeonQueryable implements Queryable { rows, } - return { ok: true, value: resultSet } + return ok(resultSet) } async executeRaw(query: Query): Promise> { @@ -46,7 +46,7 @@ abstract class NeonQueryable implements Queryable { const { rowCount: rowsAffected } = await this.performIO(query) // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return { ok: true, value: rowsAffected ?? 0 } + return ok(rowsAffected ?? 0) } abstract performIO(query: Query): Promise @@ -82,14 +82,14 @@ class NeonTransaction extends NeonWsQueryable implements Transa debug(`[js::commit]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return Promise.resolve(ok(undefined)) } async rollback(): Promise> { debug(`[js::rollback]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return Promise.resolve(ok(undefined)) } } @@ -109,7 +109,7 @@ export class PrismaNeon extends NeonWsQueryable implements DriverAdap debug(`${tag} options: %O`, options) const connection = await this.client.connect() - return { ok: true, value: new NeonTransaction(connection, options) } + return ok(new NeonTransaction(connection, options)) } async close() { @@ -117,7 +117,7 @@ export class PrismaNeon extends NeonWsQueryable implements DriverAdap await this.client.end() this.isRunning = false } - return { ok: true as const, value: undefined } + return ok(undefined) } } @@ -139,6 +139,6 @@ export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { } async close() { - return { ok: true as const, value: undefined } + return ok(undefined) } } diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index bc111e84d8a1..5c574460b49b 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -1,6 +1,14 @@ import type pg from 'pg' -import { Debug } from '@prisma/driver-adapter-utils' -import type { DriverAdapter, Query, Queryable, Result, ResultSet, Transaction, TransactionOptions } from '@prisma/driver-adapter-utils' +import { Debug, ok } from '@prisma/driver-adapter-utils' +import type { + DriverAdapter, + Query, + Queryable, + Result, + ResultSet, + Transaction, + TransactionOptions, +} from '@prisma/driver-adapter-utils' import { fieldToColumnType } from './conversion' const debug = Debug('prisma:driver-adapter:pg') @@ -8,12 
+16,10 @@ const debug = Debug('prisma:driver-adapter:pg') type StdClient = pg.Pool type TransactionClient = pg.PoolClient -class PgQueryable - implements Queryable { +class PgQueryable implements Queryable { readonly flavour = 'postgres' - constructor(protected readonly client: ClientT) { - } + constructor(protected readonly client: ClientT) {} /** * Execute a query given as SQL, interpolating the given parameters. @@ -25,14 +31,14 @@ class PgQueryable const { fields, rows } = await this.performIO(query) const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)); + const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) const resultSet: ResultSet = { columnNames: columns, columnTypes, rows, } - return { ok: true, value: resultSet } + return ok(resultSet) } /** @@ -45,9 +51,9 @@ class PgQueryable debug(`${tag} %O`, query) const { rowCount: rowsAffected } = await this.performIO(query) - + // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return { ok: true, value: rowsAffected ?? 0 } + return ok(rowsAffected ?? 0) } /** @@ -69,8 +75,7 @@ class PgQueryable } } -class PgTransaction extends PgQueryable - implements Transaction { +class PgTransaction extends PgQueryable implements Transaction { constructor(client: pg.PoolClient, readonly options: TransactionOptions) { super(client) } @@ -79,14 +84,14 @@ class PgTransaction extends PgQueryable debug(`[js::commit]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return ok(undefined) } async rollback(): Promise> { debug(`[js::rollback]`) this.client.release() - return Promise.resolve({ ok: true, value: undefined }) + return ok(undefined) } } @@ -104,10 +109,10 @@ export class PrismaPg extends PgQueryable implements DriverAdapter { debug(`${tag} options: %O`, options) const connection = await this.client.connect() - return { ok: true, value: new PgTransaction(connection, options) } + return ok(new PgTransaction(connection, options)) } async close() { - return { ok: true as const, value: undefined } + return ok(undefined) } } diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index b1d640398004..b5dffb89272f 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,5 +1,5 @@ import type planetScale from '@planetscale/database' -import { Debug } from '@prisma/driver-adapter-utils' +import { Debug, ok } from '@prisma/driver-adapter-utils' import type { DriverAdapter, ResultSet, @@ -46,7 +46,7 @@ class PlanetScaleQueryable> { debug(`[js::rollback]`) this.txDeferred.reject(new RollbackError()) - return Promise.resolve({ ok: true, value: await this.txResultPromise }) + return Promise.resolve(ok(await this.txResultPromise)) } } @@ -127,7 +127,7 @@ export class PrismaPlanetScale extends PlanetScaleQueryable() const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - resolve({ ok: true, value: txWrapper }) + resolve(ok(txWrapper)) return deferredPromise }) .catch((error) => { @@ -143,6 +143,6 @@ export class PrismaPlanetScale extends PlanetScaleQueryable { const result = await startTransaction(...args) - if (result.ok) { - return { ok: true, value: bindTransaction(errorRegistry, result.value) } - } - return result + return result.map((tx) => 
bindTransaction(errorRegistry, tx)) }, close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), } } @@ -68,7 +59,7 @@ function wrapAsync<A extends unknown[], R>( return await fn(...args) } catch (error) { const id = registry.registerNewError(error) - return { ok: false, error: { kind: 'GenericJsError', id } } + return err({ kind: 'GenericJsError', id }) } } } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts index ce04822473d9..ee851d6961c6 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts @@ -1,4 +1,5 @@ export { bindAdapter } from './binder' export { ColumnTypeEnum } from './const' export { Debug } from './debug' +export { ok, err, type Result } from './result' export type * from './types' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts new file mode 100644 index 000000000000..5af95db68671 --- /dev/null +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts @@ -0,0 +1,41 @@ +import { Error } from './types' +export type Result<T> = { + // common methods + map<U>(fn: (value: T) => U): Result<U> + flatMap<U>(fn: (value: T) => Result<U>): Result<U> +} & ( + | { + readonly ok: true + readonly value: T + } + | { + readonly ok: false + readonly error: Error + } +) + +export function ok<T>(value: T): Result<T> { + return { + ok: true, + value, + map(fn) { + return ok(fn(value)) + }, + flatMap(fn) { + return fn(value) + }, + } +} + +export function err<T>(error: Error): Result<T> { + return { + ok: false, + error, + map() { + return err(error) + }, + flatMap() { + return err(error) + }, + } +} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 0dc06f33dc9f..763a85b7be67 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -1,6 +1,7 @@ import { ColumnTypeEnum } from './const' +import { Result } from './result' -export type ColumnType = typeof ColumnTypeEnum[keyof typeof ColumnTypeEnum] +export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum] export interface ResultSet { /** @@ -33,25 +34,17 @@ export type Query = { } export type Error = { - kind: 'GenericJsError', + kind: 'GenericJsError' id: number } -export type Result<T> = { - ok: true, - value: T -} | { - ok: false, - error: Error -} - -export interface Queryable { +export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' /** * Execute a query given as SQL, interpolating the given parameters, * and returning the type-aware result set of the query. - * + * * This is the preferred way of executing `SELECT` queries. */ queryRaw(params: Query): Promise<Result<ResultSet>> /** * Execute a query given as SQL, interpolating the given parameters, * and returning the number of affected rows. - * + * * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries, * as well as transactional queries.
*/

From 53b99985c86f73501feb5d050668b0debee7afed Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?=
Date: Fri, 22 Sep 2023 15:45:21 +0200
Subject: [PATCH 033/128] test(qe) Improve DX for testing driver adapters
 locally in the query-engine and test prisma engines against postgres 13
 using neon driver adapter (#4268)

This commit allows running tests with driver adapters like any other
connector test, using some conventions: `make dev-adapter-database` and then
`cargo test -p query-engine-tests`

There are four new make tasks: two for `pg-postgres13` and two for
`neon-ws-postgres13`, testing the postgres13 connector suite with the pg and
neon driver adapters respectively:

```Makefile
.PHONY: start-pg-postgres13
start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13

dev-pg-postgres13: start-pg-postgres13
	cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE)
```

and

```Makefile
start-neon-postgres13: build-qe-napi build-connector-kit-js
	docker compose -f docker-compose.yml up -d --remove-orphans neon-ws-postgres13

dev-neon-ws-postgres13: start-neon-postgres13
	cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE)
```

The new .test_config file is copied from a template that has a few more keys:

```rust
/// The central test configuration.
#[derive(Debug, Default, Deserialize)]
pub struct TestConfig {
    /// The connector that tests should run for.
    /// Env key: `TEST_CONNECTOR`
    connector: String,

    /// The connector version tests should run for.
    /// If the test connector is versioned, this option is required.
    /// Env key: `TEST_CONNECTOR_VERSION`
    #[serde(rename = "version")]
    connector_version: Option<String>,

    /// An external process to execute the test queries and produce responses for assertion.
    /// Used when testing driver adapters, this process is expected to be a JavaScript process
    /// loading the library engine (as a library, or WASM modules) and providing it with a
    /// driver adapter.
    /// Env key: `EXTERNAL_TEST_EXECUTOR`
    external_test_executor: Option<String>,

    /// The driver adapter to use when running tests; will be forwarded to the external test
    /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process.
    driver_adapter: Option<String>,

    /// The driver adapter configuration to forward as a stringified JSON object to the external
    /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor.
    driver_adapter_config: Option<serde_json::Value>,

    /// Indicates whether or not the tests are running in CI context.
    /// Env key: `BUILDKITE`
    #[serde(default)]
    is_ci: bool,
}
```

When the `default` value is provided in `EXTERNAL_TEST_EXECUTOR` and `WORKSPACE_ROOT`
is set, the executor used is the one at
`query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh`

Given all the above, a normal workflow to test driver adapters would be to run either:

* `make dev-pg-postgres13`
* `make dev-neon-ws-postgres13`
* ...

and then `cargo test -p query-engine-tests`.
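For reference, the external executor reads one JSON-RPC request per line from STDIN and writes one response per line to STDOUT. The following is a minimal sketch of that loop, not part of the patch; the exact response envelope is an assumption based on `respondOk`/`respondErr` in `connector-test-kit-executor/src/index.ts`:

```typescript
import * as readline from 'node:readline'

// Hypothetical, simplified executor loop: one JSON-RPC request per line on
// STDIN, one JSON response per line on STDOUT.
interface Request {
  jsonrpc: '2.0'
  id: number
  method: string
  params: unknown
}

const iface = readline.createInterface({ input: process.stdin })

iface.on('line', async (line) => {
  const request: Request = JSON.parse(line)
  try {
    const result = await handleRequest(request.method, request.params)
    process.stdout.write(JSON.stringify({ jsonrpc: '2.0', id: request.id, result }) + '\n')
  } catch (error) {
    process.stdout.write(
      JSON.stringify({ jsonrpc: '2.0', id: request.id, error: { code: 1, message: String(error) } }) + '\n',
    )
  }
})

async function handleRequest(method: string, params: unknown): Promise<unknown> {
  switch (method) {
    // A real executor dispatches on 'initializeSchema', 'query', 'startTx',
    // 'commitTx', 'rollbackTx', 'teardown' and 'getLogs' here, loading the
    // library engine with a driver adapter.
    default:
      throw new Error(`Unknown method: \`${method}\``)
  }
}
```

Note that the Rust side inherits the executor's STDERR (see `external_process.rs` below), which is why all logging in the real `index.ts` goes through `console.error` rather than STDOUT.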
The PR also adds configuration to run the `neon:ws` driver adapter against
postgres 13 in CI.
---
 .../query-engine-driver-adapters.yml          |  30 +---
 Makefile                                      |  17 ++
 docker-compose.yml                            |  15 ++
 query-engine/connector-test-kit-rs/README.md  |   8 +-
 .../query-tests-setup/src/config.rs           | 147 +++++++++++++++++-
 .../src/connector_tag/js/external_process.rs  |  16 +-
 .../src/connector_tag/mod.rs                  |   4 +-
 .../query-tests-setup/src/lib.rs              |   4 +-
 .../query-tests-setup/src/runner/mod.rs       |   2 +-
 .../test-configs/neon-ws-postgres13           |   7 +
 .../test-configs/pg-postgres13                |   6 +
 .../test-configs/postgres13                   |   3 +-
 .../connector-test-kit-executor/src/index.ts  |  96 +++++++-----
 .../js/connector-test-kit-executor/src/qe.ts  |  10 +-
 .../query-engine-node-api/src/engine.rs       |  12 +-
 15 files changed, 281 insertions(+), 96 deletions(-)
 create mode 100644 query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13
 create mode 100644 query-engine/connector-test-kit-rs/test-configs/pg-postgres13

diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml
index ad7b51fc2beb..dda32259ff80 100644
--- a/.github/workflows/query-engine-driver-adapters.yml
+++ b/.github/workflows/query-engine-driver-adapters.yml
@@ -17,35 +17,28 @@ concurrency:

 jobs:
   rust-query-engine-tests:
-    name: "Test `${{ matrix.database.driver_adapter }}` on node v${{ matrix.node_version }}"
+    name: "Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}"

     strategy:
       fail-fast: false
       matrix:
-        database:
-          - name: "postgres13"
-            connector: "postgres"
-            version: "13"
-            driver_adapter: "pg"
+        adapter:
+          - name: "pg"
+            setup_task: "dev-pg-postgres13"
+          - name: "neon:ws"
+            setup_task: "dev-neon-ws-postgres13"
         node_version: ["18"]

     env:
-      LOG_LEVEL: "info"
+      LOG_LEVEL: "info" # Set to "debug" to trace the query engine and node process running the driver adapter
       LOG_QUERIES: "y"
       RUST_LOG: "info"
       RUST_LOG_FORMAT: "devel"
       RUST_BACKTRACE: "1"
-      PRISMA_DISABLE_QUAINT_EXECUTORS: "1"
       CLICOLOR_FORCE: "1"
       CLOSED_TX_CLEANUP: "2"
       SIMPLE_TEST_MODE: "1"
       QUERY_BATCH_SIZE: "10"
-      TEST_CONNECTOR: ${{ matrix.database.connector }}
-      TEST_CONNECTOR_VERSION: ${{ matrix.database.version }}
       WORKSPACE_ROOT: ${{ github.workspace }}
-      # Driver adapter testing specific env vars
-      EXTERNAL_TEST_EXECUTOR: "${{ github.workspace }}/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"
-      DRIVER_ADAPTER: ${{ matrix.database.driver_adapter }}
-      DRIVER_ADAPTER_URL_OVERRIDE: ${{ matrix.database.driver_adapter_url }}

     runs-on: buildjet-16vcpu-ubuntu-2004
     steps:
@@ -81,17 +74,10 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

-      - name: "Start ${{ matrix.database.name }}"
-        run: make start-${{ matrix.database.name }}
+      - run: make ${{ matrix.adapter.setup_task }}

       - uses: dtolnay/rust-toolchain@stable

-      - name: "Build query-engine-node-api with driver support"
-        run: cargo build -p query-engine-node-api
-
-      - name: "Install and build driver adapter JS dependencies"
-        run: cd query-engine/driver-adapters/js && pnpm i && pnpm build
-
       - name: "Run tests"
         run: cargo test --package query-engine-tests -- --test-threads=1

diff --git a/Makefile b/Makefile
index 4645b32328e6..d0f4bdd04631 100644
--- a/Makefile
+++ b/Makefile
@@ -107,6 +107,17 @@ start-postgres13:
 dev-postgres13: start-postgres13
	cp $(CONFIG_PATH)/postgres13 $(CONFIG_FILE)

+start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13
+dev-pg-postgres13: start-pg-postgres13 + cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) + +start-neon-postgres13: build-qe-napi build-connector-kit-js + docker compose -f docker-compose.yml up -d --remove-orphans neon-postgres13 + +dev-neon-ws-postgres13: start-neon-postgres13 + cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) + start-postgres14: docker compose -f docker-compose.yml up -d --remove-orphans postgres14 @@ -239,6 +250,12 @@ dev-vitess_8_0: start-vitess_8_0 # Local dev commands # ###################### +build-qe-napi: + cargo build --package query-engine-node-api + +build-connector-kit-js: + cd query-engine/driver-adapters/js && pnpm i && pnpm build + # Quick schema validation of whatever you have in the dev_datamodel.prisma file. validate: cargo run --bin test-cli -- validate-datamodel dev_datamodel.prisma diff --git a/docker-compose.yml b/docker-compose.yml index 5f637f7d10a6..1988f864d304 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -106,6 +106,21 @@ services: networks: - databases + neon-postgres13: + image: ghcr.io/neondatabase/wsproxy:latest + environment: + # the port of the postgres13 within the databases network + APPEND_PORT: 'postgres13:5432' + ALLOW_ADDR_REGEX: '.*' + LOG_TRAFFIC: 'true' + LOG_CONN_INFO: 'true' + ports: + - '5488:80' + depends_on: + - postgres13 + networks: + - databases + postgres14: image: postgres:14 restart: always diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 3cbaadb63523..2c849a2aa985 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -71,18 +71,16 @@ This means that instead of drivers being implemented in Rust, it's a layer of ad To run tests through a driver adapters, you should also configure the following environment variables: -* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running -a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. +* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. -* `DRIVER_ADAPTER_URL_OVERRIDE`: it overrides the schema URL for the database to use one understood by the driver adapter (ex. neon, planetscale) - +* `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. 
Example:

```shell
export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"
export DRIVER_ADAPTER=neon
-export DRIVER_ADAPTER_URL_OVERRIDE ="postgres://USER:PASSWORD@DATABASExxxx"
+export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }'
```

### Running
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs
index 8807b4e0dbd8..944f1c400400 100644
--- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs
+++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs
@@ -20,6 +20,21 @@ pub struct TestConfig {
     #[serde(rename = "version")]
     connector_version: Option<String>,

+    /// An external process to execute the test queries and produce responses for assertion.
+    /// Used when testing driver adapters, this process is expected to be a JavaScript process
+    /// loading the library engine (as a library, or WASM modules) and providing it with a
+    /// driver adapter.
+    /// Env key: `EXTERNAL_TEST_EXECUTOR`
+    external_test_executor: Option<String>,
+
+    /// The driver adapter to use when running tests; will be forwarded to the external test
+    /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process.
+    driver_adapter: Option<String>,
+
+    /// The driver adapter configuration to forward as a stringified JSON object to the external
+    /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor.
+    driver_adapter_config: Option<serde_json::Value>,
+
     /// Indicates whether or not the tests are running in CI context.
     /// Env key: `BUILDKITE`
     #[serde(default)]
@@ -35,13 +50,22 @@

 const CONFIG_LOAD_FAILED: &str = r####"
 Test config can come from the environment, or a config file.

-♻️ Environment
+♻️ Environment variables
+
+Be sure to have WORKSPACE_ROOT set to the root of the prisma-engines
+repository.

-Set the following env vars:
+Set the following vars to denote the connector under test

 - TEST_CONNECTOR
 - TEST_CONNECTOR_VERSION (optional)

+And optionally, to test driver adapters
+
+- EXTERNAL_TEST_EXECUTOR
+- DRIVER_ADAPTER
+- DRIVER_ADAPTER_CONFIG (optional, not required by all driver adapters)
+
 📁 Config file

 Use the Makefile.
@@ -51,8 +75,9 @@ fn exit_with_message(msg: &str) -> ! {
     use std::io::{stderr, Write};
     let stderr = stderr();
     let mut sink = stderr.lock();
+    sink.write_all(b"Error in the test configuration:\n").unwrap();
     sink.write_all(msg.as_bytes()).unwrap();
-    sink.write_all(b"\n").unwrap();
+    sink.write_all(b"\nAborting test process\n").unwrap();

     std::process::exit(1)
 }
@@ -60,31 +85,44 @@ impl TestConfig {
     /// Loads a configuration. File-based config has precedence over env config.
     pub(crate) fn load() -> Self {
-        let config = match Self::from_file().or_else(Self::from_env) {
+        let mut config = match Self::from_file().or_else(Self::from_env) {
             Some(config) => config,
             None => exit_with_message(CONFIG_LOAD_FAILED),
         };

+        config.fill_defaults();
         config.validate();
         config.log_info();
+
         config
     }

+    #[rustfmt::skip]
     fn log_info(&self) {
         println!("******************************");
         println!("* Test run information:");
         println!(
             "* Connector: {} {}",
             self.connector,
-            self.connector_version.as_ref().unwrap_or(&"".to_owned())
+            self.connector_version().unwrap_or_default()
         );
{}", self.is_ci); + if self.external_test_executor.as_ref().is_some() { + println!("* External test executor: {}", self.external_test_executor().unwrap_or_default()); + println!("* Driver adapter: {}", self.driver_adapter().unwrap_or_default()); + println!("* Driver adapter url override: {}", self.json_stringify_driver_adapter_config()); + } println!("******************************"); } fn from_env() -> Option { let connector = std::env::var("TEST_CONNECTOR").ok(); let connector_version = std::env::var("TEST_CONNECTOR_VERSION").ok(); + let external_test_executor = std::env::var("EXTERNAL_TEST_EXECUTOR").ok(); + let driver_adapter = std::env::var("DRIVER_ADAPTER").ok(); + let driver_adapter_config = std::env::var("DRIVER_ADAPTER_CONFIG") + .map(|config| serde_json::from_str::(config.as_str()).ok()) + .unwrap_or_default(); // Just care for a set value for now. let is_ci = std::env::var("BUILDKITE").is_ok(); @@ -93,16 +131,18 @@ impl TestConfig { connector, connector_version, is_ci, + external_test_executor, + driver_adapter, + driver_adapter_config, }) } fn from_file() -> Option { let current_dir = env::current_dir().ok(); - let workspace_root = std::env::var("WORKSPACE_ROOT").ok().map(PathBuf::from); current_dir .and_then(|path| Self::try_path(config_path(path))) - .or_else(|| workspace_root.and_then(|path| Self::try_path(config_path(path)))) + .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))) } fn try_path(path: PathBuf) -> Option { @@ -115,6 +155,33 @@ impl TestConfig { }) } + /// if the loaded value for external_test_executor is "default" (case insensitive), + /// and the workspace_root is set, then use the default external test executor. + fn fill_defaults(&mut self) { + const DEFAULT_TEST_EXECUTOR: &str = + "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + + if self + .external_test_executor + .as_ref() + .filter(|s| s.eq_ignore_ascii_case("default")) + .is_some() + { + self.external_test_executor = Self::workspace_root() + .map(|path| path.join(DEFAULT_TEST_EXECUTOR)) + .or_else(|| { + exit_with_message( + "WORKSPACE_ROOT needs to be correctly set to the root of the prisma-engines repository", + ) + }) + .and_then(|path| path.to_str().map(|s| s.to_owned())); + } + } + + fn workspace_root() -> Option { + env::var("WORKSPACE_ROOT").ok().map(PathBuf::from) + } + fn validate(&self) { if self.connector.is_empty() { exit_with_message("A test connector is required but was not set."); @@ -138,6 +205,38 @@ impl TestConfig { | Ok(ConnectorVersion::Sqlite) => (), Err(err) => exit_with_message(&err.to_string()), } + + if let Some(file) = self.external_test_executor.as_ref() { + let path = PathBuf::from(file); + let md = path.metadata(); + if !path.exists() || md.is_err() || !md.unwrap().is_file() { + exit_with_message(&format!("The external test executor path `{}` must be a file", file)); + } + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + if path.metadata().is_ok_and(|md| md.permissions().mode() & 0o111 == 0) { + exit_with_message(&format!( + "The external test executor file `{}` must be have permissions to execute", + file + )); + } + } + } + + if self.external_test_executor.is_some() && self.driver_adapter.is_none() { + exit_with_message( + "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); + } + + if self.driver_adapter.is_some() && self.external_test_executor.is_none() { + exit_with_message("When using a driver adapter, the external test executor must be 
set."); + } + + if self.driver_adapter.is_none() && self.driver_adapter_config.is_some() { + exit_with_message("When using a driver adapter config, the driver adapter must be set."); + } } pub fn connector(&self) -> &str { @@ -145,13 +244,28 @@ impl TestConfig { } pub(crate) fn connector_version(&self) -> Option<&str> { - self.connector_version.as_ref().map(AsRef::as_ref) + self.connector_version.as_deref() } pub fn is_ci(&self) -> bool { self.is_ci } + pub fn external_test_executor(&self) -> Option<&str> { + self.external_test_executor.as_deref() + } + + pub fn driver_adapter(&self) -> Option<&str> { + self.driver_adapter.as_deref() + } + + pub fn json_stringify_driver_adapter_config(&self) -> String { + self.driver_adapter_config + .as_ref() + .map(|value| value.to_string()) + .unwrap_or("{}".to_string()) + } + pub fn test_connector(&self) -> TestResult<(ConnectorTag, ConnectorVersion)> { let version = ConnectorVersion::try_from((self.connector(), self.connector_version()))?; let tag = match version { @@ -166,6 +280,23 @@ impl TestConfig { Ok((tag, version)) } + + #[rustfmt::skip] + pub fn for_external_executor(&self) -> Vec<(String, String)> { + vec!( + ( + "DRIVER_ADAPTER".to_string(), + self.driver_adapter.clone().unwrap_or_default()), + ( + "DRIVER_ADAPTER_CONFIG".to_string(), + self.json_stringify_driver_adapter_config() + ), + ( + "PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), + "1".to_string(), + ), + ) + } } fn config_path(mut path: PathBuf) -> PathBuf { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs index 332eb3ea50d2..7ab0e6e8a021 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -85,28 +85,24 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { use std::process::Stdio; use tokio::process::Command; - let env_var = match crate::EXTERNAL_TEST_EXECUTOR.as_ref() { - Some(env_var) => env_var, - None => exit_with_message( - 1, - "start_rpc_thread() error: EXTERNAL_TEST_EXECUTOR env var is not defined", - ), - }; + let path = crate::CONFIG + .external_test_executor() + .unwrap_or_else(|| exit_with_message(1, "start_rpc_thread() error: external test executor is not set")); tokio::runtime::Builder::new_current_thread() .enable_io() .build() .unwrap() .block_on(async move { - eprintln!("Spawning test executor process at `{env_var}`"); - let process = match Command::new(env_var) + let process = match Command::new(path) + .envs(CONFIG.for_external_executor()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) .spawn() { Ok(process) => process, - Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process: `{env_var}`. Details: {err}\n")), + Err(err) => exit_with_message(1, &format!("Failed to spawn the executor process: `{path}`. 
Details: {err}\n")), }; let mut stdout = BufReader::new(process.stdout.unwrap()).lines(); diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index cf320b4fbfca..d92bb5e96314 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -18,7 +18,7 @@ pub(crate) use sql_server::*; pub(crate) use sqlite::*; pub(crate) use vitess::*; -use crate::{datamodel_rendering::DatamodelRenderer, BoxFuture, TestError, CONFIG, EXTERNAL_TEST_EXECUTOR}; +use crate::{datamodel_rendering::DatamodelRenderer, BoxFuture, TestError, CONFIG}; use psl::datamodel_connector::ConnectorCapabilities; use std::{convert::TryFrom, fmt}; @@ -302,7 +302,7 @@ pub(crate) fn should_run( .any(|only| ConnectorVersion::try_from(*only).unwrap().matches_pattern(&version)); } - if EXTERNAL_TEST_EXECUTOR.is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { + if CONFIG.external_test_executor().is_some() && exclude.iter().any(|excl| excl.0.to_uppercase() == "JS") { println!("Excluded test execution for JS driver adapters. Skipping test"); return false; }; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index 99bd486f51d3..2e79581a0c78 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -42,13 +42,11 @@ pub static ENV_LOG_LEVEL: Lazy = Lazy::new(|| std::env::var("LOG_LEVEL") pub static ENGINE_PROTOCOL: Lazy = Lazy::new(|| std::env::var("PRISMA_ENGINE_PROTOCOL").unwrap_or_else(|_| "graphql".to_owned())); -static EXTERNAL_TEST_EXECUTOR: Lazy> = Lazy::new(|| std::env::var("EXTERNAL_TEST_EXECUTOR").ok()); - /// Teardown of a test setup. async fn teardown_project(datamodel: &str, db_schemas: &[&str], schema_id: Option) -> TestResult<()> { if let Some(schema_id) = schema_id { let params = serde_json::json!({ "schemaId": schema_id }); - crate::executor_process_request::("teardown", params).await?; + executor_process_request::("teardown", params).await?; } Ok(qe_setup::teardown(datamodel, db_schemas).await?) 
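The executor process spawned above receives its configuration through the environment variables forwarded by `for_external_executor()` (`DRIVER_ADAPTER`, `DRIVER_ADAPTER_CONFIG`, `PRISMA_DISABLE_QUAINT_EXECUTORS`). A minimal sketch of how the Node.js side might consume them; the helper name is hypothetical, while the env var names and error messages mirror `adapterFromEnv` in `connector-test-kit-executor/src/index.ts`:

```typescript
// Sketch: selecting a driver adapter from the env vars forwarded by the
// test setup. The real dispatch lives in connector-test-kit-executor/src/index.ts.
type AdapterFactory = (url: string) => Promise<unknown>

function pickAdapter(supported: Record<string, AdapterFactory>): { factory: AdapterFactory; config: unknown } {
  const name = process.env.DRIVER_ADAPTER ?? ''
  if (name === '') {
    throw new Error('DRIVER_ADAPTER is not defined or empty')
  }

  const factory = supported[name]
  if (factory === undefined) {
    throw new Error(`Unsupported driver adapter: ${name}`)
  }

  // DRIVER_ADAPTER_CONFIG is a stringified JSON object; adapters such as
  // neon:ws read fields like `proxyUrl` from it.
  const config = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG ?? '{}')

  return { factory, config }
}
```

`PRISMA_DISABLE_QUAINT_EXECUTORS` is forwarded as well, presumably so the engine under test routes queries through the driver adapter rather than quaint's native drivers.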
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index b0367a9628c6..d6505206356b 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -122,7 +122,7 @@ impl Runner { let data_source = schema.configuration.datasources.first().unwrap(); let url = data_source.load_url(|key| env::var(key).ok()).unwrap(); - let executor = match crate::EXTERNAL_TEST_EXECUTOR.as_ref() { + let executor = match crate::CONFIG.external_test_executor() { Some(_) => RunnerExecutor::new_external(&url, &datamodel).await?, None => RunnerExecutor::Builtin( request_handlers::load_executor( diff --git a/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 b/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 new file mode 100644 index 000000000000..0097d8c91f57 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/neon-ws-postgres13 @@ -0,0 +1,7 @@ +{ + "connector": "postgres", + "version": "13", + "driver_adapter": "neon:ws", + "driver_adapter_config": { "proxyUrl": "127.0.0.1:5488/v1" }, + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 b/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 new file mode 100644 index 000000000000..00f0c75ed736 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/pg-postgres13 @@ -0,0 +1,6 @@ +{ + "connector": "postgres", + "version": "13", + "driver_adapter": "pg", + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/connector-test-kit-rs/test-configs/postgres13 b/query-engine/connector-test-kit-rs/test-configs/postgres13 index 84fb5e1c04f1..f7b61cb4f888 100644 --- a/query-engine/connector-test-kit-rs/test-configs/postgres13 +++ b/query-engine/connector-test-kit-rs/test-configs/postgres13 @@ -1,3 +1,4 @@ { "connector": "postgres", - "version": "13"} \ No newline at end of file + "version": "13" +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index 78e1b8954ae5..21df3430d3b1 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -11,12 +11,26 @@ import * as prismaPg from '@prisma/adapter-pg' import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' import { WebSocket } from 'undici' import * as prismaNeon from '@prisma/adapter-neon' -neonConfig.webSocketConstructor = WebSocket import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; const SUPPORTED_ADAPTERS: Record Promise> - = {pg: pgAdapter, neon: neonAdapter}; + = {"pg": pgAdapter, "neon:ws" : neonWsAdapter}; + +// conditional debug logging based on LOG_LEVEL env var +const debug = (() => { + if ((process.env.LOG_LEVEL ?? 
'').toLowerCase() != 'debug') {
+    return (...args: any[]) => {}
+  }
+
+  return (...args: any[]) => {
+    console.error('[nodejs] DEBUG:', ...args);
+  };
+})();
+
+// error logger
+const err = (...args: any[]) => console.error('[nodejs] ERROR:', ...args);
+
 async function main(): Promise<void> {
   const iface = readline.createInterface({
@@ -28,27 +42,29 @@ async function main(): Promise<void> {
   iface.on('line', async (line) => {
     try {
       const request: jsonRpc.Request = JSON.parse(line); // todo: validate
-      console.error(`Got a request: ${line}`)
+      debug(`Got a request: ${line}`)
       try {
         const response = await handleRequest(request.method, request.params)
         respondOk(request.id, response)
       } catch (err) {
-        console.error("[nodejs] Error from request handler: ", err)
+        debug("[nodejs] Error from request handler: ", err)
         respondErr(request.id, {
           code: 1,
           message: err.toString(),
         })
       }
     } catch (err) {
-      console.error("Received non-json line: ", line);
+      debug("Received non-json line: ", line);
     }
   });
 }

-const schemas: Record<number, engines.QueryEngineInstance> = {}
-const adapters: Record<number, ErrorCapturingDriverAdapter> = {}
-const queryLogs: Record<number, string[]> = []
+const state: Record<number, {
+  engine: engines.QueryEngineInstance,
+  adapter: ErrorCapturingDriverAdapter,
+  logs: string[]
+}> = {}

 async function handleRequest(method: string, params: unknown): Promise<unknown> {
   switch (method) {
@@ -60,13 +76,17 @@ async function handleRequest(method: string, params: unknown): Promise<unknown>
       }
       const castParams = params as InitializeSchemaParams;
-      const logs = queryLogs[castParams.schemaId] = [] as string[]
+      const logs = [] as string[]
       const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => {
         logs.push(log)
       });
       await engine.connect("")
+
+      state[castParams.schemaId] = {
+        engine,
+        adapter,
+        logs
+      }
       return null
     }
     case 'query': {
@@ -76,26 +96,25 @@ async function handleRequest(method: string, params: unknown): Promise<unknown>
         txId?: string
       }

-      console.error("Got `query`", params)
+      debug("Got `query`", params)
       const castParams = params as QueryPayload;
-      const engine = schemas[castParams.schemaId]
+      const engine = state[castParams.schemaId].engine
       const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId)

       const parsedResult = JSON.parse(result)
       if (parsedResult.errors) {
         const error = parsedResult.errors[0]?.user_facing_error
         if (error.error_code === 'P2036') {
-          const jsError = adapters[castParams.schemaId].errorRegistry.consumeError(error.meta.id)
+          const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id)
           if (!jsError) {
-            console.error(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`)
+            err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`)
           } else {
-            console.error("[nodejs] got error response from the engine caused by the driver: ", jsError)
+            err("got error response from the engine caused by the driver: ", jsError)
           }
         }
       }

-      console.error("[nodejs] got response from engine: ", result)
-
+      debug("got response from engine: ", result)
      // returning unparsed string: otherwise, some information gets lost during this round-trip.
      // In particular, floating-point numbers without a decimal part would turn into integers
      return result
    }
@@ -107,9 +126,9 @@ async function handleRequest(method: string, params: unknown): Promise<unknown>
     case 'startTx': {
       interface StartTxPayload {
         schemaId: number,
         options: unknown
       }

-      console.error("Got `startTx", params)
+      debug("Got `startTx", params)
       const {schemaId, options} = params as StartTxPayload
-      const result = await schemas[schemaId].startTransaction(JSON.stringify(options), "")
+      const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), "")
       return JSON.parse(result)
     }
@@ -119,9 +138,9 @@ async function handleRequest(method: string, params: unknown): Promise<unknown>
     case 'commitTx': {
       interface CommitTxPayload {
         schemaId: number,
         txId: string,
       }

-      console.error("Got `commitTx", params)
+      debug("Got `commitTx", params)
       const {schemaId, txId} = params as CommitTxPayload
-      const result = await schemas[schemaId].commitTransaction(txId, '{}')
+      const result = await state[schemaId].engine.commitTransaction(txId, '{}')
       return JSON.parse(result)
     }
@@ -131,9 +150,9 @@ async function handleRequest(method: string, params: unknown): Promise<unknown>
     case 'rollbackTx': {
       interface RollbackTxPayload {
         schemaId: number,
         txId: string,
       }

-      console.error("Got `rollbackTx", params)
+      debug("Got `rollbackTx", params)
       const {schemaId, txId} = params as RollbackTxPayload
-      const result = await schemas[schemaId].rollbackTransaction(txId, '{}')
+      const result = await state[schemaId].engine.rollbackTransaction(txId, '{}')
       return JSON.parse(result)
     }
     case 'teardown': {
@@ -141,11 +160,11 @@ async function handleRequest(method: string, params: unknown): Promise<unknown>
       interface TeardownPayload {
         schemaId: number
       }

+      debug("Got `teardown", params)
       const castParams = params as TeardownPayload;
-      await schemas[castParams.schemaId].disconnect("")
-      delete schemas[castParams.schemaId]
-      delete adapters[castParams.schemaId]
-      delete queryLogs[castParams.schemaId]
+      await state[castParams.schemaId].engine.disconnect("")
+      delete state[castParams.schemaId]
+
       return {}
     }
     case 'getLogs': {
       interface GetLogsPayload {
         schemaId: number
       }

       const castParams = params as GetLogsPayload
-      return queryLogs[castParams.schemaId] ?? []
+      return state[castParams.schemaId].logs ?? []
     }
     default: {
       throw new Error(`Unknown method: \`${method}\``)
@@ -184,7 +203,7 @@ function respondOk(requestId: number, payload: unknown) {

 async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[engines.QueryEngineInstance, ErrorCapturingDriverAdapter]> {
   const adapter = await adapterFromEnv(url) as DriverAdapter
   const errorCapturingAdapter = bindAdapter(adapter)
-  const engineInstance = qe.initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback)
+  const engineInstance = qe.initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback, debug)
   return [engineInstance, errorCapturingAdapter];
 }

@@ -199,7 +218,7 @@ async function adapterFromEnv(url: string): Promise<DriverAdapter> {
     throw new Error(`Unsupported driver adapter: ${adapter}`)
   }

-  return await SUPPORTED_ADAPTERS[adapter](url);
+  return await SUPPORTED_ADAPTERS[adapter](url)
 }

 async function pgAdapter(url: string): Promise<DriverAdapter> {
@@ -207,14 +226,19 @@ async function pgAdapter(url: string): Promise<DriverAdapter> {
   return new prismaPg.PrismaPg(pool)
 }

-async function neonAdapter(_: string): Promise<DriverAdapter> {
-  const connectionString = process.env.DRIVER_ADAPTER_URL_OVERRIDE ?? ''
-  if (connectionString == '') {
-    throw new Error("DRIVER_ADAPTER_URL_OVERRIDE is not defined or empty, but its required for neon adapter.");
-  }
+async function neonWsAdapter(url: string): Promise<DriverAdapter> {
+  const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? ''
+  if (proxyURL == '') {
+    throw new Error("DRIVER_ADAPTER_CONFIG.proxyUrl is not defined or empty, but it's required for the neon adapter.");
+  }

-  const pool = new NeonPool({ connectionString })
+  neonConfig.wsProxy = () => proxyURL
+  neonConfig.webSocketConstructor = WebSocket
+  neonConfig.useSecureWebSocket = false
+  neonConfig.pipelineConnect = false
+
+  const pool = new NeonPool({ connectionString: url })
   return new prismaNeon.PrismaNeon(pool)
 }

-main().catch(console.error)
+main().catch(err)
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts
index a8256bf08b3f..764df8f6108d 100644
--- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts
@@ -5,7 +5,7 @@ import * as path from 'node:path'

 export type QueryLogCallback = (log: string) => void

-export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: string, queryLogCallback: QueryLogCallback): lib.QueryEngineInstance {
+export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: string, queryLogCallback: QueryLogCallback, debug: (...args: any[]) => void): lib.QueryEngineInstance {
   // I assume nobody will run this on Windows ¯\_(ツ)_/¯
   const libExt = os.platform() === 'darwin' ? 'dylib' : 'so'
   const dirname = path.dirname(new URL(import.meta.url).pathname)
@@ -22,7 +22,7 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel:
     datamodel,
     configDir: '.',
     engineProtocol: 'json' as const,
-    logLevel: process.env["RUST_LOG"] as any,
+    logLevel: process.env["RUST_LOG"] ?? 'info' as any,
     logQueries: true,
     env: process.env,
     ignoreEnvVarErrors: false,
@@ -34,11 +34,7 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel:
     if (parsed.is_query) {
       queryLogCallback(parsed.query)
     }
-
-    const level = process.env.LOG_LEVEL ?? ''
-    if (level.toLowerCase() == 'debug') {
-      console.error("[nodejs] ", parsed)
-    }
+    debug(parsed)
   }

   return new QueryEngine(queryEngineOptions, logCallback, adapter)
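The `logCallback` built above splits the engine's structured JSON log lines: query logs go to the test kit, everything else to the injected `debug` logger. A self-contained sketch of that pattern; the `is_query` and `query` field names appear in the code above, the helper itself is illustrative:

```typescript
type EngineLog = { is_query?: boolean; query?: string; [key: string]: unknown }

// Build a log callback that captures SQL query logs and hands every parsed
// log line to an injected debug logger.
function makeLogCallback(
  onQuery: (sql: string) => void,
  debug: (...args: unknown[]) => void,
): (log: string) => void {
  return (log) => {
    const parsed: EngineLog = JSON.parse(log)
    if (parsed.is_query && typeof parsed.query === 'string') {
      onQuery(parsed.query)
    }
    debug(parsed)
  }
}
```

Injecting `debug` as a parameter keeps `qe.ts` free of any knowledge of the `LOG_LEVEL` convention, which now lives in one place in `index.ts`.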
diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs
index e376cf16a049..e9e7ad681cd4 100644
--- a/query-engine/query-engine-node-api/src/engine.rs
+++ b/query-engine/query-engine-node-api/src/engine.rs
@@ -163,7 +163,17 @@ impl QueryEngine {
             config_dir,
             ignore_env_var_errors,
             engine_protocol,
-        } = napi_env.from_js_value(options)?;
+        } = napi_env.from_js_value(options).expect(
+            r###"
+            Failed to deserialize constructor options.
+
+            This usually happens when the javascript object passed to the constructor is missing
+            properties for the ConstructorOptions fields that must have some value.
+
+            If you set some of these in javascript through environment variables, make sure there are
+            values for data_model, log_level, and any field that is not Option<T>
+            "###,
+        );
         let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env
         let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect();

From d96b0d44b6e7486a913a2869d52353581d2c581a Mon Sep 17 00:00:00 2001
From: Serhii Tatarintsev
Date: Fri, 22 Sep 2023 17:06:28 +0200
Subject: [PATCH 034/128] driver-adapters: Map neon errors to Prisma errors
 (#4213)

* driver-adapters: Map neon errors to Prisma errors

Approach taken: instead of duplicating error handling logic in TS, we
extract all the information we need from the error and forward it to
the engine. The engine then reuses the error parsing code from the
native driver.

`PostgresError` is an intermediate form for storing this information.
It can be created from either a `tokio_postgres` error or from JS. It
then gets parsed and converted to a `quaint::Error`.

This allows handling most DB-level errors (read: everything that
actually reached a running DB). It does not handle TLS and networking
errors, since these look completely different in JS. We'll need to
handle them later.

* Add hint & severity fields on TS side
---
 quaint/src/connector/postgres.rs              |   2 +-
 quaint/src/connector/postgres/error.rs        | 414 +++++++++---------
 quaint/src/error.rs                           |   2 +
 .../js/adapter-neon/src/neon.ts               |  56 ++-
 .../js/driver-adapter-utils/src/types.ts      |   8 +
 .../smoke-test-js/prisma/mysql/schema.prisma  |   4 +
 .../prisma/postgres/schema.prisma             |   4 +
 .../js/smoke-test-js/src/libquery/libquery.ts |  32 +-
 query-engine/driver-adapters/src/result.rs    |  22 +-
 9 files changed, 287 insertions(+), 257 deletions(-)

diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs
index d4dc008bd5f9..c35208f84199 100644
--- a/quaint/src/connector/postgres.rs
+++ b/quaint/src/connector/postgres.rs
@@ -1,5 +1,5 @@
 mod conversion;
-mod error;
+pub mod error;

 use crate::{
     ast::{Query, Value},
diff --git a/quaint/src/connector/postgres/error.rs b/quaint/src/connector/postgres/error.rs
index 40634e2aa336..4f7bb23a5c85 100644
--- a/quaint/src/connector/postgres/error.rs
+++ b/quaint/src/connector/postgres/error.rs
@@ -1,37 +1,64 @@
+use std::fmt::{Display, Formatter};
+
+use tokio_postgres::error::DbError;
+
 use crate::error::{DatabaseConstraint, Error, ErrorKind, Name};

-impl From<tokio_postgres::error::Error> for Error {
-    fn from(e: tokio_postgres::error::Error) -> Error {
-        use tokio_postgres::error::DbError;
+#[derive(Debug)]
+pub struct PostgresError {
+    pub code: String,
+    pub message: String,
+    pub severity: String,
+    pub detail: Option<String>,
+    pub column: Option<String>,
+    pub hint: Option<String>,
+}

-        if e.is_closed() {
-            return Error::builder(ErrorKind::ConnectionClosed).build();
+impl std::error::Error for PostgresError {}
+
+impl Display for PostgresError {
+    // copy of DbError::fmt
+    fn fmt(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(fmt, "{}: {}", self.severity, self.message)?;
+        if let Some(detail) = &self.detail {
+            write!(fmt, "\nDETAIL: {}", detail)?;
+        }
+        if let Some(hint) = &self.hint {
+            write!(fmt, "\nHINT: {}", hint)?;
         }
+        Ok(())
+    }
+}

-        match e.code().map(|c| c.code()) {
-            Some(code) if code == "22001" => {
-                let code = code.to_string();
+impl From<&DbError> for PostgresError {
+    fn from(value: &DbError) -> Self {
+        PostgresError {
+            code: value.code().code().to_string(),
+            severity: value.severity().to_string(),
+            message: value.message().to_string(),
+            detail:
value.detail().map(ToString::to_string), + column: value.column().map(ToString::to_string), + hint: value.hint().map(ToString::to_string), + } + } +} +impl From for Error { + fn from(value: PostgresError) -> Self { + match value.code.as_str() { + "22001" => { let mut builder = Error::builder(ErrorKind::LengthMismatch { column: Name::Unavailable, }); - builder.set_original_code(code); - - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - if let Some(db_error) = db_error { - builder.set_original_message(db_error.to_string()); - } + builder.set_original_code(&value.code); + builder.set_original_message(value.to_string()); builder.build() } - Some(code) if code == "23505" => { - let code = code.to_string(); - - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let detail = db_error.as_ref().and_then(|e| e.detail()).map(ToString::to_string); - - let constraint = detail + "23505" => { + let constraint = value + .detail .as_ref() .and_then(|d| d.split(")=(").next()) .and_then(|d| d.split(" (").nth(1).map(|s| s.replace('\"', ""))) @@ -41,189 +68,138 @@ impl From for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); - if let Some(detail) = detail { + if let Some(detail) = value.detail { builder.set_original_message(detail); } builder.build() } - // Even lipstick will not save this... - Some(code) if code == "23502" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let detail = db_error.as_ref().and_then(|e| e.detail()).map(ToString::to_string); - - let constraint = db_error - .as_ref() - .map(|e| e.column()) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); + // Even lipstick will not save this... 
+ "23502" => { + let constraint = DatabaseConstraint::fields(value.column); let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + builder.set_original_code(value.code); - if let Some(detail) = detail { + if let Some(detail) = value.detail { builder.set_original_message(detail); } builder.build() } - Some(code) if code == "23503" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - - match db_error.as_ref().and_then(|e| e.column()) { - Some(column) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::fields(Some(column)), - }); - - builder.set_original_code(code); - - if let Some(message) = db_error.as_ref().map(|e| e.message()) { - builder.set_original_message(message); - } - - builder.build() - } - None => { - let constraint = db_error - .as_ref() - .map(|e| e.message()) - .and_then(|e| e.split_whitespace().nth(10)) - .and_then(|s| s.split('"').nth(1)) - .map(ToString::to_string) - .map(DatabaseConstraint::Index) - .unwrap_or(DatabaseConstraint::CannotParse); - - let kind = ErrorKind::ForeignKeyConstraintViolation { constraint }; - let mut builder = Error::builder(kind); - - builder.set_original_code(code); - - if let Some(message) = db_error.as_ref().map(|e| e.message()) { - builder.set_original_message(message); - } - - builder.build() - } - } - } - Some(code) if code == "3D000" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); + "23503" => match value.column { + Some(column) => { + let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { + constraint: DatabaseConstraint::fields(Some(column)), + }); - let db_name = message - .as_ref() - .and_then(|s| s.split_whitespace().nth(1)) + builder.set_original_code(value.code); + builder.set_original_message(value.message); + + builder.build() + } + None => { + let constraint = value + .message + .split_whitespace() + .nth(10) + .and_then(|s| s.split('"').nth(1)) + .map(ToString::to_string) + .map(DatabaseConstraint::Index) + .unwrap_or(DatabaseConstraint::CannotParse); + + let kind = ErrorKind::ForeignKeyConstraintViolation { constraint }; + let mut builder = Error::builder(kind); + + builder.set_original_code(value.code); + builder.set_original_message(value.message); + + builder.build() + } + }, + "3D000" => { + let db_name = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseDoesNotExist { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "28000" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let db_name = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(5)) + "28000" => { + let db_name = value + .message + .split_whitespace() + .nth(5) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseAccessDenied { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - 
builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "28P01" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); + "28P01" => { + let message = value.message; let user = message - .as_ref() - .and_then(|m| m.split_whitespace().last()) + .split_whitespace() + .last() .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::AuthenticationFailed { user }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(message); builder.build() } - Some(code) if code == "40001" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); + "40001" => { + let mut builder: crate::error::ErrorBuilder = Error::builder(ErrorKind::TransactionWriteConflict); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42P01" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let table = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42P01" => { + let table = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::TableDoesNotExist { table }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42703" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let column = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42703" => { + let column = value + .message + .split_whitespace() + .nth(1) .map(|s| s.split('\"')) .and_then(|mut s| match (s.next(), s.next()) { (Some(column), _) if !column.is_empty() => Some(column), @@ -235,92 +211,102 @@ impl From for Error { let kind = ErrorKind::ColumnNotFound { column }; let mut builder = Error::builder(kind); - builder.set_original_code(code); - - if let Some(message) = message { - builder.set_original_message(message); - } - + builder.set_original_code(value.code); + builder.set_original_message(value.message); builder.build() } - Some(code) if code == "42P04" => { - let code = code.to_string(); - let db_error = e.into_source().and_then(|e| e.downcast::().ok()); - let message = db_error.as_ref().map(|e| e.message()); - - let db_name = message - .as_ref() - .and_then(|m| m.split_whitespace().nth(1)) + "42P04" => { + let db_name = value + .message + .split_whitespace() + .nth(1) .and_then(|s| s.split('"').nth(1)) .into(); let kind = ErrorKind::DatabaseAlreadyExists { db_name }; let mut builder = Error::builder(kind); - builder.set_original_code(code); + 
builder.set_original_code(value.code); + builder.set_original_message(value.message); - if let Some(message) = message { - builder.set_original_message(message); - } + builder.build() + } + + _ => { + let code = value.code.to_owned(); + let message = value.to_string(); + let mut builder = Error::builder(ErrorKind::QueryError(value.into())); + builder.set_original_code(code); + builder.set_original_message(message); builder.build() } - code => { - // This is necessary, on top of the other conversions, for the cases where a - // native_tls error comes wrapped in a tokio_postgres error. - if let Some(tls_error) = try_extracting_tls_error(&e) { - return tls_error; - } + } + } +} - // Same for IO errors. - if let Some(io_error) = try_extracting_io_error(&e) { - return io_error; - } +impl From for Error { + fn from(e: tokio_postgres::error::Error) -> Error { + if e.is_closed() { + return Error::builder(ErrorKind::ConnectionClosed).build(); + } - #[cfg(feature = "uuid")] - if let Some(uuid_error) = try_extracting_uuid_error(&e) { - return uuid_error; - } + if let Some(db_error) = e.as_db_error() { + return PostgresError::from(db_error).into(); + } - let reason = format!("{e}"); - - match reason.as_str() { - "error connecting to server: timed out" => { - let mut builder = Error::builder(ErrorKind::ConnectTimeout); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } // sigh... - // https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 - "error performing TLS handshake: server does not support TLS" => { - let mut builder = Error::builder(ErrorKind::TlsError { - message: reason.clone(), - }); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } // double sigh - _ => { - let code = code.map(|c| c.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - if let Some(code) = code { - builder.set_original_code(code); - }; - - builder.set_original_message(reason); - builder.build() - } - } + if let Some(tls_error) = try_extracting_tls_error(&e) { + return tls_error; + } + + // Same for IO errors. + if let Some(io_error) = try_extracting_io_error(&e) { + return io_error; + } + + #[cfg(feature = "uuid")] + if let Some(uuid_error) = try_extracting_uuid_error(&e) { + return uuid_error; + } + + let reason = format!("{e}"); + let code = e.code().map(|c| c.code()); + + match reason.as_str() { + "error connecting to server: timed out" => { + let mut builder = Error::builder(ErrorKind::ConnectTimeout); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + return builder.build(); + } // sigh... 
+ // https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 + "error performing TLS handshake: server does not support TLS" => { + let mut builder = Error::builder(ErrorKind::TlsError { + message: reason.clone(), + }); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + return builder.build(); + } // double sigh + _ => { + let code = code.map(|c| c.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + if let Some(code) = code { + builder.set_original_code(code); + }; + + builder.set_original_message(reason); + return builder.build(); } } } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 5ca712c7be71..e9bdc890f279 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,6 +6,8 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; +pub use crate::connector::postgres::error::PostgresError; + #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { Fields(Vec), diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index 0c915e954dba..e2dac37a911c 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -27,29 +27,25 @@ abstract class NeonQueryable implements Queryable { const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows } = await this.performIO(query) - - const columns = fields.map((field) => field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), - rows, - } - - return ok(resultSet) + return (await this.performIO(query)).map(({ fields, rows }) => { + const columns = fields.map((field) => field.name) + return { + columnNames: columns, + columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)), + rows, + } + }) } async executeRaw(query: Query): Promise> { const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowCount: rowsAffected } = await this.performIO(query) - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return ok(rowsAffected ?? 0) + return (await this.performIO(query)).map((r) => r.rowCount ?? 
0) } - abstract performIO(query: Query): Promise + abstract performIO(query: Query): Promise> } /** @@ -60,15 +56,25 @@ class NeonWsQueryable extends NeonQ super() } - override async performIO(query: Query): Promise { + override async performIO(query: Query): Promise> { const { sql, args: values } = query try { - return await this.client.query({ text: sql, values, rowMode: 'array' }) + return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - throw error + debug('Error in performIO: %O', e) + if (e && e.code) { + return err({ + kind: 'PostgresError', + code: e.code, + severity: e.severity, + message: e.message, + detail: e.detail, + column: e.column, + hint: e.hint, + }) + } + throw e } } } @@ -126,12 +132,14 @@ export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { super() } - override async performIO(query: Query): Promise { + override async performIO(query: Query): Promise> { const { sql, args: values } = query - return await this.client(sql, values, { - arrayMode: true, - fullResults: true, - }) + return ok( + await this.client(sql, values, { + arrayMode: true, + fullResults: true, + }), + ) } startTransaction(): Promise> { diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 763a85b7be67..409f3958bcd5 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -36,6 +36,14 @@ export type Query = { export type Error = { kind: 'GenericJsError' id: number +} | { + kind: 'PostgresError' + code: string, + severity: string + message: string + detail: string | undefined + column: string | undefined + hint: string | undefined } export interface Queryable { diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma index 6681f70e6c69..00418d57cc2c 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma @@ -114,3 +114,7 @@ model Product { properties Json properties_null Json? 
} + +model Unique { + email String @id +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma index 4c92945ea852..74ffd428c728 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma @@ -103,3 +103,7 @@ model User { id String @id @default(uuid()) email String } + +model Unique { + email String @id +} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 44d07abb9a4e..3f659a6cb592 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -21,21 +21,6 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc await adapter.close() }) - it('raw error', async () => { - await assert.rejects(async () => { - await doQuery({ - action: 'queryRaw', - query: { - selection: { $scalars: true }, - arguments: { - query: 'NOT A VALID SQL, THIS WILL FAIL', - parameters: '[]', - }, - }, - }) - }) - }) - it('create JSON values', async () => { const json = JSON.stringify({ foo: 'bar', @@ -294,6 +279,23 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc console.log('[nodejs] commited', commitResponse) }) + it('expected error', async () => { + const result = await doQuery({ + modelName: 'Unique', + action: 'createMany', + query: { + arguments: { + data: [{ email: 'duplicate@example.com' }, { email: 'duplicate@example.com' }], + }, + selection: { + $scalars: true, + }, + }, + }) + + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + }) + describe('read scalar and non scalar types', () => { if (['mysql'].includes(flavour)) { it('mysql', async () => { diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index a5965509ef84..fc6f52bd2743 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,14 +1,29 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::Error as QuaintError; +use quaint::error::{Error as QuaintError, PostgresError}; use serde::Deserialize; -#[derive(Deserialize, Debug)] +#[derive(Deserialize)] +#[serde(remote = "PostgresError")] +pub struct PostgresErrorDef { + code: String, + message: String, + severity: String, + detail: Option, + column: Option, + hint: Option, +} + +#[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors /// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception - GenericJsError { id: i32 }, + GenericJsError { + id: i32, + }, + + PostgresError(#[serde(with = "PostgresErrorDef")] PostgresError), // in the future, expected errors that map to known user errors with PXXX codes will also go here } @@ -24,6 +39,7 @@ impl From for QuaintError { fn from(value: DriverAdapterError) -> Self { match value { DriverAdapterError::GenericJsError { id } => QuaintError::external_error(id), + DriverAdapterError::PostgresError(e) => e.into(), // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } From 571a52139c4b1573746f9c195bb4b3111f6915da Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: 
Fri, 22 Sep 2023 22:02:47 +0200 Subject: [PATCH 035/128] ci(driver adapters): Workflow that runs driver adapters smoke test in CI (#4266) --- .../workflows/driver-adapter-smoke-tests.yml | 78 +++++++++++++++++++ .../driver-adapters/js/pnpm-lock.yaml | 30 +++---- .../js/smoke-test-js/package.json | 30 +++---- .../js/smoke-test-js/src/client/client.ts | 5 ++ .../src/client/neon.http.test.ts | 5 +- .../js/smoke-test-js/src/libquery/util.ts | 10 ++- 6 files changed, 122 insertions(+), 36 deletions(-) create mode 100644 .github/workflows/driver-adapter-smoke-tests.yml diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml new file mode 100644 index 000000000000..e3a233339b1a --- /dev/null +++ b/.github/workflows/driver-adapter-smoke-tests.yml @@ -0,0 +1,78 @@ +name: Driver Adapters, Smoke Tests +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +jobs: + driver-adapter-smoke-tests: + name: Smoke tests for adapter ${{ matrix.adapter }} + + strategy: + fail-fast: false + matrix: + adapter: ["neon:ws", "neon:http", planetscale, pg] + + runs-on: ubuntu-latest + + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + env: + JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} + JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} + JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} + + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + + - uses: pnpm/action-setup@v2 + with: + version: 8 + - uses: actions/setup-node@v3 + with: + node-version: 18 + #cache: 'pnpm' + + - name: Compile Query Engine + run: cargo build -p query-engine-node-api + + - name: Install Dependencies (Driver Adapters) + run: pnpm install + working-directory: ./query-engine/driver-adapters/js + - name: Build Driver Adapters + run: pnpm build + working-directory: ./query-engine/driver-adapters/js + + - run: pnpm prisma:${{ matrix.adapter }} + working-directory: ./query-engine/driver-adapters/js/smoke-test-js + - run: pnpm ${{ matrix.adapter }}:libquery + working-directory: ./query-engine/driver-adapters/js/smoke-test-js + - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine! 
- from Npm) + run: pnpm ${{ matrix.adapter }}:client + if: always() + working-directory: ./query-engine/driver-adapters/js/smoke-test-js + + - name: pnpm errors + run: pnpm errors + if: always() + working-directory: ./query-engine/driver-adapters/js/smoke-test-js \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index f919d4a1a066..ce7a442a5388 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -103,8 +103,8 @@ importers: specifier: workspace:* version: link:../adapter-planetscale '@prisma/client': - specifier: 5.3.0-integration-feat-driver-adapters-in-client.3 - version: 5.3.0-integration-feat-driver-adapters-in-client.3(prisma@5.3.0-integration-feat-driver-adapters-in-client.3) + specifier: 5.4.0-dev.29 + version: 5.4.0-dev.29(prisma@5.4.0-dev.29) '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils @@ -128,8 +128,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.3.0-integration-feat-driver-adapters-in-client.3 - version: 5.3.0-integration-feat-driver-adapters-in-client.3 + specifier: 5.4.0-dev.29 + version: 5.4.0-dev.29 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -415,8 +415,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.3.0-integration-feat-driver-adapters-in-client.3(prisma@5.3.0-integration-feat-driver-adapters-in-client.3): - resolution: {integrity: sha512-L/y90JbrWyJGXiXvtKfRKDgqTjTEQ2rkzdb0tlf4Uu9jIyBxiKr5qmDl7vRagN1JXeOkYqinsi+598MtJ7pGFA==} + /@prisma/client@5.4.0-dev.29(prisma@5.4.0-dev.29): + resolution: {integrity: sha512-rpuBku3CFmX6FDq2SANcc9Ch6ZTqT6fyhvhe66bI/kzJjVY4NN7PwleJesB8/VfS5TkAYMmK5HcPQUwi5hZEVw==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -425,16 +425,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.3.0-28.3457e5de04da1741c969a80068702ad103e99553 - prisma: 5.3.0-integration-feat-driver-adapters-in-client.3 + '@prisma/engines-version': 5.4.0-18.32692fd2ce90d456c093eb8eae68511575243419 + prisma: 5.4.0-dev.29 dev: false - /@prisma/engines-version@5.3.0-28.3457e5de04da1741c969a80068702ad103e99553: - resolution: {integrity: sha512-eb+8hgURyTu1qAWmTxgZCgBjf0UV6REC525fa1XnPpL6hxMZ7cEtFCX0f9GDopa/piCM9pq5H2ttthGOKQyVLA==} + /@prisma/engines-version@5.4.0-18.32692fd2ce90d456c093eb8eae68511575243419: + resolution: {integrity: sha512-6qgjyvmru90p7sn+mWQlZDmX8WgYTZ/cB2kpDShjbg1ymF4dIszqUm6RZqESoZ39Mgp5d620AgDtZqfFQ8sWRQ==} dev: false - /@prisma/engines@5.3.0-integration-feat-driver-adapters-in-client.3: - resolution: {integrity: sha512-Nt+lbsiE4jj4GGIyhLrcNy8fVwBjsZeQqNI2oMbgoCyMSZwkcUmMRuK7OJdzbxHBKpivnneF0WMhbv/fZTRGig==} + /@prisma/engines@5.4.0-dev.29: + resolution: {integrity: sha512-3uZ/rLbrJcVSv6js2haSGb2QqB4n26j3Gr4w7iySiQ5O83L11rtOjmkRGdX8yXxEBibuWtOn43SJb24AlQDj8g==} requiresBuild: true /@types/debug@4.1.8: @@ -1035,13 +1035,13 @@ packages: /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - /prisma@5.3.0-integration-feat-driver-adapters-in-client.3: - resolution: {integrity: sha512-M9FQjLmJL7g4GnHwcsuf2WPqE3/B3k/laBkaq5XCxJcBMjoipNIGW0ZlZKY9t+TdJ14asGrv4+7o7mAmKLZqrw==} + /prisma@5.4.0-dev.29: + resolution: {integrity: 
sha512-TlhIZLVZsDVIQBcVZ8bRi9CJrThkEhKMJ9sEBtsINYx4ju3k7lGl9Kdqlm7zOW4FVwSNPgKvgsdzRgsO6fbDug==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.3.0-integration-feat-driver-adapters-in-client.3 + '@prisma/engines': 5.4.0-dev.29 /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 37cecd082393..95459d0237d4 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -11,24 +11,26 @@ "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", + "prisma:neon:ws": "pnpm prisma:neon", + "prisma:neon:http": "pnpm prisma:neon", "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/neon.ws.test.ts", - "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/neon.http.test.ts", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --loader=tsx ./src/client/neon.http.test.ts", + "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", + "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", + "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", + "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/pg.test.ts", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/client/pg.test.ts", + "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", + "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", "pg": "pnpm pg:libquery && pnpm pg:client", - "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/errors.test.ts", + "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec 
--loader=tsx ./src/libquery/errors.test.ts", "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", - "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/planetscale.test.ts", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --loader=tsx ./src/client/planetscale.test.ts", + "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", + "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client" }, "keywords": [], @@ -36,13 +38,13 @@ "license": "Apache-2.0", "sideEffects": true, "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@planetscale/database": "^1.11.0", "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", "@prisma/adapter-pg": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/client": "5.4.0-dev.29", "@prisma/driver-adapter-utils": "workspace:*", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/client": "5.3.0-integration-feat-driver-adapters-in-client.3", "pg": "^8.11.3", "superjson": "^1.13.1", "undici": "^5.23.0" @@ -51,7 +53,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.3.0-integration-feat-driver-adapters-in-client.3", + "prisma": "5.4.0-dev.29", "tsx": "^3.12.7" } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index ca434271c8de..35a7e8975f26 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -1,7 +1,9 @@ import { describe, it } from 'node:test' +import path from 'node:path' import assert from 'node:assert' import { PrismaClient } from '@prisma/client' import type { DriverAdapter } from '@prisma/driver-adapter-utils' +import { getLibQueryEnginePath } from '../libquery/util' export async function smokeTestClient(driverAdapter: DriverAdapter) { const provider = driverAdapter.flavour @@ -13,6 +15,9 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { } as const, ] + const dirname = path.dirname(new URL(import.meta.url).pathname) + process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) + for (const adapter of [driverAdapter, undefined]) { const isUsingDriverAdapters = adapter !== undefined describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts index 44cb1fde98a8..53156ac56249 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts @@ -6,10 +6,7 @@ import { smokeTestClient } from './client' describe('neon with @prisma/client', async () => { const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - const connection = neon(connectionString, { - arrayMode: false, - fullResults: true, - }) + const connection = neon(connectionString) const adapter = new PrismaNeonHTTP(connection) smokeTestClient(adapter) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts index ad453ae9313b..783eb76759d2 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts @@ -9,11 +9,9 @@ export function initQueryEngine( driver: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string, ): QueryEngineInstance { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) + const libQueryEnginePath = getLibQueryEnginePath(dirname) - const libQueryEnginePath = path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) const schemaPath = path.join(dirname, prismaSchemaRelativePath) console.log('[nodejs] read Prisma schema from', schemaPath) @@ -43,6 +41,12 @@ export function initQueryEngine( return engine } +export function getLibQueryEnginePath(dirname: String) { + // I assume nobody will run this on Windows ¯\_(ツ)_/¯ + const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' + return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) +} + export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { return async function doQuery(query: JsonQuery, tx_id?: string) { const result = await engine.query(JSON.stringify(query), 'trace', tx_id) From 3eac23cb88a079b5becc20cbf84f26394c115207 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 25 Sep 2023 09:04:23 +0200 Subject: [PATCH 036/128] test(qe): don't crash in rpc thread if request future was dropped (#4276) Don't crash in the RPC thread in external process executor if the receiver side of the oneshot channel was already dropped from the other side. This may happen if the request future is dropped without polling it to completion after the point in time where a `MethodCall` is already sent over `task_handle` MPSC channel. --- .../src/connector_tag/js/external_process.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs index 7ab0e6e8a021..7059d64dc641 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -125,7 +125,10 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { let sender = pending_requests.remove(response.id()).unwrap(); match response { jsonrpc_core::Output::Success(success) => { - sender.send(success.result).unwrap(); + // The other end may be dropped if the whole + // request future was dropped and not polled to + // completion, so we ignore send errors here. 
+                        _ = sender.send(success.result);
                     }
                     jsonrpc_core::Output::Failure(err) => {
                         panic!("error response from jsonrpc: {err:?}")

From 642b51ffcd01d1a43dee32f5b9e6261697528774 Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Mon, 25 Sep 2023 09:04:43 +0200
Subject: [PATCH 037/128] query-tests-setup: handle external process rpc
 thread panic (#4275)

* query-tests-setup: handle external process rpc thread panic

Terminate the process if the RPC thread crashes and print the panic
message instead of silently continuing and failing every test with
`SendError` trying to send requests to a closed channel.

* Improve message on panic
---
 .../src/connector_tag/js/external_process.rs  | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs
index 7059d64dc641..583d5058c62e 100644
--- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs
+++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs
@@ -32,13 +32,25 @@ impl ExecutorProcess {
     fn new() -> Result<ExecutorProcess> {
         let (sender, receiver) = mpsc::channel::<ReqImpl>(300);
 
-        std::thread::spawn(|| match start_rpc_thread(receiver) {
+        let handle = std::thread::spawn(|| match start_rpc_thread(receiver) {
             Ok(()) => (),
             Err(err) => {
                 exit_with_message(1, &err.to_string());
             }
         });
 
+        std::thread::spawn(move || {
+            if let Err(e) = handle.join() {
+                exit_with_message(
+                    1,
+                    &format!(
+                        "rpc thread panicked with: {}",
+                        e.downcast::<String>().unwrap_or_default()
+                    ),
+                );
+            }
+        });
+
         Ok(ExecutorProcess {
             task_handle: sender,
             request_id_counter: Default::default(),

From 53d58ea0a4600a7f95eaeb1bff7b99c7d552671d Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Mon, 25 Sep 2023 17:07:53 +0200
Subject: [PATCH 038/128] driver-adapters: update versions (#4281)

Already released on npm, versions need to be updated in the repo.
--- query-engine/driver-adapters/js/adapter-neon/package.json | 2 +- query-engine/driver-adapters/js/adapter-pg/package.json | 2 +- .../driver-adapters/js/adapter-planetscale/package.json | 2 +- .../driver-adapters/js/driver-adapter-utils/package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 78d188891748..04291500ce57 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.3.3", + "version": "0.3.4", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index dc3e73929609..ab19ba1a341d 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.3.3", + "version": "0.3.4", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index aedfde584c37..a34f9b52b4fc 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-planetscale", - "version": "0.3.3", + "version": "0.3.4", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 9bb375dff34c..22130b4ff536 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.3.3", + "version": "0.4.0", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", From 4bf3cce422a49f49c661da32d4016a5be81d28b4 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 26 Sep 2023 14:00:42 +0200 Subject: [PATCH 039/128] driver-adapters: libsql/turso adapter (#4229) -- Co-authored-by: Miguel Fernandez Co-authored-by: Jan Piotrowski --- .envrc | 1 + .../workflows/driver-adapter-smoke-tests.yml | 8 +- .../query-engine-driver-adapters.yml | 2 + Makefile | 3 + .../tests/new/interactive_tx.rs | 4 +- .../new/ref_actions/on_delete/set_default.rs | 8 +- .../new/ref_actions/on_update/set_default.rs | 8 +- .../query-tests-setup/src/config.rs | 8 +- .../test-configs/libsql-sqlite | 5 + .../js/adapter-libsql/.gitignore | 1 + .../js/adapter-libsql/README.md | 3 + .../js/adapter-libsql/package.json | 30 ++ .../js/adapter-libsql/src/conversion.ts | 159 +++++++++++ .../js/adapter-libsql/src/index.ts | 1 + .../js/adapter-libsql/src/libsql.ts | 123 ++++++++ .../js/adapter-libsql/tests/types.test.mts | 151 ++++++++++ .../js/adapter-libsql/tsconfig.build.json | 6 + .../js/adapter-libsql/tsconfig.json | 3 + .../connector-test-kit-executor/package.json | 2 + 
.../connector-test-kit-executor/src/index.ts | 17 +- query-engine/driver-adapters/js/package.json | 1 + .../driver-adapters/js/pnpm-lock.yaml | 267 +++++++++++++++++- .../driver-adapters/js/pnpm-workspace.yaml | 1 + .../js/smoke-test-js/.envrc.example | 15 + .../js/smoke-test-js/.gitignore | 4 + .../js/smoke-test-js/package.json | 14 +- .../sqlite/commands/type_test/insert.sql | 17 ++ .../20230915202554_init/migration.sql | 80 ++++++ .../sqlite/migrations/migration_lock.toml | 3 + .../smoke-test-js/prisma/sqlite/schema.prisma | 75 +++++ .../js/smoke-test-js/src/client/client.ts | 99 +++++-- .../smoke-test-js/src/client/libsql.test.ts | 20 ++ .../js/smoke-test-js/src/libquery/libquery.ts | 60 +++- .../smoke-test-js/src/libquery/libsql.test.ts | 22 ++ .../driver-adapters/src/conversion.rs | 13 +- query-engine/driver-adapters/src/proxy.rs | 74 ++++- query-engine/driver-adapters/src/queryable.rs | 12 +- 37 files changed, 1232 insertions(+), 88 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/test-configs/libsql-sqlite create mode 100644 query-engine/driver-adapters/js/adapter-libsql/.gitignore create mode 100644 query-engine/driver-adapters/js/adapter-libsql/README.md create mode 100644 query-engine/driver-adapters/js/adapter-libsql/package.json create mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts create mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/index.ts create mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts create mode 100644 query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts create mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json create mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.json create mode 100644 query-engine/driver-adapters/js/smoke-test-js/.gitignore create mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql create mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql create mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml create mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma create mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts create mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts diff --git a/.envrc b/.envrc index 64aea05c728e..48b1254c1700 100644 --- a/.envrc +++ b/.envrc @@ -56,5 +56,6 @@ fi # Source the gitignored .envrc.local if it exists. 
if test -f .envrc.local; then + watch_file .envrc.local source .envrc.local fi diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml index e3a233339b1a..ac4bc4c4fae3 100644 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ b/.github/workflows/driver-adapter-smoke-tests.yml @@ -18,7 +18,7 @@ jobs: strategy: fail-fast: false matrix: - adapter: ["neon:ws", "neon:http", planetscale, pg] + adapter: ["neon:ws", "neon:http", planetscale, pg, libsql] runs-on: ubuntu-latest @@ -39,6 +39,10 @@ jobs: JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} + # TODO: test sqld and embedded replicas + JS_LIBSQL_DATABASE_URL: file:/tmp/libsql.db + # TODO: test all three of ("number", "bigint", "string") and conditionally skip some tests as appropriate + JS_LIBSQL_INT_MODE: bigint steps: - uses: actions/checkout@v4 @@ -75,4 +79,4 @@ jobs: - name: pnpm errors run: pnpm errors if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js \ No newline at end of file + working-directory: ./query-engine/driver-adapters/js/smoke-test-js diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index dda32259ff80..8ab2d932e07f 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -27,6 +27,8 @@ jobs: setup_task: "dev-pg-postgres13" - name: "neon:ws" setup_task: "dev-neon-ws-postgres13" + - name: "libsql" + setup_task: "dev-libsql-sqlite" node_version: ["18"] env: LOG_LEVEL: "info" # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/Makefile b/Makefile index d0f4bdd04631..3a683b824e3b 100644 --- a/Makefile +++ b/Makefile @@ -77,6 +77,9 @@ start-sqlite: dev-sqlite: cp $(CONFIG_PATH)/sqlite $(CONFIG_FILE) +dev-libsql-sqlite: build-qe-napi build-connector-kit-js + cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) + start-postgres9: docker compose -f docker-compose.yml up -d --remove-orphans postgres9 diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index e45cef8ac306..9aa34a943560 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -213,7 +213,7 @@ mod interactive_tx { Ok(()) } - #[connector_test] + #[connector_test(exclude(JS))] async fn batch_queries_failure(mut runner: Runner) -> TestResult<()> { // Tx expires after five second. let tx_id = runner.start_tx(5000, 5000, None).await?; @@ -256,7 +256,7 @@ mod interactive_tx { Ok(()) } - #[connector_test] + #[connector_test(exclude(JS))] async fn tx_expiration_failure_cycle(mut runner: Runner) -> TestResult<()> { // Tx expires after one seconds. 
let tx_id = runner.start_tx(5000, 1000, None).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index d4aa49116ea0..8ea08acc85da 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -66,7 +66,7 @@ mod one2one_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -167,7 +167,7 @@ mod one2one_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -270,7 +270,7 @@ mod one2many_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -371,7 +371,7 @@ mod one2many_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs index 270bb927c265..b0e566ffcb55 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -68,7 +68,7 @@ mod one2one_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -171,7 +171,7 @@ mod one2one_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -276,7 +276,7 @@ mod one2many_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default))] + #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -379,7 +379,7 @@ mod one2many_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default))] + #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index 944f1c400400..37f3bc89bbd4 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -231,11 +231,15 @@ impl TestConfig { } if self.driver_adapter.is_some() && self.external_test_executor.is_none() { - exit_with_message("When using a driver adapter, the external test executor must be set."); + exit_with_message( + "When using a driver adapter, the external test executor (EXTERNAL_TEST_EXECUTOR env var) must be set.", + ); } if self.driver_adapter.is_none() && self.driver_adapter_config.is_some() { - exit_with_message("When using a driver adapter config, the driver adapter must be set."); + exit_with_message( + "When using a driver adapter config, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); } } diff --git a/query-engine/connector-test-kit-rs/test-configs/libsql-sqlite b/query-engine/connector-test-kit-rs/test-configs/libsql-sqlite new file mode 100644 index 000000000000..9638e3a22840 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/libsql-sqlite @@ -0,0 +1,5 @@ +{ + "connector": "sqlite", + "driver_adapter": "libsql", + "external_test_executor": "default" +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-libsql/.gitignore b/query-engine/driver-adapters/js/adapter-libsql/.gitignore new file mode 100644 index 000000000000..c370cb644f95 --- /dev/null +++ b/query-engine/driver-adapters/js/adapter-libsql/.gitignore @@ -0,0 +1 @@ +test.db diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md new file mode 100644 index 000000000000..2a869cbf5c78 --- /dev/null +++ b/query-engine/driver-adapters/js/adapter-libsql/README.md @@ -0,0 +1,3 @@ +# @prisma/adapter-libsql + +**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json new file mode 100644 index 000000000000..20817e5a7d9d --- /dev/null +++ 
b/query-engine/driver-adapters/js/adapter-libsql/package.json
@@ -0,0 +1,30 @@
+{
+  "name": "@prisma/adapter-libsql",
+  "version": "0.2.3",
+  "description": "Prisma's driver adapter for libsql and Turso",
+  "main": "dist/index.js",
+  "module": "dist/index.mjs",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsup ./src/index.ts --format cjs,esm --dts",
+    "lint": "tsc -p ./tsconfig.build.json",
+    "test": "node --loader tsx --test tests/*.test.mts"
+  },
+  "files": [
+    "dist",
+    "README.md"
+  ],
+  "keywords": [],
+  "author": "Alexey Orlenko",
+  "license": "Apache-2.0",
+  "sideEffects": false,
+  "dependencies": {
+    "@prisma/driver-adapter-utils": "workspace:*"
+  },
+  "devDependencies": {
+    "@libsql/client": "0.3.5"
+  },
+  "peerDependencies": {
+    "@libsql/client": "^0.3.5"
+  }
+}
diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts
new file mode 100644
index 000000000000..2ef0386df020
--- /dev/null
+++ b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts
@@ -0,0 +1,159 @@
+import { ColumnTypeEnum, ColumnType, Debug } from '@prisma/driver-adapter-utils'
+import { Row, Value } from '@libsql/client'
+import { isArrayBuffer } from 'node:util/types'
+
+const debug = Debug('prisma:driver-adapter:libsql:conversion')
+
+// Mirrors sqlite/conversion.rs in quaint
+function mapDeclType(declType: string): ColumnType | null {
+  switch (declType.toUpperCase()) {
+    case '':
+      return null
+    case 'DECIMAL':
+      return ColumnTypeEnum.Numeric
+    case 'FLOAT':
+      return ColumnTypeEnum.Float
+    case 'DOUBLE':
+    case 'DOUBLE PRECISION':
+    case 'NUMERIC':
+    case 'REAL':
+      return ColumnTypeEnum.Double
+    case 'TINYINT':
+    case 'SMALLINT':
+    case 'MEDIUMINT':
+    case 'INT':
+    case 'INTEGER':
+    case 'SERIAL':
+    case 'INT2':
+      return ColumnTypeEnum.Int32
+    case 'BIGINT':
+    case 'UNSIGNED BIG INT':
+    case 'INT8':
+      return ColumnTypeEnum.Int64
+    case 'DATETIME':
+    case 'TIMESTAMP':
+      return ColumnTypeEnum.DateTime
+    case 'TIME':
+      return ColumnTypeEnum.Time
+    case 'DATE':
+      return ColumnTypeEnum.Date
+    case 'TEXT':
+    case 'CLOB':
+    case 'CHARACTER':
+    case 'VARCHAR':
+    case 'VARYING CHARACTER':
+    case 'NCHAR':
+    case 'NATIVE CHARACTER':
+    case 'NVARCHAR':
+      return ColumnTypeEnum.Text
+    case 'BLOB':
+      return ColumnTypeEnum.Bytes
+    case 'BOOLEAN':
+      return ColumnTypeEnum.Boolean
+    default:
+      debug('unknown decltype:', declType)
+      return null
+  }
+}
+
+function mapDeclaredColumnTypes(columnTypes: string[]): [out: Array<ColumnType | null>, empty: Set<number>] {
+  const emptyIndices = new Set<number>()
+  const result = columnTypes.map((typeName, index) => {
+    const mappedType = mapDeclType(typeName)
+    if (mappedType === null) {
+      emptyIndices.add(index)
+    }
+    return mappedType
+  })
+  return [result, emptyIndices]
+}
+
+export function getColumnTypes(declaredTypes: string[], rows: Row[]): ColumnType[] {
+  const [columnTypes, emptyIndices] = mapDeclaredColumnTypes(declaredTypes)
+
+  if (emptyIndices.size === 0) {
+    return columnTypes as ColumnType[]
+  }
+
+  columnLoop: for (const columnIndex of emptyIndices) {
+    // No declared column type in db schema, infer using first non-null value
+    for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) {
+      const candidateValue = rows[rowIndex][columnIndex]
+      if (candidateValue !== null) {
+        columnTypes[columnIndex] = inferColumnType(candidateValue)
+        continue columnLoop
+      }
+    }
+
+    // No non-null value found for this column, fall back to int32 to mimic what quaint does
+    columnTypes[columnIndex] = ColumnTypeEnum.Int32
+  }
+
+  return columnTypes as ColumnType[]
+}
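// A quick sketch of the two paths above, with assumed inputs rather than
// lines from the patch: take a result set whose first column was declared
// BIGINT and whose second column has no decltype (e.g. an expression column):
//
//   getColumnTypes(['BIGINT', ''], [[1n, 'hello']])
//   // => [ColumnTypeEnum.Int64, ColumnTypeEnum.Text]
//
// The first column is resolved by mapDeclType alone; the second is inferred
// from its first non-null value, and would have fallen back to
// ColumnTypeEnum.Int32 had it been null in every row.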
+
+function inferColumnType(value: NonNullable<Value>): ColumnType {
+  switch (typeof value) {
+    case 'string':
+      return ColumnTypeEnum.Text
+    case 'bigint':
+      return ColumnTypeEnum.Int64
+    case 'boolean':
+      return ColumnTypeEnum.Boolean
+    case 'number':
+      return inferNumericType(value)
+    case 'object':
+      return inferObjectType(value)
+    default:
+      throw new UnexpectedTypeError(value)
+  }
+}
+
+function inferNumericType(value: number): ColumnType {
+  if (Number.isInteger(value)) {
+    return ColumnTypeEnum.Int64
+  } else {
+    return ColumnTypeEnum.Double
+  }
+}
+
+function inferObjectType(value: {}): ColumnType {
+  if (isArrayBuffer(value)) {
+    return ColumnTypeEnum.Bytes
+  }
+  throw new UnexpectedTypeError(value)
+}
+
+class UnexpectedTypeError extends Error {
+  name = 'UnexpectedTypeError'
+  constructor(value: unknown) {
+    const type = typeof value
+    const repr = type === 'object' ? JSON.stringify(value) : String(value)
+    super(`unexpected value of type ${type}: ${repr}`)
+  }
+}
+
+export function mapRow(row: Row): unknown[] {
+  // `Row` doesn't have map, so we copy the array once and modify it in-place
+  // to avoid allocating and copying twice if we used `Array.from(row).map(...)`.
+  const result: unknown[] = Array.from(row)
+
+  for (let i = 0; i < result.length; i++) {
+    const value = result[i]
+
+    // Convert bigint to string as we can only use JSON-encodable types here
+    if (typeof value === 'bigint') {
+      result[i] = value.toString()
+    }
+
+    // Convert array buffers to arrays of bytes.
+    // Base64 would've been more efficient but would collide with the existing
+    // logic that treats string values of type Bytes as raw UTF-8 bytes that was
+    // implemented for other adapters.
+    if (isArrayBuffer(value)) {
+      result[i] = Array.from(new Uint8Array(value))
+    }
+  }
+
+  return result
+}
diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts
new file mode 100644
index 000000000000..04a95cc4cfcd
--- /dev/null
+++ b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts
@@ -0,0 +1 @@
+export { PrismaLibSQL } from './libsql'
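Two details of mapRow above exist purely because rows travel to the engine as JSON: bigints are stringified and BLOBs become plain byte arrays. A standalone sketch of the same rules, with made-up values (not the adapter's actual export):

// Illustrative re-statement of mapRow's per-value conversions.
function toJsonSafe(value: unknown): unknown {
  if (typeof value === 'bigint') {
    return value.toString() // bigint is not JSON-encodable
  }
  if (value instanceof ArrayBuffer) {
    return Array.from(new Uint8Array(value)) // raw bytes, matching the Bytes convention
  }
  return value
}

console.log(toJsonSafe(123n)) // '123'
console.log(toJsonSafe(new Uint8Array([0xde, 0xad]).buffer)) // [222, 173]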
diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts
new file mode 100644
index 000000000000..1f119ac4937d
--- /dev/null
+++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts
@@ -0,0 +1,123 @@
+import { Debug, ok } from '@prisma/driver-adapter-utils'
+import type {
+  DriverAdapter,
+  Query,
+  Queryable,
+  Result,
+  ResultSet,
+  Transaction,
+  TransactionOptions,
+} from '@prisma/driver-adapter-utils'
+import type { InStatement, Client as LibSqlClientRaw, Transaction as LibSqlTransactionRaw } from '@libsql/client'
+import { getColumnTypes, mapRow } from './conversion'
+
+const debug = Debug('prisma:driver-adapter:libsql')
+
+type StdClient = LibSqlClientRaw
+type TransactionClient = LibSqlTransactionRaw
+
+class LibSqlQueryable<ClientT extends StdClient | TransactionClient> implements Queryable {
+  readonly flavour = 'sqlite'
+
+  constructor(protected readonly client: ClientT) {}
+
+  /**
+   * Execute a query given as SQL, interpolating the given parameters.
+   */
+  async queryRaw(query: Query): Promise<Result<ResultSet>> {
+    const tag = '[js::query_raw]'
+    debug(`${tag} %O`, query)
+
+    const { columns, rows, columnTypes: declaredColumnTypes } = await this.performIO(query)
+
+    const columnTypes = getColumnTypes(declaredColumnTypes, rows)
+
+    const resultSet: ResultSet = {
+      columnNames: columns,
+      columnTypes,
+      rows: rows.map(mapRow),
+    }
+
+    return ok(resultSet)
+  }
+
+  /**
+   * Execute a query given as SQL, interpolating the given parameters and
+   * returning the number of affected rows.
+   * Note: Queryable expects a u64, but napi.rs only supports u32.
+   */
+  async executeRaw(query: Query): Promise<Result<number>> {
+    const tag = '[js::execute_raw]'
+    debug(`${tag} %O`, query)
+
+    const { rowsAffected } = await this.performIO(query)
+    return ok(rowsAffected ?? 0)
+  }
+
+  /**
+   * Run a query against the database, returning the result set.
+   * Should the query fail due to a connection error, the connection is
+   * marked as unhealthy.
+   */
+  private async performIO(query: Query) {
+    try {
+      const result = await this.client.execute(query as InStatement)
+      return result
+    } catch (e) {
+      const error = e as Error
+      debug('Error in performIO: %O', error)
+      throw error
+    }
+  }
+}
+
+class LibSqlTransaction extends LibSqlQueryable<TransactionClient> implements Transaction {
+  constructor(
+    client: TransactionClient,
+    readonly options: TransactionOptions,
+  ) {
+    super(client)
+  }
+
+  async commit(): Promise<Result<void>> {
+    debug(`[js::commit]`)
+
+    await this.client.commit()
+    return ok(undefined)
+  }
+
+  async rollback(): Promise<Result<void>> {
+    debug(`[js::rollback]`)
+
+    try {
+      await this.client.rollback()
+    } catch (error) {
+      debug('error in rollback:', error)
+    }
+
+    return ok(undefined)
+  }
+}
+
+export class PrismaLibSQL extends LibSqlQueryable<StdClient> implements DriverAdapter {
+  constructor(client: StdClient) {
+    super(client)
+  }
+
+  async startTransaction(): Promise<Result<Transaction>> {
+    const options: TransactionOptions = {
+      usePhantomQuery: true,
+    }
+
+    const tag = '[js::startTransaction]'
+    debug(`${tag} options: %O`, options)
+
+    const tx = await this.client.transaction('deferred')
+    return ok(new LibSqlTransaction(tx, options))
+  }
+
+  async close(): Promise<Result<void>> {
+    this.client.close()
+    return ok(undefined)
+  }
+}
diff --git a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts
new file mode 100644
index 000000000000..f7f1b474a300
--- /dev/null
+++ b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts
@@ -0,0 +1,151 @@
+import assert from 'node:assert/strict'
+import { describe, it } from 'node:test'
+import { Config, createClient } from '@libsql/client'
+import { PrismaLibSQL } from '../dist/index.js'
+import { ColumnTypeEnum } from '@prisma/driver-adapter-utils'
+
+function connect(config?: Partial<Config>): PrismaLibSQL {
+  const client = createClient({ url: 'file:test.db', ...config })
+  return new PrismaLibSQL(client)
+}
+
+it('checks declared types', async () => {
+  const client = connect()
+
+  await client.executeRaw({
+    sql: `
+      DROP TABLE IF EXISTS types;
+    `,
+    args: [],
+  })
+
+  await client.executeRaw({
+    sql: `
+      CREATE TABLE types (
+        id INTEGER PRIMARY KEY,
+        real REAL,
+        bigint BIGINT,
+        date DATETIME,
+        text TEXT,
+        blob BLOB
+      )
+    `,
+    args: [],
+  })
+
+  const result = await client.queryRaw({
+    sql: `
+      SELECT * FROM types
+    `,
+    args: [],
+  })
+
+  assert(result.ok)
+  assert.deepEqual(result.value.columnTypes, [
+    ColumnTypeEnum.Int32,
+    ColumnTypeEnum.Double,
+    ColumnTypeEnum.Int64,
+    ColumnTypeEnum.DateTime,
+ ColumnTypeEnum.Text, + ColumnTypeEnum.Bytes, + ]) +}) + +it('infers types when sqlite decltype is not available', async () => { + const client = connect() + + const result = await client.queryRaw({ + sql: ` + SELECT 1 as first, 'test' as second + `, + args: [], + }) + + assert(result.ok) + assert.deepEqual(result.value.columnTypes, [ColumnTypeEnum.Int64, ColumnTypeEnum.Text]) +}) + +describe('int64 with different intMode', () => { + const N = 2n ** 63n - 1n + + it('correctly infers int64 with intMode=number for safe JS integers', async () => { + const client = connect({ intMode: 'number' }) + + const result = await client.queryRaw({ + sql: `SELECT ?`, + args: [Number.MAX_SAFE_INTEGER], + }) + + assert(result.ok) + assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) + assert.equal(result.value.rows[0][0], Number.MAX_SAFE_INTEGER) + }) + + it("doesn't support very big int64 with intMode=number", async () => { + const client = connect({ intMode: 'number' }) + + assert.rejects( + client.queryRaw({ + sql: `SELECT ?`, + args: [N], + }), + ) + }) + + it('correctly infers int64 with intMode=bigint', async () => { + const client = connect({ intMode: 'bigint' }) + + const result = await client.queryRaw({ + sql: `SELECT ?`, + args: [N], + }) + + assert(result.ok) + assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) + + // bigints are converted to strings because we can't currently pass a bigint + // to rust due to a napi.rs limitation + assert.equal(result.value.rows[0][0], N.toString()) + }) + + it('correctly infers int64 with intMode=string when we have decltype', async () => { + const client = connect({ intMode: 'string' }) + + await client.executeRaw({ + sql: `DROP TABLE IF EXISTS test`, + args: [], + }) + + await client.executeRaw({ + sql: `CREATE TABLE test (int64 BIGINT)`, + args: [], + }) + + await client.executeRaw({ + sql: `INSERT INTO test (int64) VALUES (?)`, + args: [N], + }) + + const result = await client.queryRaw({ + sql: `SELECT int64 FROM test`, + args: [], + }) + + assert(result.ok) + assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) + assert.equal(result.value.rows[0][0], N.toString()) + }) + + it("can't infer int64 with intMode=string without schema", async () => { + const client = connect({ intMode: 'string' }) + + const result = await client.queryRaw({ + sql: `SELECT ?`, + args: [N], + }) + + assert(result.ok) + assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Text) + assert.equal(result.value.rows[0][0], N.toString()) + }) +}) diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json new file mode 100644 index 000000000000..28c56f6c3a9a --- /dev/null +++ b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json @@ -0,0 +1,6 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "declaration" + } +} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json new file mode 100644 index 000000000000..3c43903cfdd1 --- /dev/null +++ b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../tsconfig.json" +} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index 1dc1315afc83..6d4ba374461c 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ 
b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
@@ -13,7 +13,9 @@
   "sideEffects": false,
   "license": "Apache-2.0",
   "dependencies": {
+    "@libsql/client": "0.3.5",
     "@neondatabase/serverless": "^0.6.0",
+    "@prisma/adapter-libsql": "workspace:*",
     "@prisma/adapter-neon": "workspace:*",
     "@prisma/adapter-pg": "workspace:*",
     "@prisma/driver-adapter-utils": "workspace:*",
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
index 21df3430d3b1..9e0c38678a6f 100644
--- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
@@ -12,10 +12,15 @@ import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless'
 import { WebSocket } from 'undici'
 import * as prismaNeon from '@prisma/adapter-neon'
 
+// libsql dependencies
+import { createClient } from '@libsql/client'
+import { PrismaLibSQL } from '@prisma/adapter-libsql'
+
 import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils";
+
 const SUPPORTED_ADAPTERS: Record<string, (_: string) => Promise<DriverAdapter>>
-    = {"pg": pgAdapter, "neon:ws" : neonWsAdapter};
+    = {"pg": pgAdapter, "neon:ws" : neonWsAdapter, "libsql": libsqlAdapter};
 
 // conditional debug logging based on LOG_LEVEL env var
 const debug = (() => {
@@ -31,7 +36,6 @@ const debug = (() => {
 
 // error logger
 const err = (...args: any[]) => console.error('[nodejs] ERROR:', ...args);
 
-
 async function main(): Promise<void> {
     const iface = readline.createInterface({
         input: process.stdin,
@@ -229,10 +233,10 @@ async function pgAdapter(url: string): Promise<DriverAdapter> {
 
 async function neonWsAdapter(url: string): Promise<DriverAdapter> {
     const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? ''
     if (proxyURL == '') {
-        throw new Error("DRIVER_ADAPTER_URL_OVERRIDE is not defined or empty, but its required for neon adapter.");
+        throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but it's required for the neon adapter.");
     }
 
-    neonConfig.wsProxy = () => `127.0.0.1:5488/v1`
+    neonConfig.wsProxy = () => proxyURL
     neonConfig.webSocketConstructor = WebSocket
     neonConfig.useSecureWebSocket = false
    neonConfig.pipelineConnect = false
@@ -241,4 +245,9 @@ async function neonWsAdapter(url: string): Promise<DriverAdapter> {
     return new prismaNeon.PrismaNeon(pool)
 }
 
+async function libsqlAdapter(url: string): Promise<DriverAdapter> {
+    const libsql = createClient({ url, intMode: 'bigint' })
+    return new PrismaLibSQL(libsql)
+}
+
 main().catch(err)
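SUPPORTED_ADAPTERS above is the executor's whole dispatch table: the adapter name selects a factory, the factory builds the driver, and bindAdapter wraps it for error capturing. A minimal sketch of that lookup; DRIVER_ADAPTER is the env var named in the test config validation, while DRIVER_ADAPTER_URL is an assumed stand-in for illustration (the real executor is driven over JSON-RPC on stdin):

// Illustrative adapter selection, mirroring how the executor consumes the map.
async function createAdapter(): Promise<ErrorCapturingDriverAdapter> {
  const name = process.env.DRIVER_ADAPTER ?? 'pg'
  const factory = SUPPORTED_ADAPTERS[name]
  if (factory === undefined) {
    throw new Error(`unsupported driver adapter: ${name}`)
  }
  // DRIVER_ADAPTER_URL is hypothetical; the connection string really arrives via RPC.
  const adapter = await factory(process.env.DRIVER_ADAPTER_URL ?? '')
  return bindAdapter(adapter)
}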
diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json
index 1fc20228fe6f..2036794f8c02 100644
--- a/query-engine/driver-adapters/js/package.json
+++ b/query-engine/driver-adapters/js/package.json
@@ -17,6 +17,7 @@
   "devDependencies": {
     "@types/node": "^20.5.1",
     "tsup": "^7.2.0",
+    "tsx": "^3.12.7",
     "typescript": "^5.1.6"
   }
 }
diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml
index ce7a442a5388..78e9e94da329 100644
--- a/query-engine/driver-adapters/js/pnpm-lock.yaml
+++ b/query-engine/driver-adapters/js/pnpm-lock.yaml
@@ -14,10 +14,23 @@ importers:
       tsup:
         specifier: ^7.2.0
         version: 7.2.0(typescript@5.1.6)
+      tsx:
+        specifier: ^3.12.7
+        version: 3.12.7
       typescript:
         specifier: ^5.1.6
         version: 5.1.6
 
+  adapter-libsql:
+    dependencies:
+      '@prisma/driver-adapter-utils':
+        specifier: workspace:*
+        version: link:../driver-adapter-utils
+    devDependencies:
+      '@libsql/client':
+        specifier: 0.3.5
+        version: 0.3.5
+
   adapter-neon:
     dependencies:
       '@prisma/driver-adapter-utils':
@@ -53,9 +66,15 @@ importers:
 
   connector-test-kit-executor:
     dependencies:
+      '@libsql/client':
+        specifier: 0.3.5
+        version: 0.3.5
       '@neondatabase/serverless':
         specifier: ^0.6.0
         version: 0.6.0
+      '@prisma/adapter-libsql':
+        specifier: workspace:*
+        version: link:../adapter-libsql
       '@prisma/adapter-neon':
         specifier: workspace:*
         version: link:../adapter-neon
@@ -87,12 +106,18 @@ importers:
 
   smoke-test-js:
     dependencies:
+      '@libsql/client':
+        specifier: 0.3.5
+        version: 0.3.5
       '@neondatabase/serverless':
         specifier: ^0.6.0
         version: 0.6.0
       '@planetscale/database':
         specifier: ^1.11.0
         version: 1.11.0
+      '@prisma/adapter-libsql':
+        specifier: workspace:*
+        version: link:../adapter-libsql
       '@prisma/adapter-neon':
         specifier: workspace:*
         version: link:../adapter-neon
@@ -103,8 +128,8 @@ importers:
         specifier: workspace:*
         version: link:../adapter-planetscale
       '@prisma/client':
-        specifier: 5.4.0-dev.29
-        version: 5.4.0-dev.29(prisma@5.4.0-dev.29)
+        specifier: 5.4.0-integration-libsql-adapter.7
+        version: 5.4.0-integration-libsql-adapter.7(prisma@5.4.0-integration-libsql-adapter.7)
       '@prisma/driver-adapter-utils':
         specifier: workspace:*
         version: link:../driver-adapter-utils
@@ -128,8 +153,8 @@ importers:
         specifier: ^7.0.3
         version: 7.0.3
       prisma:
-        specifier: 5.4.0-dev.29
-        version: 5.4.0-dev.29
+        specifier: 5.4.0-integration-libsql-adapter.7
+        version: 5.4.0-integration-libsql-adapter.7
       tsx:
         specifier: ^3.12.7
         version: 3.12.7
@@ -385,6 +410,84 @@ packages:
       '@jridgewell/sourcemap-codec': 1.4.15
     dev: true
 
+  /@libsql/client@0.3.5:
+    resolution: {integrity: sha512-4fZxGh0qKW5dtp1yuQLRvRAtbt02V4jzjM9sHSmz5k25xZTLg7/GlNudKdqKZrjJXEV5PvDNsczupBtedZZovw==}
+    dependencies:
+      '@libsql/hrana-client': 0.5.5
+      js-base64: 3.7.5
+      libsql: 0.1.23
transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + /@libsql/darwin-arm64@0.1.23: + resolution: {integrity: sha512-+V9aoOrZ47iYbY5NrcS0F2bDOCH407QI0wxAtss0CLOcFxlz/T6Nw0ryLK31GabklJQAmOXIyqkumLfz5HT64w==} + cpu: [arm64] + os: [darwin] + requiresBuild: true + optional: true + + /@libsql/darwin-x64@0.1.23: + resolution: {integrity: sha512-toHo7s0HiMl4VCIfjhGXDe9bGWWo78eP8fxIbwU6RlaLO6MNV9fjHY/GjTWccWOwyxcT+q6X/kUc957HnoW3bg==} + cpu: [x64] + os: [darwin] + requiresBuild: true + optional: true + + /@libsql/hrana-client@0.5.5: + resolution: {integrity: sha512-i+hDBpiV719poqEiHupUUZYKJ9YSbCRFe5Q2PQ0v3mHIftePH6gayLjp2u6TXbqbO/Dv6y8yyvYlBXf/kFfRZA==} + dependencies: + '@libsql/isomorphic-fetch': 0.1.10 + '@libsql/isomorphic-ws': 0.1.5 + js-base64: 3.7.5 + node-fetch: 3.3.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + /@libsql/isomorphic-fetch@0.1.10: + resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} + dependencies: + '@types/node-fetch': 2.6.6 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + + /@libsql/isomorphic-ws@0.1.5: + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + dependencies: + '@types/ws': 8.5.5 + ws: 8.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + /@libsql/linux-x64-gnu@0.1.23: + resolution: {integrity: sha512-U11LdjayakOj0lQCHDYkTgUfe4Q+7AjZZh8MzgEDF/9l0bmKNI3eFLWA3JD2Xm98yz65lUx95om0WKOKu5VW/w==} + cpu: [x64] + os: [linux] + requiresBuild: true + optional: true + + /@libsql/linux-x64-musl@0.1.23: + resolution: {integrity: sha512-8UcCK2sPVzcafHsEmcU5IDp/NxjD6F6JFS5giijsMX5iGgxYQiiwTUMOmSxW0AWBeT4VY5U7G6rG5PC8JSFtfg==} + cpu: [x64] + os: [linux] + requiresBuild: true + optional: true + + /@libsql/win32-x64-msvc@0.1.23: + resolution: {integrity: sha512-HAugD66jTmRRRGNMLKRiaFeMOC3mgUsAiuO6NRdRz3nM6saf9e5QqN/Ppuu9yqHHcZfv7VhQ9UGlAvzVK64Itg==} + cpu: [x64] + os: [win32] + requiresBuild: true + optional: true + + /@neon-rs/load@0.0.4: + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + /@neondatabase/serverless@0.6.0: resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} dependencies: @@ -415,8 +518,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.4.0-dev.29(prisma@5.4.0-dev.29): - resolution: {integrity: sha512-rpuBku3CFmX6FDq2SANcc9Ch6ZTqT6fyhvhe66bI/kzJjVY4NN7PwleJesB8/VfS5TkAYMmK5HcPQUwi5hZEVw==} + /@prisma/client@5.4.0-integration-libsql-adapter.7(prisma@5.4.0-integration-libsql-adapter.7): + resolution: {integrity: sha512-0WNHV37C16IPoE0cgBtX4heYDHaLXQhq4IIB50RMw/WJyzozhigEIizaFKbBdZ4PjxpekohHvPgTH6r4QfiTrA==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -425,16 +528,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.4.0-18.32692fd2ce90d456c093eb8eae68511575243419 - prisma: 5.4.0-dev.29 + '@prisma/engines-version': 5.4.0-24.libsql-adapter-8337c6b372b0f4eb2500403a7cf450885aee4cdc + prisma: 5.4.0-integration-libsql-adapter.7 dev: false - /@prisma/engines-version@5.4.0-18.32692fd2ce90d456c093eb8eae68511575243419: - resolution: {integrity: 
sha512-6qgjyvmru90p7sn+mWQlZDmX8WgYTZ/cB2kpDShjbg1ymF4dIszqUm6RZqESoZ39Mgp5d620AgDtZqfFQ8sWRQ==} + /@prisma/engines-version@5.4.0-24.libsql-adapter-8337c6b372b0f4eb2500403a7cf450885aee4cdc: + resolution: {integrity: sha512-Yr2GeXHTK2FdxF5o0lLyZk0oJC8L1QMADZyPn+wTNcG9kfMCCs3cvQwPLDdvsMUHfwJ0c31r6w0mEpM4c37Ejw==} dev: false - /@prisma/engines@5.4.0-dev.29: - resolution: {integrity: sha512-3uZ/rLbrJcVSv6js2haSGb2QqB4n26j3Gr4w7iySiQ5O83L11rtOjmkRGdX8yXxEBibuWtOn43SJb24AlQDj8g==} + /@prisma/engines@5.4.0-integration-libsql-adapter.7: + resolution: {integrity: sha512-QRNhAeLw4EqSE+N6tzpOSlkqW9XO1Zf3aUO4wNH3LJTjG153oIJDnGfahijF93PjuyIOSHEFGZ7mfKeAaq7FiA==} requiresBuild: true /@types/debug@4.1.8: @@ -447,6 +550,12 @@ packages: resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} dev: true + /@types/node-fetch@2.6.6: + resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==} + dependencies: + '@types/node': 20.6.5 + form-data: 4.0.0 + /@types/node@20.5.1: resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} dev: true @@ -454,6 +563,9 @@ packages: /@types/node@20.5.9: resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} + /@types/node@20.6.5: + resolution: {integrity: sha512-2qGq5LAOTh9izcc0+F+dToFigBWiK1phKPt7rNhOqJSr35y8rlIBjDwGtFSgAI6MGIhjwOVNSQZVdJsZJ2uR1w==} + /@types/pg@8.10.2: resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} dependencies: @@ -468,6 +580,11 @@ packages: pg-protocol: 1.6.0 pg-types: 2.2.0 + /@types/ws@8.5.5: + resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} + dependencies: + '@types/node': 20.6.5 + /any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} dev: true @@ -485,6 +602,9 @@ packages: engines: {node: '>=8'} dev: true + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + /balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} dev: true @@ -553,6 +673,12 @@ packages: fsevents: 2.3.3 dev: true + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + /commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} engines: {node: '>= 6'} @@ -586,6 +712,10 @@ packages: which: 2.0.2 dev: true + /data-uri-to-buffer@4.0.1: + resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -597,6 +727,14 @@ packages: dependencies: ms: 2.1.2 + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + /detect-libc@2.0.2: + 
resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} @@ -666,6 +804,13 @@ packages: reusify: 1.0.4 dev: true + /fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.2.1 + /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} @@ -673,6 +818,20 @@ packages: to-regex-range: 5.0.1 dev: true + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + + /formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + dependencies: + fetch-blob: 3.2.0 + /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} dev: true @@ -790,6 +949,23 @@ packages: engines: {node: '>=10'} dev: true + /js-base64@3.7.5: + resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + + /libsql@0.1.23: + resolution: {integrity: sha512-Nf/1B2Glxvcnba4jYFhXcaYmicyBA3RRm0LVwBkTl8UWCIDbX+Ad7c1ecrQwixPLPffWOVxKIqyCNTuUHUkVgA==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.1.23 + '@libsql/darwin-x64': 0.1.23 + '@libsql/linux-x64-gnu': 0.1.23 + '@libsql/linux-x64-musl': 0.1.23 + '@libsql/win32-x64-msvc': 0.1.23 + /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} engines: {node: '>=10'} @@ -825,6 +1001,16 @@ packages: picomatch: 2.3.1 dev: true + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} @@ -847,6 +1033,29 @@ packages: thenify-all: 1.6.0 dev: true + /node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + + /node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + + /node-fetch@3.3.2: + resolution: {integrity: 
sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} @@ -1035,13 +1244,13 @@ packages: /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - /prisma@5.4.0-dev.29: - resolution: {integrity: sha512-TlhIZLVZsDVIQBcVZ8bRi9CJrThkEhKMJ9sEBtsINYx4ju3k7lGl9Kdqlm7zOW4FVwSNPgKvgsdzRgsO6fbDug==} + /prisma@5.4.0-integration-libsql-adapter.7: + resolution: {integrity: sha512-B7nkAnHFAxEMPS/o3jpUeUOp97Js3HlRThfXMfYILrVML/MMy18HwjQrVzxfF/QSq7UxbUQAyGLFrqSypTPAzw==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.4.0-dev.29 + '@prisma/engines': 5.4.0-integration-libsql-adapter.7 /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} @@ -1182,6 +1391,9 @@ packages: is-number: 7.0.0 dev: true + /tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} dependencies: @@ -1257,10 +1469,23 @@ packages: busboy: 1.6.0 dev: false + /web-streams-polyfill@3.2.1: + resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} + engines: {node: '>= 8'} + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} dev: true + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} dependencies: @@ -1281,6 +1506,18 @@ packages: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} dev: true + /ws@8.14.2: + resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml index a7ffe6b09c5f..f9e70da7ee5a 100644 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ b/query-engine/driver-adapters/js/pnpm-workspace.yaml @@ -1,4 +1,5 @@ packages: + - './adapter-libsql' - './adapter-neon' - './adapter-pg' - './adapter-planetscale' diff --git 
a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example index 4a73859e999e..9b2e18373163 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example +++ b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example @@ -3,3 +3,18 @@ export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central # Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable` export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438" + +# Set this to a `file:` URL when using a local sqlite database (either +# standalone or as an embedded replica). Otherwise, when using a remote Turso +# (or sqld) database in HTTP mode directly without an embedded replica, set its +# URL here. +export JS_LIBSQL_DATABASE_URL="file:${PWD}/libsql.db" + +# # Set this to the URL of remote Turso database when using an embedded replica. +# export JS_LIBSQL_SYNC_URL="" + +# # Provide an auth token when using a remote Turso database. +# export JS_LIBSQL_AUTH_TOKEN="" + +# Can be one of "number" (the default when nothing is specified), "bigint" or "string". "bigint" works best with Prisma. +export JS_LIBSQL_INT_MODE="bigint" diff --git a/query-engine/driver-adapters/js/smoke-test-js/.gitignore b/query-engine/driver-adapters/js/smoke-test-js/.gitignore new file mode 100644 index 000000000000..be550f99317f --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/.gitignore @@ -0,0 +1,4 @@ +libsql.db +libsql.db-journal +libsql.db-shm +libsql.db-wal diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 95459d0237d4..f54e717dbbd6 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -10,6 +10,8 @@ "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", + "prisma:db:push:sqlite": "prisma db push --schema ./prisma/sqlite/schema.prisma --force-reset", + "prisma:db:execute:sqlite": "prisma db execute --schema ./prisma/sqlite/schema.prisma --file ./prisma/sqlite/commands/type_test/insert.sql", "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", "prisma:neon:ws": "pnpm prisma:neon", "prisma:neon:http": "pnpm prisma:neon", @@ -31,19 +33,25 @@ "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", - "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client" + "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client", + "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"", + "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/libsql.test.ts", + "libsql:client": "cross-env-shell 
DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --loader=tsx ./src/client/libsql.test.ts", + "libsql": "pnpm libsql:libquery && pnpm libsql:client" }, "keywords": [], "author": "Alberto Schiabel ", "license": "Apache-2.0", "sideEffects": true, "dependencies": { + "@libsql/client": "0.3.5", "@neondatabase/serverless": "^0.6.0", "@planetscale/database": "^1.11.0", + "@prisma/adapter-libsql": "workspace:*", "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.0-dev.29", + "@prisma/client": "5.4.0-integration-libsql-adapter.7", "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", @@ -53,7 +61,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.4.0-dev.29", + "prisma": "5.4.0-integration-libsql-adapter.7", "tsx": "^3.12.7" } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql new file mode 100644 index 000000000000..014592d2fa2c --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql @@ -0,0 +1,17 @@ +INSERT INTO type_test ( + int_column, + bigint_column, + double_column, + decimal_column, + boolean_column, + text_column, + datetime_column +) VALUES ( + 2147483647, -- int + 9223372036854775807, -- bigint + 1.7976931348623157, -- double + 99999999.99, -- decimal + TRUE, -- boolean + 'This is a long text...', -- text + '2023-07-24 23:59:59.415' -- datetime +); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql new file mode 100644 index 000000000000..77e333ceeb53 --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql @@ -0,0 +1,80 @@ +-- CreateTable +CREATE TABLE "type_test" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "int_column" INTEGER NOT NULL, + "int_column_null" INTEGER, + "bigint_column" BIGINT NOT NULL, + "bigint_column_null" BIGINT, + "double_column" REAL NOT NULL, + "double_column_null" REAL, + "decimal_column" DECIMAL NOT NULL, + "decimal_column_null" DECIMAL, + "boolean_column" BOOLEAN NOT NULL, + "boolean_column_null" BOOLEAN, + "text_column" TEXT NOT NULL, + "text_column_null" TEXT, + "datetime_column" DATETIME NOT NULL, + "datetime_column_null" DATETIME +); + +-- CreateTable +CREATE TABLE "type_test_2" ( + "id" TEXT NOT NULL PRIMARY KEY, + "datetime_column" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "datetime_column_null" DATETIME +); + +-- CreateTable +CREATE TABLE "type_test_3" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "bytes" BLOB NOT NULL +); + +-- CreateTable +CREATE TABLE "Child" ( + "c" TEXT NOT NULL, + "c_1" TEXT NOT NULL, + "c_2" TEXT NOT NULL, + "parentId" TEXT, + "non_unique" TEXT, + "id" TEXT NOT NULL PRIMARY KEY +); + +-- CreateTable +CREATE TABLE "Parent" ( + "p" TEXT NOT NULL, + "p_1" TEXT NOT NULL, + "p_2" TEXT NOT NULL, + "non_unique" TEXT, + "id" TEXT NOT NULL PRIMARY KEY +); + +-- CreateTable +CREATE TABLE "authors" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "firstName" TEXT NOT NULL, + "lastName" TEXT NOT NULL, + "age" INTEGER NOT NULL +); + +-- CreateTable +CREATE TABLE "Product" ( + "id" TEXT NOT NULL PRIMARY 
KEY, + "properties" TEXT NOT NULL, + "properties_null" TEXT +); + +-- CreateIndex +CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c"); + +-- CreateIndex +CREATE UNIQUE INDEX "Child_parentId_key" ON "Child"("parentId"); + +-- CreateIndex +CREATE UNIQUE INDEX "Child_c_1_c_2_key" ON "Child"("c_1", "c_2"); + +-- CreateIndex +CREATE UNIQUE INDEX "Parent_p_key" ON "Parent"("p"); + +-- CreateIndex +CREATE UNIQUE INDEX "Parent_p_1_p_2_key" ON "Parent"("p_1", "p_2"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml new file mode 100644 index 000000000000..e5e5c4705ab0 --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml @@ -0,0 +1,3 @@ +# Please do not edit this file manually +# It should be added in your version-control system (i.e. Git) +provider = "sqlite" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma new file mode 100644 index 000000000000..e1432d2f316a --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma @@ -0,0 +1,75 @@ +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +model type_test { + id Int @id @default(autoincrement()) + int_column Int + int_column_null Int? + bigint_column BigInt + bigint_column_null BigInt? + double_column Float + double_column_null Float? + decimal_column Decimal + decimal_column_null Decimal? + boolean_column Boolean + boolean_column_null Boolean? + text_column String + text_column_null String? + datetime_column DateTime + datetime_column_null DateTime? +} + +// This will eventually supersede type_test +model type_test_2 { + id String @id @default(cuid()) + datetime_column DateTime @default(now()) + datetime_column_null DateTime? +} + +model type_test_3 { + id Int @id @default(autoincrement()) + bytes Bytes +} + +model Child { + c String @unique + c_1 String + c_2 String + parentId String? @unique + non_unique String? + id String @id + + @@unique([c_1, c_2]) +} + +model Parent { + p String @unique + p_1 String + p_2 String + non_unique String? + id String @id + + @@unique([p_1, p_2]) +} + +model Author { + id Int @id @default(autoincrement()) + firstName String + lastName String + age Int + + @@map("authors") +} + +model Product { + id String @id @default(cuid()) + properties String + properties_null String? 
+} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index 35a7e8975f26..7074320f4351 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -26,7 +26,7 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { adapter, log, }) - + const queries: string[] = [] prisma.$on('query', ({ query }) => queries.push(query)) @@ -52,6 +52,8 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { '-- Implicit "COMMIT" query via underlying driver', ] + // TODO: sqlite should be here too but it's too flaky the way the test is currently written, + // only a subset of logs arrives on time (from 2 to 4 out of 5) if (['mysql'].includes(provider)) { if (isUsingDriverAdapters) { assert.deepEqual(queries, driverAdapterExpectedQueries) @@ -63,41 +65,76 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) assert.deepEqual( queries.filter((q) => q !== 'DEALLOCATE ALL'), - defaultExpectedQueries + defaultExpectedQueries, ) } }) - - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ - adapter, - log, + + if (provider !== 'sqlite') { + it('applies isolation level when using batch $transaction', async () => { + const prisma = new PrismaClient({ adapter, log }) + + const queries: string[] = [] + prisma.$on('query', ({ query }) => queries.push(query)) + + await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { + isolationLevel: 'ReadCommitted', + }) + + if (['mysql'].includes(provider)) { + assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) + } else if (['postgres'].includes(provider)) { + assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) + } + + assert.deepEqual(queries.at(-1), 'COMMIT') }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.child.findMany(), - prisma.child.count(), - ], { - isolationLevel: 'ReadCommitted', + } else { + describe('isolation levels with sqlite', () => { + it('accepts Serializable as a no-op', async () => { + const prisma = new PrismaClient({ adapter, log }) + + const queries: string[] = [] + prisma.$on('query', ({ query }) => queries.push(query)) + + await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { + isolationLevel: 'Serializable', + }) + + if (isUsingDriverAdapters) { + assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') + assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') + } else { + assert.equal(queries.at(0), 'BEGIN') + assert.equal(queries.at(-1), 'COMMIT') + } + + assert(!queries.find((q) => q.includes('SET TRANSACTION ISOLATION LEVEL'))) + }) + + it('throws on unsupported isolation levels', async () => { + const prisma = new PrismaClient({ adapter }) + + assert.rejects( + prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { + isolationLevel: 'ReadCommitted', + }), + ) + }) + + it('bytes type support', async () => { + const prisma = new PrismaClient({ adapter, log }) + + const result = await prisma.type_test_3.create({ + data: { + bytes: Buffer.from([1, 2, 3, 4]), + }, + }) + + assert.deepEqual(result.bytes, 
Buffer.from([1, 2, 3, 4])) + }) }) - - if (['mysql'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), [ - 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED', - 'BEGIN', - ]) - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), [ - 'BEGIN', - 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED', - ]) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) + } }) } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts new file mode 100644 index 000000000000..f216b2a02ac7 --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts @@ -0,0 +1,20 @@ +import { PrismaLibSQL } from '@prisma/adapter-libsql' +import { IntMode, createClient } from '@libsql/client' +import { describe } from 'node:test' +import { smokeTestClient } from './client' + +describe('libsql with @prisma/client', async () => { + const url = process.env.JS_LIBSQL_DATABASE_URL as string + const syncUrl = process.env.JS_LIBSQL_SYNC_URL + const authToken = process.env.JS_LIBSQL_AUTH_TOKEN + const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined + + const client = createClient({ url, syncUrl, authToken, intMode }) + const adapter = new PrismaLibSQL(client) + + if (syncUrl) { + await client.sync() + } + + smokeTestClient(adapter) +}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 3f659a6cb592..c1cd760f5f8d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -43,7 +43,10 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }, }) - assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') + if (flavour !== 'sqlite') { + assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') + } + console.log('[nodejs] created', JSON.stringify(created, null, 2)) const resultSet = await doQuery({ @@ -359,10 +362,65 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) }) + } else if (['sqlite'].includes(flavour)) { + it('sqlite', async () => { + const resultSet = await doQuery( + { + "action": "findMany", + "modelName": "type_test", + "query": { + "selection": { + "int_column": true, + "bigint_column": true, + "double_column": true, + "decimal_column": true, + "boolean_column": true, + "text_column": true, + "datetime_column": true, + } + } + } + ) + console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) + }) } else { throw new Error(`Missing test for flavour ${flavour}`) } }) + + it('write and read back bytes', async () => { + const createResultSet = await doQuery({ + action: 'createOne', + modelName: 'type_test_3', + query: { + selection: { + bytes: true, + }, + arguments: { + data: { + bytes: { + $type: 'Bytes', + value: 'AQID', + }, + }, + }, + }, + }) + console.log('[nodejs] createOne resultSet:') + console.dir(createResultSet, { depth: Infinity }) + + const findResultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test_3', + query: { + selection: { + bytes: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet:') + console.dir(findResultSet, { depth: Infinity }) + }) }) } diff --git 
a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts
new file mode 100644
index 000000000000..7f0a1038ec74
--- /dev/null
+++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts
@@ -0,0 +1,22 @@
+import { PrismaLibSQL } from '@prisma/adapter-libsql'
+import { bindAdapter } from '@prisma/driver-adapter-utils'
+import { IntMode, createClient } from '@libsql/client'
+import { describe } from 'node:test'
+import { smokeTestLibquery } from './libquery'
+
+describe('libsql', async () => {
+  const url = process.env.JS_LIBSQL_DATABASE_URL as string
+  const syncUrl = process.env.JS_LIBSQL_SYNC_URL
+  const authToken = process.env.JS_LIBSQL_AUTH_TOKEN
+  const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined
+
+  const client = createClient({ url, syncUrl, authToken, intMode })
+  const adapter = new PrismaLibSQL(client)
+  const driverAdapter = bindAdapter(adapter)
+
+  if (syncUrl) {
+    await client.sync()
+  }
+
+  smokeTestLibquery(driverAdapter, '../../prisma/sqlite/schema.prisma')
+})
diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs
index c64954e389cf..c69b9f020d61 100644
--- a/query-engine/driver-adapters/src/conversion.rs
+++ b/query-engine/driver-adapters/src/conversion.rs
@@ -8,6 +8,7 @@ use serde_json::value::Value as JsonValue;
 pub enum JSArg {
     RawString(String),
     Value(serde_json::Value),
+    Buffer(Vec<u8>),
 }
 
 impl From<JsonValue> for JSArg {
@@ -17,8 +18,8 @@ impl From<JsonValue> for JSArg {
 }
 
 // FromNapiValue is the napi equivalent to serde::Deserialize.
-// Note: we can safely leave this unimplemented as we don't need deserialize JSArg back to napi_value
-// (nor we need to). However, removing this altogether would cause a compile error.
+// Note: we can safely leave this unimplemented, as we don't need to deserialize napi_value back to JSArg.
+// However, removing this altogether would cause a compile error.
 impl FromNapiValue for JSArg {
     unsafe fn from_napi_value(_env: napi::sys::napi_env, _napi_value: napi::sys::napi_value) -> napi::Result<Self> {
         unreachable!()
@@ -31,6 +32,10 @@ impl ToNapiValue for JSArg {
         match value {
             JSArg::RawString(s) => ToNapiValue::to_napi_value(env, s),
             JSArg::Value(v) => ToNapiValue::to_napi_value(env, v),
+            JSArg::Buffer(bytes) => ToNapiValue::to_napi_value(
+                env,
+                napi::Env::from_raw(env).create_arraybuffer_with_data(bytes)?.into_raw(),
+            ),
         }
     }
 }
@@ -47,6 +52,10 @@ pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result<Vec<JSArg>> {
             }
             None => JsonValue::Null.into(),
         },
+        QuaintValue::Bytes(bytes) => match bytes {
+            Some(bytes) => JSArg::Buffer(bytes.to_vec()),
+            None => JsonValue::Null.into(),
+        },
         quaint_value => {
             let json: JsonValue = quaint_value.clone().into();
             json.into()
diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs
index bc17e2963236..7e30f73c29a3 100644
--- a/query-engine/driver-adapters/src/proxy.rs
+++ b/query-engine/driver-adapters/src/proxy.rs
@@ -11,7 +11,7 @@ use quaint::connector::ResultSet as QuaintResultSet;
 use quaint::Value as QuaintValue;
 
 // TODO(jkomyno): import these 3rd-party crates from the `quaint-core` crate.
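The proxy.rs hunks below relax `js_value_to_quaint` for the new SQLite flavour: integers may now arrive as plain JS numbers or as strings, booleans as `0`/`1` or string literals, and datetimes in either SQLite or RFC 3339 format. A minimal sketch of the resulting behavior, reusing the `js_value_to_quaint(json_value, column_type, column_name)` signature exercised by the tests further down (illustrative only, not part of the patch):

```rust
// Sketch only: with the relaxed conversions, values the way SQLite drivers
// commonly encode them still deserialize into the expected quaint values.
let b = js_value_to_quaint(serde_json::json!(1), ColumnType::Boolean, "is_active");
assert_eq!(b, QuaintValue::boolean(true));

let n = js_value_to_quaint(serde_json::json!("42"), ColumnType::Int32, "count");
assert_eq!(n, QuaintValue::int32(42));
```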
-use bigdecimal::BigDecimal;
+use bigdecimal::{BigDecimal, FromPrimitive};
 use chrono::{DateTime, Utc};
 use chrono::{NaiveDate, NaiveTime};
 
@@ -44,7 +44,7 @@ pub(crate) struct TransactionProxy {
     /// commit transaction
     commit: AsyncJsFunction<(), ()>,
 
-    /// rollback transcation
+    /// rollback transaction
     rollback: AsyncJsFunction<(), ()>,
 }
 
@@ -175,16 +175,24 @@ fn js_value_to_quaint(
             // n.as_i32() is not implemented, so we need to downcast from i64 instead
             QuaintValue::int32(n.as_i64().expect("number must be an i32") as i32)
         }
+        serde_json::Value::String(s) => {
+            let n = s.parse::<i32>().expect("string-encoded number must be an i32");
+            QuaintValue::int32(n)
+        }
         serde_json::Value::Null => QuaintValue::Int32(None),
         mismatch => panic!("Expected an i32 number in column {}, found {}", column_name, mismatch),
     },
     ColumnType::Int64 => match json_value {
+        serde_json::Value::Number(n) => QuaintValue::int64(n.as_i64().expect("number must be an i64")),
         serde_json::Value::String(s) => {
             let n = s.parse::<i64>().expect("string-encoded number must be an i64");
             QuaintValue::int64(n)
         }
         serde_json::Value::Null => QuaintValue::Int64(None),
-        mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch),
+        mismatch => panic!(
+            "Expected a string or number in column {}, found {}",
+            column_name, mismatch
+        ),
     },
     ColumnType::Float => match json_value {
         // n.as_f32() is not implemented, so we need to downcast from f64 instead.
@@ -203,6 +211,11 @@ fn js_value_to_quaint(
             let decimal = BigDecimal::from_str(&s).expect("invalid numeric value");
             QuaintValue::numeric(decimal)
         }
+        serde_json::Value::Number(n) => QuaintValue::numeric(
+            n.as_f64()
+                .and_then(BigDecimal::from_f64)
+                .expect("number must be an f64"),
+        ),
         serde_json::Value::Null => QuaintValue::Numeric(None),
         mismatch => panic!(
             "Expected a string-encoded number in column {}, found {}",
@@ -212,6 +225,16 @@
     ColumnType::Boolean => match json_value {
         serde_json::Value::Bool(b) => QuaintValue::boolean(b),
         serde_json::Value::Null => QuaintValue::Boolean(None),
+        serde_json::Value::Number(n) => QuaintValue::boolean(match n.as_i64() {
+            Some(0) => false,
+            Some(1) => true,
+            _ => panic!("expected number-encoded boolean to be 0 or 1, got {n}"),
+        }),
+        serde_json::Value::String(s) => QuaintValue::boolean(match s.as_str() {
+            "false" | "FALSE" | "0" => false,
+            "true" | "TRUE" | "1" => true,
+            _ => panic!("expected string-encoded boolean, got \"{s}\""),
+        }),
         mismatch => panic!("Expected a boolean in column {}, found {}", column_name, mismatch),
     },
     ColumnType::Char => match json_value {
@@ -243,8 +266,9 @@
     ColumnType::DateTime => match json_value {
         serde_json::Value::String(s) => {
             let datetime = chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f")
+                .map(|dt| DateTime::from_utc(dt, Utc))
+                .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::<Utc>::from))
                 .unwrap_or_else(|_| panic!("Expected a datetime string, found {:?}", &s));
-            let datetime: DateTime<Utc> = DateTime::from_utc(datetime, Utc);
             QuaintValue::datetime(datetime)
         }
         serde_json::Value::Null => QuaintValue::DateTime(None),
@@ -261,8 +285,18 @@
     },
     ColumnType::Bytes => match json_value {
         serde_json::Value::String(s) => QuaintValue::Bytes(Some(s.into_bytes().into())),
+        serde_json::Value::Array(array) => {
+            let bytes: Option<Vec<u8>> = array
+                .iter()
+                .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok()))
+                .collect();
+            QuaintValue::Bytes(Some(bytes.expect("elements of the array must be u8")))
+        }
         serde_json::Value::Null => QuaintValue::Bytes(None),
-        mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch),
+        mismatch => panic!(
+            "Expected a string or an array in column {}, found {}",
+            column_name, mismatch
+        ),
     },
     unimplemented => {
         todo!("support column type {:?} in column {}", unimplemented, column_name)
@@ -353,7 +387,7 @@ impl TransactionProxy {
     pub fn new(js_transaction: &JsObject) -> napi::Result<Self> {
         let commit = js_transaction.get_named_property("commit")?;
         let rollback = js_transaction.get_named_property("rollback")?;
-        let options: TransactionOptions = js_transaction.get_named_property("options")?;
+        let options = js_transaction.get_named_property("options")?;
 
         Ok(Self {
             commit,
@@ -422,6 +456,12 @@ mod proxy_test {
         let json_value = serde_json::Value::Number(serde_json::Number::from(n));
         let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
         assert_eq!(quaint_value, QuaintValue::Int32(Some(n)));
+
+        // string-encoded
+        let n = i32::MAX;
+        let json_value = serde_json::Value::String(n.to_string());
+        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
+        assert_eq!(quaint_value, QuaintValue::Int32(Some(n)));
     }
 
     #[test]
@@ -448,6 +488,12 @@
         let json_value = serde_json::Value::String(n.to_string());
         let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
         assert_eq!(quaint_value, QuaintValue::Int64(Some(n)));
+
+        // number-encoded
+        let n: i64 = (1 << 53) - 1; // max JS safe integer
+        let json_value = serde_json::Value::Number(serde_json::Number::from(n));
+        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
+        assert_eq!(quaint_value, QuaintValue::Int64(Some(n)));
     }
 
     #[test]
@@ -532,16 +578,16 @@
         test_null(QuaintValue::Boolean(None), column_type);
 
         // true
-        let bool_val = true;
-        let json_value = serde_json::Value::Bool(bool_val);
-        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
-        assert_eq!(quaint_value, QuaintValue::Boolean(Some(bool_val)));
+        for truthy_value in [json!(true), json!(1), json!("true"), json!("TRUE"), json!("1")] {
+            let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name");
+            assert_eq!(quaint_value, QuaintValue::Boolean(Some(true)));
+        }
 
         // false
-        let bool_val = false;
-        let json_value = serde_json::Value::Bool(bool_val);
-        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
-        assert_eq!(quaint_value, QuaintValue::Boolean(Some(bool_val)));
+        for falsy_value in [json!(false), json!(0), json!("false"), json!("FALSE"), json!("0")] {
+            let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name");
+            assert_eq!(quaint_value, QuaintValue::Boolean(Some(false)));
+        }
     }
 
     #[test]
diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs
index 5dbb549e677e..310f69641696 100644
--- a/query-engine/driver-adapters/src/queryable.rs
+++ b/query-engine/driver-adapters/src/queryable.rs
@@ -6,10 +6,7 @@ use async_trait::async_trait;
 use napi::JsObject;
 use psl::datamodel_connector::Flavour;
 use quaint::{
-    connector::{
-        metrics::{self},
-        IsolationLevel, Transaction,
-    },
+    connector::{metrics, IsolationLevel, Transaction},
     error::{Error, ErrorKind},
     prelude::{Query as QuaintQuery, Queryable as QuaintQueryable, ResultSet, TransactionCapable},
     visitor::{self, Visitor},
@@ -112,6 +109,13 @@ impl QuaintQueryable for JsBaseQueryable {
             return Err(Error::builder(ErrorKind::invalid_isolation_level(&isolation_level)).build());
         }
 
+        if self.flavour == Flavour::Sqlite {
+            return match isolation_level {
+                IsolationLevel::Serializable => Ok(()),
+                _ => Err(Error::builder(ErrorKind::invalid_isolation_level(&isolation_level)).build()),
+            };
+        }
+
         self.raw_cmd(&format!("SET TRANSACTION ISOLATION LEVEL {isolation_level}"))
             .await
     }

From 103a7a2cc1c1c5bfac05b79cb834bf588c9c38fa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?=
Date: Wed, 27 Sep 2023 13:09:04 +0200
Subject: [PATCH 040/128] BigDecimal values converted to JSON are transformed
 into an f64 value (#4285)

using the following internal conversion:

```
fn to_f64(&self) -> Option<f64> {
    self.int_val.to_f64().map(|x| x * 10f64.powi(-self.scale as i32))
}
```

This computation causes the rounding problem seen in driver adapters: for
example, a `BigDecimal` representing 0.3 (`int_val: 3`, `scale: 1`) comes out
as `3.0 * 0.1 == 0.30000000000000004`. In quaint this does not manifest, as
the driver takes the string representation and not the converted value for
the BigDecimal.
---
 query-engine/driver-adapters/src/conversion.rs | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs
index c69b9f020d61..ac06a3cff586 100644
--- a/query-engine/driver-adapters/src/conversion.rs
+++ b/query-engine/driver-adapters/src/conversion.rs
@@ -56,10 +56,14 @@ pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result<Vec<JSArg>> {
             Some(bytes) => JSArg::Buffer(bytes.to_vec()),
             None => JsonValue::Null.into(),
         },
-        quaint_value => {
-            let json: JsonValue = quaint_value.clone().into();
-            json.into()
-        }
+        quaint_value @ QuaintValue::Numeric(bd) => match bd {
+            Some(bd) => match bd.to_string().parse::<f64>() {
+                Ok(double) => JSArg::from(JsonValue::from(double)),
+                Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())),
+            },
+            None => JsonValue::Null.into(),
+        },
+        quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())),
     };
 
     values.push(res);

From 6dc5bad713cea7efd29c9e8d99886ca68d196477 Mon Sep 17 00:00:00 2001
From: Flavian Desverne
Date: Wed, 27 Sep 2023 14:35:08 +0200
Subject: [PATCH 041/128] perf: improve to-one relational filters (#4235)

---
 quaint/src/ast/select.rs | 8 +
 quaint/src/ast/table.rs | 14 +
 .../filters/extended_relation_filters.rs | 4 +-
 .../tests/queries/filters/many_relation.rs | 136 +++
 .../tests/queries/filters/one_relation.rs | 16 +-
 .../src/cursor_condition.rs | 2 +-
 .../src/database/operations/update.rs | 11 +-
 .../src/database/operations/upsert.rs | 4 +-
 .../src/database/operations/write.rs | 4 +-
 .../sql-query-connector/src/filter/alias.rs | 75 ++
 .../sql-query-connector/src/filter/mod.rs | 42 +
 .../visitor.rs} | 800 ++++++++++--------
 .../sql-query-connector/src/join_utils.rs | 91 +-
 .../connectors/sql-query-connector/src/lib.rs | 4 +-
 .../sql-query-connector/src/ordering.rs | 16 +-
 .../src/query_builder/read.rs | 28 +-
 .../sql-query-connector/src/query_ext.rs | 6 +-
 17 files changed, 840 insertions(+), 421 deletions(-)

diff --git a/quaint/src/ast/select.rs b/quaint/src/ast/select.rs
index 6ab8df310fe4..b08dce6624eb 100644
--- a/quaint/src/ast/select.rs
+++ b/quaint/src/ast/select.rs
@@ -453,6 +453,14 @@ impl<'a> Select<'a> {
         self
     }
 
+    pub fn join<J>(mut self, join: J) -> Self
+    where
+        J: Into<Join<'a>>,
+    {
+        self.joins.push(join.into());
+        self
+    }
+
     /// Adds an ordering to the `ORDER BY` section.
     ///
     /// ```rust
diff --git a/quaint/src/ast/table.rs b/quaint/src/ast/table.rs
index 4b5d50161af9..4eca73f27bc7 100644
--- a/quaint/src/ast/table.rs
+++ b/quaint/src/ast/table.rs
@@ -344,6 +344,20 @@ impl<'a> Table<'a> {
 
         self
     }
+
+    pub fn join<J>(self, join: J) -> Self
+    where
+        J: Into<Join<'a>>,
+    {
+        let join: Join = join.into();
+
+        match join {
+            Join::Inner(x) => self.inner_join(x),
+            Join::Left(x) => self.left_join(x),
+            Join::Right(x) => self.right_join(x),
+            Join::Full(x) => self.full_join(x),
+        }
+    }
 }
 
 impl<'a> From<&'a str> for Table<'a> {
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs
index 8dec5383fd0a..dea57eb1e0e4 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/extended_relation_filters.rs
@@ -368,7 +368,7 @@ mod ext_rel_filters {
         test_data(&runner).await?;
 
         insta::assert_snapshot!(
-            run_query!(&runner, r#"{ findManyAlbum(where: { Tracks: { some: { OR:[{ MediaType: {is: { Name: { equals: "MediaType1" }}}}, { Genre: { is: { Name: { equals: "Genre2" }}}}]}}}) { Title }}"#),
+            run_query!(&runner, r#"{ findManyAlbum(where: { Tracks: { some: { OR:[{ MediaType: {is: { Name: { equals: "MediaType1" }}}}, { Genre: { is: { Name: { equals: "Genre2" }}}}]}}}, orderBy: { Title: asc }) { Title }}"#),
             @r###"{"data":{"findManyAlbum":[{"Title":"Album1"},{"Title":"Album3"},{"Title":"Album4"},{"Title":"Album5"}]}}"###
         );
@@ -507,7 +507,7 @@ mod ext_rel_filters {
         test_data(&runner).await?;
 
         insta::assert_snapshot!(
-            run_query!(&runner, r#"{ findManyGenre(where: { Tracks: { some: {} }}) { Name }}"#),
+            run_query!(&runner, r#"{ findManyGenre(where: { Tracks: { some: {} }}, orderBy: { Name: asc }) { Name }}"#),
            @r###"{"data":{"findManyGenre":[{"Name":"Genre1"},{"Name":"Genre2"},{"Name":"Genre3"}]}}"###
         );
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs
index e47c98c4cf2b..2f50edbe2628 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/many_relation.rs
@@ -276,6 +276,142 @@ mod many_relation {
         Ok(())
     }
 
+    fn schema_2() -> String {
+        let schema = indoc! {
+            r#"
+            model Blog {
+                #id(id, Int, @id)
+                name  String
+                posts Post[]
+            }
+
+            model Post {
+                #id(id, Int, @id)
+
+                blog_id Int
+                blog    Blog @relation(fields: [blog_id], references: [id])
+
+                comment Comment?
+            }
+
+            model Comment {
+                #id(id, Int, @id)
+                popularity Int
+
+                postId Int  @unique
+                post   Post @relation(fields: [postId], references: [id])
+            }
+            "#
+        };
+
+        schema.to_owned()
+    }
+
+    // 2 levels to-many/to-one relation filter, all combinations.
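+    // For each of `some`/`none`/`every` on the to-many side, the test below
+    // exercises both `is` and `isNot` on the nested to-one side. The seed data
+    // includes posts without a comment and a blog without any posts, so the
+    // null-handling of the joined to-one side is covered as well.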
+ #[connector_test(schema(schema_2))] + async fn l2_m_1_rel_all(runner: Runner) -> TestResult<()> { + // Seed + run_query!( + &runner, + r#"mutation { createOneBlog(data: { + id: 1, + name: "blog1", + posts: { + create: [ + { id: 1, comment: { create: { id: 1, popularity: 10 } } }, + { id: 2, comment: { create: { id: 2, popularity: 50 } } }, + { id: 3, comment: { create: { id: 3, popularity: 100 } } }, + ] + } + }) { id } } + "# + ); + + run_query!( + &runner, + r#"mutation { createOneBlog(data: { + id: 2, + name: "blog2", + posts: { + create: [ + { id: 4, comment: { create: { id: 4, popularity: 1000 } } }, + { id: 5, comment: { create: { id: 5, popularity: 1000 } } }, + ] + } + }) { id } } + "# + ); + + // posts without comment + run_query!( + &runner, + r#"mutation { createOneBlog(data: { + id: 3, + name: "blog3", + posts: { + create: [ + { id: 6 }, + { id: 7 }, + ] + } + }) { id } } + "# + ); + + // blog without posts + run_query!( + &runner, + r#"mutation { createOneBlog(data: { id: 4, name: "blog4" }) { id } } "# + ); + + // some / is + insta::assert_snapshot!( + run_query!(&runner, r#"query { findManyBlog(where: { posts: { some: { comment: { is: { popularity: { lt: 1000 } } } } } }) { name }}"#), + @r###"{"data":{"findManyBlog":[{"name":"blog1"}]}}"### + ); + + // some / isNot + // TODO: Investigate why MongoDB returns a different result + match_connector_result!( + &runner, + r#"query { findManyBlog(where: { posts: { some: { comment: { isNot: { popularity: { gt: 100 } } } } } }) { name }}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"},{"name":"blog3"}]}}"#] + ); + + // none / is + insta::assert_snapshot!( + run_query!(&runner, r#"query { findManyBlog(where: { posts: { none: { comment: { is: { popularity: { lt: 1000 } } } } } }) { name }}"#), + @r###"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog3"},{"name":"blog4"}]}}"### + ); + + // none / isNot + // TODO: Investigate why MongoDB returns a different result + match_connector_result!( + &runner, + r#"query { findManyBlog(where: { posts: { none: { comment: { isNot: { popularity: { gt: 100 } } } } } }) { name }}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog3"},{"name":"blog4"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog4"}]}}"#] + ); + + // every / is + insta::assert_snapshot!( + run_query!(&runner, r#"query { findManyBlog(where: { posts: { every: { comment: { is: { popularity: { gte: 1000 } } } } } }) { name }}"#), + @r###"{"data":{"findManyBlog":[{"name":"blog2"},{"name":"blog4"}]}}"### + ); + + // every / isNot + // TODO: Investigate why MongoDB returns a different result + match_connector_result!( + &runner, + r#"query { findManyBlog(where: { posts: { every: { comment: { isNot: { popularity: { gte: 1000 } } } } } }) { name }}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"},{"name":"blog4"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog1"},{"name":"blog3"},{"name":"blog4"}]}}"#] + ); + + Ok(()) + } + // Note: Only the original author knows why this is considered crazy. 
#[connector_test] async fn crazy_filters(runner: Runner) -> TestResult<()> { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs index e0d74adcaaf2..ff4cddfc9b57 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs @@ -52,6 +52,10 @@ mod one_relation { #[connector_test] async fn l1_one_rel(runner: Runner) -> TestResult<()> { test_data(&runner).await?; + run_query!( + &runner, + r#"mutation { createOneBlog( data: { name: "blog 4" } ) { name } }"# + ); insta::assert_snapshot!( run_query!(&runner, r#"query { findManyPost(where: { title: { equals: "post 2" }}) { title }}"#), @@ -59,12 +63,12 @@ mod one_relation { ); insta::assert_snapshot!( - run_query!(&runner, r#"{findManyPost(where:{blog:{is:{name:{equals: "blog 1"}}}}) { title }}"#), + run_query!(&runner, r#"{findManyPost(where:{blog:{is:{ name:{equals: "blog 1"}}}}) { title }}"#), @r###"{"data":{"findManyPost":[{"title":"post 1"}]}}"### ); insta::assert_snapshot!( - run_query!(&runner, r#"{findManyBlog(where: { post: { is:{popularity: { gte: 100 }}}}){name}}"#), + run_query!(&runner, r#"{findManyBlog(where: { post: { is:{ popularity: { gte: 100 }}}}){name}}"#), @r###"{"data":{"findManyBlog":[{"name":"blog 2"},{"name":"blog 3"}]}}"### ); @@ -73,9 +77,11 @@ mod one_relation { @r###"{"data":{"findManyBlog":[{"name":"blog 3"}]}}"### ); - insta::assert_snapshot!( - run_query!(&runner, r#"{findManyBlog(where: { post: { isNot:{popularity: { gte: 500 }}}}){name}}"#), - @r###"{"data":{"findManyBlog":[{"name":"blog 1"},{"name":"blog 2"}]}}"### + match_connector_result!( + &runner, + r#"{findManyBlog(where: { post: { isNot:{ popularity: { gte: 500 }}}}){name}}"#, + MongoDb(_) => vec![r#"{"data":{"findManyBlog":[{"name":"blog 1"},{"name":"blog 2"}]}}"#], + _ => vec![r#"{"data":{"findManyBlog":[{"name":"blog 1"},{"name":"blog 2"},{"name":"blog 4"}]}}"#] ); runner diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs index 4fa10dde9483..72bbd8e273ec 100644 --- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs +++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs @@ -226,7 +226,7 @@ pub(crate) fn build( let order_subquery = order_by_defs .iter() .flat_map(|j| &j.joins) - .fold(order_subquery, |acc, join| acc.left_join(join.data.clone())); + .fold(order_subquery, |acc, join| acc.join(join.data.clone())); let len = definitions.len(); let reverse = query_arguments.needs_reversed_order(); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs index 75657406d6ec..2270d6c6fefa 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/update.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/update.rs @@ -1,7 +1,7 @@ use super::read::get_single_record; use crate::column_metadata::{self, ColumnMetadata}; -use crate::filter_conversion::AliasedCondition; +use crate::filter::FilterBuilder; use crate::query_builder::write::{build_update_and_set_query, chunk_update_with_ids}; use crate::row::ToSqlRow; use crate::{Context, QueryExt, Queryable}; @@ -31,8 +31,9 @@ pub(crate) async 
fn update_one_with_selection(
         return get_single_record(conn, model, &filter, &selected_fields, &[], ctx).await;
     }
 
-    let update = build_update_and_set_query(model, args, Some(&selected_fields), ctx)
-        .so_that(build_update_one_filter(record_filter).aliased_condition_from(None, false, ctx));
+    let cond = FilterBuilder::without_top_level_joins().visit_filter(build_update_one_filter(record_filter), ctx);
+
+    let update = build_update_and_set_query(model, args, Some(&selected_fields), ctx).so_that(cond);
 
     let field_names: Vec<_> = selected_fields.db_names().collect();
     let idents = selected_fields.type_identifiers_with_arities();
@@ -101,7 +102,7 @@ pub(crate) async fn update_many_from_filter(
     ctx: &Context<'_>,
 ) -> crate::Result<usize> {
     let update = build_update_and_set_query(model, args, None, ctx);
-    let filter_condition = record_filter.filter.aliased_condition_from(None, false, ctx);
+    let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.filter, ctx);
     let update = update.so_that(filter_condition);
 
     let count = conn.execute(update.into()).await?;
@@ -117,7 +118,7 @@ pub(crate) async fn update_many_from_ids_and_filter(
     args: WriteArgs,
     ctx: &Context<'_>,
 ) -> crate::Result<(usize, Vec<SelectionResult>)> {
-    let filter_condition = record_filter.filter.aliased_condition_from(None, false, ctx);
+    let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.filter.clone(), ctx);
     let ids: Vec<SelectionResult> = conn.filter_selectors(model, record_filter, ctx).await?;
 
     if ids.is_empty() {
diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs
index 23b14ea93a77..cfd473923ffc 100644
--- a/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/operations/upsert.rs
@@ -1,6 +1,6 @@
 use crate::{
     column_metadata,
-    filter_conversion::AliasedCondition,
+    filter::FilterBuilder,
     model_extensions::AsColumns,
     query_builder::write::{build_update_and_set_query, create_record},
     row::ToSqlRow,
@@ -21,7 +21,7 @@ pub(crate) async fn native_upsert(
 
     let meta = column_metadata::create(&field_names, &idents);
 
-    let where_condition = upsert.filter().aliased_condition_from(None, false, ctx);
+    let where_condition = FilterBuilder::without_top_level_joins().visit_filter(upsert.filter().clone(), ctx);
 
     let update = build_update_and_set_query(upsert.model(), upsert.update().clone(), None, ctx).so_that(where_condition);
diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs
index f23b01a457d0..8463a710b67f 100644
--- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs
+++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs
@@ -1,6 +1,6 @@
 use super::update::*;
 use crate::column_metadata;
-use crate::filter_conversion::AliasedCondition;
+use crate::filter::FilterBuilder;
 use crate::row::ToSqlRow;
 use crate::{
     error::SqlError, model_extensions::*, query_builder::write, sql_trace::SqlTraceComment, Context, QueryExt,
@@ -361,7 +361,7 @@ pub(crate) async fn delete_records(
     record_filter: RecordFilter,
     ctx: &Context<'_>,
 ) -> crate::Result<usize> {
-    let filter_condition = record_filter.clone().filter.aliased_condition_from(None, false, ctx);
+    let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.clone().filter, ctx);
     let ids = conn.filter_selectors(model, record_filter, ctx).await?;
     let ids: Vec<&SelectionResult> = ids.iter().collect();
     let count = ids.len();
diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/connectors/sql-query-connector/src/filter/alias.rs
new file mode 100644
index 000000000000..61686929d400
--- /dev/null
+++ b/query-engine/connectors/sql-query-connector/src/filter/alias.rs
@@ -0,0 +1,75 @@
+use crate::{model_extensions::AsColumn, *};
+
+use prisma_models::ScalarField;
+use quaint::prelude::Column;
+
+#[derive(Clone, Copy, Debug)]
+/// A distinction in aliasing to separate the parent table and the joined data
+/// in the statement.
+#[derive(Default)]
+pub enum AliasMode {
+    #[default]
+    Table,
+    Join,
+}
+
+#[derive(Clone, Copy, Debug, Default)]
+/// Aliasing tool to count the nesting level to help with heavily nested
+/// self-related queries.
+pub(crate) struct Alias {
+    counter: usize,
+    mode: AliasMode,
+}
+
+impl Alias {
+    /// Increment the alias as a new copy.
+    ///
+    /// Use when nesting one level down to a new subquery. `AliasMode` is
+    /// required due to the fact the current mode can be in `AliasMode::Join`.
+    pub fn inc(&self, mode: AliasMode) -> Self {
+        Self {
+            counter: self.counter + 1,
+            mode,
+        }
+    }
+
+    /// Flip the alias to a different mode keeping the same nesting count.
+    pub fn flip(&self, mode: AliasMode) -> Self {
+        Self {
+            counter: self.counter,
+            mode,
+        }
+    }
+
+    /// A string representation of the current alias. The current mode can be
+    /// overridden by defining the `mode_override`.
+    pub fn to_string(&self, mode_override: Option<AliasMode>) -> String {
+        match mode_override.unwrap_or(self.mode) {
+            AliasMode::Table => format!("t{}", self.counter),
+            AliasMode::Join => format!("j{}", self.counter),
+        }
+    }
+}
+
+pub(crate) trait AliasedColumn {
+    /// Conversion to a column. Column will point to the given alias if provided, otherwise the fully qualified path.
+    ///
+    /// Alias should be used only when nesting, making the top level queries
+    /// more explicit.
+    fn aliased_col(self, alias: Option<Alias>, ctx: &Context<'_>) -> Column<'static>;
+}
+
+impl AliasedColumn for &ScalarField {
+    fn aliased_col(self, alias: Option<Alias>, ctx: &Context<'_>) -> Column<'static> {
+        self.as_column(ctx).aliased_col(alias, ctx)
+    }
+}
+
+impl AliasedColumn for Column<'static> {
+    fn aliased_col(self, alias: Option<Alias>, _ctx: &Context<'_>) -> Column<'static> {
+        match alias {
+            Some(alias) => self.table(alias.to_string(None)),
+            None => self,
+        }
+    }
+}
diff --git a/query-engine/connectors/sql-query-connector/src/filter/mod.rs b/query-engine/connectors/sql-query-connector/src/filter/mod.rs
new file mode 100644
index 000000000000..bbf3557b16b7
--- /dev/null
+++ b/query-engine/connectors/sql-query-connector/src/filter/mod.rs
@@ -0,0 +1,42 @@
+mod alias;
+mod visitor;
+
+use connector_interface::Filter;
+use quaint::prelude::*;
+use visitor::*;
+
+use crate::{context::Context, join_utils::AliasedJoin};
+
+pub(crate) struct FilterBuilder {}
+pub(crate) struct FilterBuilderWithJoins {}
+pub(crate) struct FilterBuilderWithoutJoins {}
+
+impl FilterBuilder {
+    pub(crate) fn with_top_level_joins() -> FilterBuilderWithJoins {
+        FilterBuilderWithJoins {}
+    }
+
+    pub(crate) fn without_top_level_joins() -> FilterBuilderWithoutJoins {
+        FilterBuilderWithoutJoins {}
+    }
+}
+
+impl FilterBuilderWithJoins {
+    /// Visits a filter and returns additional top-level joins that need to be manually dealt with.
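+    /// The joins stem from relation traversals (e.g. filtering through a to-one hop); the caller
+    /// is responsible for attaching them to the surrounding `SELECT` (for instance with the new
+    /// generic `Select::join`), otherwise the returned condition would reference tables that are
+    /// never joined.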
+ pub(crate) fn visit_filter( + &self, + filter: Filter, + ctx: &Context, + ) -> (ConditionTree<'static>, Option>) { + FilterVisitor::with_top_level_joins().visit_filter(filter, ctx) + } +} + +impl FilterBuilderWithoutJoins { + /// Visits a filter without any top-level joins. Can be safely used in any context. + pub(crate) fn visit_filter(&self, filter: Filter, ctx: &Context) -> ConditionTree<'static> { + let (cond, _) = FilterVisitor::without_top_level_joins().visit_filter(filter, ctx); + + cond + } +} diff --git a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs similarity index 65% rename from query-engine/connectors/sql-query-connector/src/filter_conversion.rs rename to query-engine/connectors/sql-query-connector/src/filter/visitor.rs index ed2202bcdc84..274301892f93 100644 --- a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -1,208 +1,300 @@ +use super::alias::*; +use crate::join_utils::{compute_one2m_join, AliasedJoin}; use crate::{model_extensions::*, Context}; + use connector_interface::filter::*; use prisma_models::prelude::*; use quaint::ast::concat; use quaint::ast::*; use std::convert::TryInto; -#[derive(Clone, Copy, Debug)] -/// A distinction in aliasing to separate the parent table and the joined data -/// in the statement. -#[derive(Default)] -pub enum AliasMode { - #[default] - Table, - Join, +pub(crate) trait FilterVisitorExt { + fn visit_filter(&mut self, filter: Filter, ctx: &Context<'_>) + -> (ConditionTree<'static>, Option>); + fn visit_relation_filter( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>); + fn visit_scalar_filter(&mut self, filter: ScalarFilter, ctx: &Context<'_>) -> ConditionTree<'static>; + fn visit_scalar_list_filter(&mut self, filter: ScalarListFilter, ctx: &Context<'_>) -> ConditionTree<'static>; + fn visit_one_relation_is_null_filter( + &mut self, + filter: OneRelationIsNullFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>); + fn visit_aggregation_filter(&mut self, filter: AggregationFilter, ctx: &Context<'_>) -> ConditionTree<'static>; } -#[derive(Clone, Copy, Debug, Default)] -/// Aliasing tool to count the nesting level to help with heavily nested -/// self-related queries. -pub(crate) struct Alias { - counter: usize, - mode: AliasMode, +#[derive(Debug, Clone, Default)] +pub(crate) struct FilterVisitor { + /// The last alias that's been rendered. + last_alias: Option, + /// The parent alias, used when rendering nested filters so that a child filter can refer to its join. + parent_alias: Option, + /// Whether filters can return top-level joins. + with_top_level_joins: bool, + /// Whether this visitor traverses nested filters. + is_nested: bool, + /// Whether the visitor is in a NOT clause. + reverse: bool, } -impl Alias { - /// Increment the alias as a new copy. - /// - /// Use when nesting one level down to a new subquery. `AliasMode` is - /// required due to the fact the current mode can be in `AliasMode::Join`. - pub fn inc(&self, mode: AliasMode) -> Self { +impl FilterVisitor { + pub fn with_top_level_joins() -> Self { Self { - counter: self.counter + 1, - mode, + with_top_level_joins: true, + ..Default::default() } } - /// Flip the alias to a different mode keeping the same nesting count. 
- pub fn flip(&self, mode: AliasMode) -> Self { + pub fn without_top_level_joins() -> Self { Self { - counter: self.counter, - mode, - } - } - - /// A string representation of the current alias. The current mode can be - /// overridden by defining the `mode_override`. - pub fn to_string(&self, mode_override: Option) -> String { - match mode_override.unwrap_or(self.mode) { - AliasMode::Table => format!("t{}", self.counter), - AliasMode::Join => format!("j{}", self.counter), + with_top_level_joins: false, + ..Default::default() } } -} -#[derive(Clone)] -pub struct ConditionState { - reverse: bool, - alias: Option, -} + /// Returns the next join/table alias by increasing the counter of the last alias. + fn next_alias(&mut self, mode: AliasMode) -> Alias { + let next_alias = self.last_alias.unwrap_or_default().inc(mode); + self.last_alias = Some(next_alias); -impl ConditionState { - fn new(alias: Option, reverse: bool) -> Self { - Self { reverse, alias } + next_alias } - fn invert_reverse(self) -> Self { - Self::new(self.alias, !self.reverse) + /// Returns the parent alias, if there's one set, so that nested filters can refer to the parent join/table. + fn parent_alias(&self) -> Option { + self.parent_alias } - fn alias(&self) -> Option { - self.alias + /// A top-level join can be rendered if we're explicitly allowing it or if we're in a nested visitor. + fn can_render_join(&self) -> bool { + self.with_top_level_joins || self.is_nested } + /// Returns whether the visitor is in a NOT clause. fn reverse(&self) -> bool { self.reverse } -} -pub(crate) trait AliasedCondition { - /// Conversion to a query condition tree. Columns will point to the given - /// alias if provided, otherwise using the fully qualified path. - /// - /// Alias should be used only when nesting, making the top level queries - /// more explicit. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static>; - - fn aliased_condition_from(&self, alias: Option, reverse: bool, ctx: &Context<'_>) -> ConditionTree<'static> - where - Self: Sized + Clone, - { - self.clone().aliased_cond(ConditionState::new(alias, reverse), ctx) + fn invert_reverse(&mut self, f: impl FnOnce(&mut Self) -> T) -> T { + self.reverse = !self.reverse; + let res = f(self); + self.reverse = !self.reverse; + res } -} -trait AliasedSelect { - /// Conversion to a select. Columns will point to the given - /// alias if provided, otherwise using the fully qualified path. - /// - /// Alias should be used only when nesting, making the top level queries - /// more explicit. - fn aliased_sel(self, alias: Option, ctx: &Context<'_>) -> Select<'static>; -} + fn update_last_alias(&mut self, nested_visitor: &Self) -> &mut Self { + if let Some(alias) = nested_visitor.last_alias { + self.last_alias = Some(alias); + } -trait AliasedColumn { - /// Conversion to a column. Column will point to the given alias if provided, otherwise the fully qualified path. - /// - /// Alias should be used only when nesting, making the top level queries - /// more explicit. 
- fn aliased_col(self, alias: Option, ctx: &Context<'_>) -> Column<'static>; -} + self + } + + fn create_nested_visitor(&self, parent_alias: Alias) -> Self { + let mut nested_visitor = self.clone(); + nested_visitor.is_nested = true; + nested_visitor.parent_alias = Some(parent_alias); -impl AliasedColumn for &ScalarFieldRef { - fn aliased_col(self, alias: Option, ctx: &Context<'_>) -> Column<'static> { - self.as_column(ctx).aliased_col(alias, ctx) + nested_visitor } -} -impl AliasedColumn for Column<'static> { - fn aliased_col(self, alias: Option, _ctx: &Context<'_>) -> Column<'static> { - match alias { - Some(alias) => self.table(alias.to_string(None)), - None => self, + fn visit_nested_filter(&mut self, parent_alias: Alias, f: impl FnOnce(&mut Self) -> T) -> T { + let mut nested_visitor = self.create_nested_visitor(parent_alias); + let res = f(&mut nested_visitor); + // Ensures the alias counter is updated after building the nested filter so that we don't render duplicate aliases. + self.update_last_alias(&nested_visitor); + + res + } + + fn visit_relation_filter_select(&mut self, filter: RelationFilter, ctx: &Context<'_>) -> Select<'static> { + let alias = self.next_alias(AliasMode::Table); + let condition = filter.condition; + + // Perf: We can skip a join if the relation is inlined on the related model. + // In this case, we can select the related table's foreign key instead of joining. + // This is not possible in the case of M2M implicit relations. + if filter.field.related_field().is_inlined_on_enclosing_model() { + let related_table = filter.field.related_model().as_table(ctx); + let related_columns: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.aliased_col(Some(alias), ctx)) + .collect(); + + let (nested_conditions, nested_joins) = + self.visit_nested_filter(alias, |this| this.visit_filter(*filter.nested_filter, ctx)); + let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect()); + + let conditions = related_columns + .clone() + .into_iter() + .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); + + let select = Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table)))) + .columns(related_columns) + .so_that(conditions); + + if let Some(nested_joins) = nested_joins { + nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data)) + } else { + select + } + } else { + let table = filter.field.as_table(ctx); + let selected_identifier: Vec = filter + .field + .identifier_columns(ctx) + .map(|col| col.aliased_col(Some(alias), ctx)) + .collect(); + + let join_columns: Vec = filter + .field + .join_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .collect(); + + let related_table = filter.field.related_model().as_table(ctx); + let related_join_columns: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx)) + .collect(); + + let (nested_conditions, nested_joins) = self + .visit_nested_filter(alias.flip(AliasMode::Join), |nested_visitor| { + nested_visitor.visit_filter(*filter.nested_filter, ctx) + }); + + let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect()); + let nested_conditons = selected_identifier + .clone() + .into_iter() + .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); + + let join = related_table + .alias(alias.to_string(Some(AliasMode::Join))) + 
.on(Row::from(related_join_columns).equals(Row::from(join_columns))); + + let select = Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table)))) + .columns(selected_identifier) + .inner_join(join) + .so_that(nested_conditons); + + if let Some(nested_joins) = nested_joins { + nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data)) + } else { + select + } } } } -impl AliasedCondition for Filter { - /// Conversion from a `Filter` to a query condition tree. Aliased when in a nested `SELECT`. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - match self { +impl FilterVisitorExt for FilterVisitor { + fn visit_filter( + &mut self, + filter: Filter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>) { + match filter { Filter::And(mut filters) => match filters.len() { - n if n == 0 => ConditionTree::NoCondition, - n if n == 1 => filters.pop().unwrap().aliased_cond(state, ctx), + n if n == 0 => (ConditionTree::NoCondition, None), + n if n == 1 => self.visit_filter(filters.pop().unwrap(), ctx), _ => { - let exprs = filters - .into_iter() - .map(|f| f.aliased_cond(state.clone(), ctx)) - .map(Expression::from) - .collect(); + let mut exprs = Vec::with_capacity(filters.len()); + let mut top_level_joins = vec![]; + + for filter in filters { + let (conditions, nested_joins) = self.visit_filter(filter, ctx); + + exprs.push(Expression::from(conditions)); + + if let Some(nested_joins) = nested_joins { + top_level_joins.extend(nested_joins); + } + } - ConditionTree::And(exprs) + (ConditionTree::And(exprs), Some(top_level_joins)) } }, Filter::Or(mut filters) => match filters.len() { - n if n == 0 => ConditionTree::NegativeCondition, - n if n == 1 => filters.pop().unwrap().aliased_cond(state, ctx), + n if n == 0 => (ConditionTree::NegativeCondition, None), + n if n == 1 => self.visit_filter(filters.pop().unwrap(), ctx), _ => { - let exprs = filters - .into_iter() - .map(|f| f.aliased_cond(state.clone(), ctx)) - .map(Expression::from) - .collect(); + let mut exprs = Vec::with_capacity(filters.len()); + let mut top_level_joins = vec![]; + + for filter in filters { + let (conditions, nested_joins) = self.visit_filter(filter, ctx); + + exprs.push(Expression::from(conditions)); - ConditionTree::Or(exprs) + if let Some(nested_joins) = nested_joins { + top_level_joins.extend(nested_joins); + } + } + + (ConditionTree::Or(exprs), Some(top_level_joins)) } }, Filter::Not(mut filters) => match filters.len() { - n if n == 0 => ConditionTree::NoCondition, - n if n == 1 => filters.pop().unwrap().aliased_cond(state.invert_reverse(), ctx).not(), + n if n == 0 => (ConditionTree::NoCondition, None), + n if n == 1 => { + let (cond, joins) = self.invert_reverse(|this| this.visit_filter(filters.pop().unwrap(), ctx)); + + (cond.not(), joins) + } _ => { - let exprs = filters - .into_iter() - .map(|f| f.aliased_cond(state.clone().invert_reverse(), ctx).not()) - .map(Expression::from) - .collect(); + let mut exprs = Vec::with_capacity(filters.len()); + let mut top_level_joins = vec![]; + + for filter in filters { + let (conditions, nested_joins) = self.invert_reverse(|this| this.visit_filter(filter, ctx)); + let inverted_conditions = conditions.not(); + + exprs.push(Expression::from(inverted_conditions)); - ConditionTree::And(exprs) + if let Some(nested_joins) = nested_joins { + top_level_joins.extend(nested_joins); + } + } + + (ConditionTree::And(exprs), Some(top_level_joins)) } }, - Filter::Scalar(filter) => filter.aliased_cond(state, ctx), - 
Filter::OneRelationIsNull(filter) => filter.aliased_cond(state, ctx), - Filter::Relation(filter) => filter.aliased_cond(state, ctx), + Filter::Scalar(filter) => (self.visit_scalar_filter(filter, ctx), None), + Filter::OneRelationIsNull(filter) => self.visit_one_relation_is_null_filter(filter, ctx), + Filter::Relation(filter) => self.visit_relation_filter(filter, ctx), Filter::BoolFilter(b) => { if b { - ConditionTree::NoCondition + (ConditionTree::NoCondition, None) } else { - ConditionTree::NegativeCondition + (ConditionTree::NegativeCondition, None) } } - Filter::Aggregation(filter) => filter.aliased_cond(state, ctx), - Filter::ScalarList(filter) => filter.aliased_cond(state, ctx), - Filter::Empty => ConditionTree::NoCondition, + Filter::Aggregation(filter) => (self.visit_aggregation_filter(filter, ctx), None), + Filter::ScalarList(filter) => (self.visit_scalar_list_filter(filter, ctx), None), + Filter::Empty => (ConditionTree::NoCondition, None), Filter::Composite(_) => unimplemented!("SQL connectors do not support composites yet."), } } -} -impl AliasedCondition for ScalarFilter { - /// Conversion from a `ScalarFilter` to a query condition tree. Aliased when in a nested `SELECT`. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - match self.condition { + fn visit_scalar_filter(&mut self, filter: ScalarFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + match filter.condition { ScalarCondition::Search(_, _) | ScalarCondition::NotSearch(_, _) => { - let mut projections = match self.condition.clone() { + let mut projections = match filter.condition.clone() { ScalarCondition::Search(_, proj) => proj, ScalarCondition::NotSearch(_, proj) => proj, _ => unreachable!(), }; - projections.push(self.projection); + projections.push(filter.projection); let columns: Vec = projections .into_iter() .map(|p| match p { - ScalarProjection::Single(field) => field.aliased_col(state.alias(), ctx), + ScalarProjection::Single(field) => field.aliased_col(self.parent_alias(), ctx), ScalarProjection::Compound(_) => { unreachable!("Full-text search does not support compound fields") } @@ -213,260 +305,293 @@ impl AliasedCondition for ScalarFilter { convert_scalar_filter( comparable, - self.condition, - state.reverse(), - self.mode, + filter.condition, + self.reverse(), + filter.mode, &[], - state.alias(), + self.parent_alias(), false, ctx, ) } - _ => scalar_filter_aliased_cond(self, state.alias(), state.reverse(), ctx), + _ => scalar_filter_aliased_cond(filter, self.parent_alias(), self.reverse(), ctx), } } -} -fn scalar_filter_aliased_cond( - sf: ScalarFilter, - alias: Option, - reverse: bool, - ctx: &Context<'_>, -) -> ConditionTree<'static> { - match sf.projection { - ScalarProjection::Single(field) => { - let comparable: Expression = field.aliased_col(alias, ctx).into(); + fn visit_relation_filter( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ConditionTree<'static>, Option>) { + let parent_alias = self.parent_alias().map(|a| a.to_string(None)); - convert_scalar_filter(comparable, sf.condition, reverse, sf.mode, &[field], alias, false, ctx) - } - ScalarProjection::Compound(fields) => { - let columns: Vec> = fields - .clone() - .into_iter() - .map(|field| field.aliased_col(alias, ctx)) - .collect(); + match &filter.condition { + // { to_one: { isNot: { ... 
} } } + RelationCondition::NoRelatedRecord if self.can_render_join() && !filter.field.is_list() => { + let alias = self.next_alias(AliasMode::Join); - convert_scalar_filter( - Row::from(columns).into(), - sf.condition, - reverse, - sf.mode, - &fields, - alias, - false, - ctx, - ) - } - } -} + let linking_fields_null: Vec<_> = ModelProjection::from(filter.field.model().primary_identifier()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_null()) + .map(Expression::from) + .collect(); + let null_filter = ConditionTree::And(linking_fields_null); -impl AliasedCondition for ScalarListFilter { - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let comparable: Expression = self.field.aliased_col(state.alias(), ctx).into(); + let join = compute_one2m_join( + &filter.field, + alias.to_string(None).as_str(), + parent_alias.as_deref(), + ctx, + ); - convert_scalar_list_filter(comparable, self.condition, &self.field, state.alias(), ctx) - } -} + let mut output_joins = vec![join]; -fn convert_scalar_list_filter( - comparable: Expression<'static>, - cond: ScalarListCondition, - field: &ScalarFieldRef, - alias: Option, - ctx: &Context<'_>, -) -> ConditionTree<'static> { - let condition = match cond { - ScalarListCondition::Contains(ConditionValue::Value(val)) => { - comparable.compare_raw("@>", convert_list_pv(field, vec![val])) - } - ScalarListCondition::Contains(ConditionValue::FieldRef(field_ref)) => { - let field_ref_expr: Expression = field_ref.aliased_col(alias, ctx).into(); + let (conditions, nested_joins) = self.visit_nested_filter(alias, |nested_visitor| { + nested_visitor + .invert_reverse(|nested_visitor| nested_visitor.visit_filter(*filter.nested_filter, ctx)) + }); - // This code path is only reachable for connectors with `ScalarLists` capability - field_ref_expr.equals(comparable.any()) - } - ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { - comparable.compare_raw("@>", convert_list_pv(field, vals)) - } - ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => { - comparable.compare_raw("@>", field_ref.aliased_col(alias, ctx)) - } - ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { - comparable.compare_raw("&&", convert_list_pv(field, vals)) - } - ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { - comparable.compare_raw("&&", field_ref.aliased_col(alias, ctx)) - } - ScalarListCondition::IsEmpty(true) => comparable.compare_raw("=", Value::Array(Some(vec![])).raw()), - ScalarListCondition::IsEmpty(false) => comparable.compare_raw("<>", Value::Array(Some(vec![])).raw()), - }; + if let Some(nested_joins) = nested_joins { + output_joins.extend(nested_joins); + } - ConditionTree::single(condition) -} + (conditions.not().or(null_filter), Some(output_joins)) + } + // { to_one: { is: { ... } } } + RelationCondition::ToOneRelatedRecord if self.can_render_join() && !filter.field.is_list() => { + let alias = self.next_alias(AliasMode::Join); -impl AliasedCondition for RelationFilter { - /// Conversion from a `RelationFilter` to a query condition tree. Aliased when in a nested `SELECT`. 
- fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let ids = ModelProjection::from(self.field.model().primary_identifier()).as_columns(ctx); - let columns: Vec> = ids.map(|col| col.aliased_col(state.alias(), ctx)).collect(); + let linking_fields_not_null: Vec<_> = ModelProjection::from(filter.field.model().primary_identifier()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_not_null()) + .map(Expression::from) + .collect(); + let not_null_filter = ConditionTree::And(linking_fields_not_null); - let condition = self.condition; - let sub_select = self.aliased_sel(state.alias().map(|a| a.inc(AliasMode::Table)), ctx); + let join = compute_one2m_join( + &filter.field, + alias.to_string(None).as_str(), + parent_alias.as_deref(), + ctx, + ); + let mut output_joins = vec![join]; - let comparison = match condition { - RelationCondition::AtLeastOneRelatedRecord => Row::from(columns).in_selection(sub_select), - RelationCondition::EveryRelatedRecord => Row::from(columns).not_in_selection(sub_select), - RelationCondition::NoRelatedRecord => Row::from(columns).not_in_selection(sub_select), - RelationCondition::ToOneRelatedRecord => Row::from(columns).in_selection(sub_select), - }; + let (conditions, nested_joins) = self.visit_nested_filter(alias, |nested_visitor| { + nested_visitor.visit_filter(*filter.nested_filter, ctx) + }); - comparison.into() - } -} + if let Some(nested_joins) = nested_joins { + output_joins.extend(nested_joins); + }; -impl AliasedSelect for RelationFilter { - /// The subselect part of the `RelationFilter` `ConditionTree`. - fn aliased_sel<'a>(self, alias: Option, ctx: &Context<'_>) -> Select<'static> { - let alias = alias.unwrap_or_default(); - let condition = self.condition; + (conditions.and(not_null_filter), Some(output_joins)) + } - // Performance can be improved by using fields in related table which skip a join table operation - if self.field.related_field().walker().fields().is_some() { - let related_table = self.field.related_model().as_table(ctx); - let related_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields()) - .as_columns(ctx) - .map(|col| col.aliased_col(Some(alias), ctx)) - .collect(); + _ => { + let ids = ModelProjection::from(filter.field.model().primary_identifier()).as_columns(ctx); + let columns: Vec> = ids.map(|col| col.aliased_col(self.parent_alias(), ctx)).collect(); - let nested_conditions = self - .nested_filter - .aliased_condition_from(Some(alias), false, ctx) - .invert_if(condition.invert_of_subselect()); + let condition = filter.condition; + let sub_select = self.visit_relation_filter_select(filter, ctx); - let conditions = related_columns - .clone() - .into_iter() - .fold(nested_conditions, |acc, column| acc.and(column.is_not_null())); - - Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table)))) - .columns(related_columns) - .so_that(conditions) - } else { - let table = self.field.as_table(ctx); - let selected_identifier: Vec = self - .field - .identifier_columns(ctx) - .map(|col| col.aliased_col(Some(alias), ctx)) - .collect(); + let comparison = match condition { + RelationCondition::AtLeastOneRelatedRecord => Row::from(columns).in_selection(sub_select), + RelationCondition::EveryRelatedRecord => Row::from(columns).not_in_selection(sub_select), + RelationCondition::NoRelatedRecord => Row::from(columns).not_in_selection(sub_select), + RelationCondition::ToOneRelatedRecord => Row::from(columns).in_selection(sub_select), 
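+                    // Note that `EveryRelatedRecord` also compiles to NOT IN: the nested
+                    // conditions were already inverted via `condition.invert_of_subselect()`
+                    // when the subselect was built in `visit_relation_filter_select`.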
+                };
 
-            let join_columns: Vec<Column<'static>> = self
-                .field
-                .join_columns(ctx)
-                .map(|c| c.aliased_col(Some(alias), ctx))
-                .collect();
+                (comparison.into(), None)
+            }
+        }
+    }
 
-            let related_table = self.field.related_model().as_table(ctx);
-            let related_join_columns: Vec<_> = ModelProjection::from(self.field.related_field().linking_fields())
+    fn visit_one_relation_is_null_filter(
+        &mut self,
+        filter: OneRelationIsNullFilter,
+        ctx: &Context<'_>,
+    ) -> (ConditionTree<'static>, Option<Vec<AliasedJoin>>) {
+        let parent_alias = self.parent_alias();
+        let parent_alias_string = parent_alias.as_ref().map(|a| a.to_string(None));
+
+        // If the relation is inlined, we simply check whether the linking fields are null.
+        //
+        // ```sql
+        // SELECT "Parent"."id" FROM "Parent"
+        // WHERE "Parent"."childId" IS NULL;
+        // ```
+        if filter.field.is_inlined_on_enclosing_model() {
+            let conditions: Vec<_> = ModelProjection::from(filter.field.linking_fields())
                 .as_columns(ctx)
-                .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx))
+                .map(|c| c.opt_table(parent_alias_string.clone()))
+                .map(|c| c.is_null())
+                .map(Expression::from)
                 .collect();
 
-            let nested_conditions = self
-                .nested_filter
-                .aliased_condition_from(Some(alias.flip(AliasMode::Join)), false, ctx)
-                .invert_if(condition.invert_of_subselect());
+            return (ConditionTree::And(conditions), None);
+        }
 
-            let conditions = selected_identifier
-                .clone()
-                .into_iter()
-                .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
+        // If the relation is not inlined and we can use joins, we join the relation and
+        // check whether the related linking fields are null.
+        //
+        // ```sql
+        // SELECT "Parent"."id" FROM "Parent"
+        // LEFT JOIN "Child" AS "j1" ON ("j1"."parentId" = "Parent"."id")
+        // WHERE "j1"."parentId" IS NULL;
+        // ```
+        if self.can_render_join() {
+            let alias = self.next_alias(AliasMode::Join);
+
+            let conditions: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields())
+                .as_columns(ctx)
+                .map(|c| c.aliased_col(Some(alias), ctx))
+                .map(|c| c.is_null())
+                .map(Expression::from)
+                .collect();
 
-            let join = related_table
-                .alias(alias.to_string(Some(AliasMode::Join)))
-                .on(Row::from(related_join_columns).equals(Row::from(join_columns)));
+            let join = compute_one2m_join(
+                &filter.field,
+                alias.to_string(None).as_str(),
+                parent_alias_string.as_deref(),
+                ctx,
+            );
 
-            Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table))))
-                .columns(selected_identifier)
-                .inner_join(join)
-                .so_that(conditions)
+            return (ConditionTree::And(conditions), Some(vec![join]));
         }
-    }
-}
 
-impl AliasedCondition for OneRelationIsNullFilter {
-    /// Conversion from a `OneRelationIsNullFilter` to a query condition tree. Aliased when in a nested `SELECT`.
-    fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> {
-        let alias = state.alias().map(|a| a.to_string(None));
+        // Otherwise, we use a NOT IN clause and a subselect to keep only the records that have no related record.
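+        // (A parent that does have a related record yields a non-null back-reference row
+        // in the subselect, so the NOT IN below excludes exactly those parents.)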
+ // + // ```sql + // SELECT "Parent"."id" FROM "Parent" + // WHERE ("Parent".id) NOT IN ( + // SELECT "Child"."parentId" FROM "Child" WHERE "Child"."parentId" IS NOT NULL + // ) + // ``` + let relation = filter.field.relation(); + let table = relation.as_table(ctx); + let relation_table = match parent_alias { + Some(ref alias) => table.alias(alias.to_string(None)), + None => table, + }; - let condition = if self.field.relation_is_inlined_in_parent() { - self.field + let columns_not_null = + filter + .field + .related_field() .as_columns(ctx) .fold(ConditionTree::NoCondition, |acc, column| { - let column_is_null = column.opt_table(alias.clone()).is_null(); + let column_is_not_null = column.opt_table(parent_alias_string.clone()).is_not_null(); match acc { - ConditionTree::NoCondition => column_is_null.into(), - cond => cond.and(column_is_null), + ConditionTree::NoCondition => column_is_not_null.into(), + cond => cond.and(column_is_not_null), } - }) - } else { - let relation = self.field.relation(); - let table = relation.as_table(ctx); - let relation_table = match alias { - Some(ref alias) => table.alias(alias.to_string()), - None => table, - }; - - let columns_not_null = - self.field - .related_field() - .as_columns(ctx) - .fold(ConditionTree::NoCondition, |acc, column| { - let column_is_not_null = column.opt_table(alias.clone()).is_not_null(); + }); + + // If the table is aliased, we need to use that alias in the SELECT too + // eg: SELECT .x FROM table AS + let columns: Vec<_> = filter + .field + .related_field() + .scalar_fields() + .iter() + .map(|f| f.as_column(ctx).opt_table(parent_alias_string.clone())) + .collect(); + + let sub_select = Select::from_table(relation_table) + .columns(columns) + .and_where(columns_not_null); + + let id_columns: Vec> = ModelProjection::from(filter.field.linking_fields()) + .as_columns(ctx) + .map(|c| c.opt_table(parent_alias_string.clone())) + .collect(); + + ( + ConditionTree::single(Row::from(id_columns).not_in_selection(sub_select)), + None, + ) + } - match acc { - ConditionTree::NoCondition => column_is_not_null.into(), - cond => cond.and(column_is_not_null), - } - }); + fn visit_aggregation_filter(&mut self, filter: AggregationFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + let alias = self.parent_alias(); + let reverse = self.reverse(); - // If the table is aliased, we need to use that alias in the SELECT too - // eg: SELECT .x FROM table AS - let columns: Vec<_> = self - .field - .related_field() - .scalar_fields() - .iter() - .map(|f| f.as_column(ctx).opt_table(alias.clone())) - .collect(); + match filter { + AggregationFilter::Count(filter) => aggregate_conditions(*filter, alias, reverse, |x| count(x).into(), ctx), + AggregationFilter::Average(filter) => aggregate_conditions(*filter, alias, reverse, |x| avg(x).into(), ctx), + AggregationFilter::Sum(filter) => aggregate_conditions(*filter, alias, reverse, |x| sum(x).into(), ctx), + AggregationFilter::Min(filter) => aggregate_conditions(*filter, alias, reverse, |x| min(x).into(), ctx), + AggregationFilter::Max(filter) => aggregate_conditions(*filter, alias, reverse, |x| max(x).into(), ctx), + } + } - let sub_select = Select::from_table(relation_table) - .columns(columns) - .and_where(columns_not_null); + fn visit_scalar_list_filter(&mut self, filter: ScalarListFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + let comparable: Expression = filter.field.aliased_col(self.parent_alias(), ctx).into(); + let cond = filter.condition; + let field = &filter.field; + let alias = 
self.parent_alias(); - let id_columns: Vec> = ModelProjection::from(self.field.linking_fields()) - .as_columns(ctx) - .map(|c| c.opt_table(alias.clone())) - .collect(); + let condition = match cond { + ScalarListCondition::Contains(ConditionValue::Value(val)) => { + comparable.compare_raw("@>", convert_list_pv(field, vec![val])) + } + ScalarListCondition::Contains(ConditionValue::FieldRef(field_ref)) => { + let field_ref_expr: Expression = field_ref.aliased_col(alias, ctx).into(); - Row::from(id_columns).not_in_selection(sub_select).into() + // This code path is only reachable for connectors with `ScalarLists` capability + field_ref_expr.equals(comparable.any()) + } + ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { + comparable.compare_raw("@>", convert_list_pv(field, vals)) + } + ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => { + comparable.compare_raw("@>", field_ref.aliased_col(alias, ctx)) + } + ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { + comparable.compare_raw("&&", convert_list_pv(field, vals)) + } + ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { + comparable.compare_raw("&&", field_ref.aliased_col(alias, ctx)) + } + ScalarListCondition::IsEmpty(true) => comparable.compare_raw("=", Value::Array(Some(vec![])).raw()), + ScalarListCondition::IsEmpty(false) => comparable.compare_raw("<>", Value::Array(Some(vec![])).raw()), }; ConditionTree::single(condition) } } -impl AliasedCondition for AggregationFilter { - /// Conversion from an `AggregationFilter` to a query condition tree. Aliased when in a nested `SELECT`. - fn aliased_cond(self, state: ConditionState, ctx: &Context<'_>) -> ConditionTree<'static> { - let alias = state.alias(); - let reverse = state.reverse(); - match self { - AggregationFilter::Count(filter) => aggregate_conditions(*filter, alias, reverse, |x| count(x).into(), ctx), - AggregationFilter::Average(filter) => aggregate_conditions(*filter, alias, reverse, |x| avg(x).into(), ctx), - AggregationFilter::Sum(filter) => aggregate_conditions(*filter, alias, reverse, |x| sum(x).into(), ctx), - AggregationFilter::Min(filter) => aggregate_conditions(*filter, alias, reverse, |x| min(x).into(), ctx), - AggregationFilter::Max(filter) => aggregate_conditions(*filter, alias, reverse, |x| max(x).into(), ctx), +fn scalar_filter_aliased_cond( + sf: ScalarFilter, + alias: Option, + reverse: bool, + ctx: &Context<'_>, +) -> ConditionTree<'static> { + match sf.projection { + ScalarProjection::Single(field) => { + let comparable: Expression = field.aliased_col(alias, ctx).into(); + + convert_scalar_filter(comparable, sf.condition, reverse, sf.mode, &[field], alias, false, ctx) + } + ScalarProjection::Compound(fields) => { + let columns: Vec> = fields + .clone() + .into_iter() + .map(|field| field.aliased_col(alias, ctx)) + .collect(); + + convert_scalar_filter( + Row::from(columns).into(), + sf.condition, + reverse, + sf.mode, + &fields, + alias, + false, + ctx, + ) } } } @@ -481,10 +606,7 @@ fn aggregate_conditions( where T: Fn(Column) -> Expression, { - let sf = match filter { - Filter::Scalar(sf) => sf, - _ => unimplemented!(), - }; + let sf = filter.into_scalar().unwrap(); match sf.projection { ScalarProjection::Compound(_) => { diff --git a/query-engine/connectors/sql-query-connector/src/join_utils.rs b/query-engine/connectors/sql-query-connector/src/join_utils.rs index dbec0e430951..4b4d2fc8aa24 100644 --- 
a/query-engine/connectors/sql-query-connector/src/join_utils.rs +++ b/query-engine/connectors/sql-query-connector/src/join_utils.rs @@ -1,4 +1,4 @@ -use crate::{filter_conversion::AliasedCondition, model_extensions::*, Context}; +use crate::{filter::FilterBuilder, model_extensions::*, Context}; use connector_interface::Filter; use prisma_models::*; use quaint::prelude::*; @@ -6,7 +6,7 @@ use quaint::prelude::*; #[derive(Debug, Clone)] pub(crate) struct AliasedJoin { // Actual join data to be passed to quaint - pub(crate) data: JoinData<'static>, + pub(crate) data: Join<'static>, // Alias used for the join. eg: LEFT JOIN ... AS pub(crate) alias: String, } @@ -22,7 +22,7 @@ pub(crate) fn compute_aggr_join( filter: Option, aggregator_alias: &str, join_alias: &str, - previous_join: Option<&AliasedJoin>, + previous_join: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { let join_alias = format!("{}_{}", join_alias, &rf.related_model().name()); @@ -65,7 +65,7 @@ fn compute_aggr_join_one2m( filter: Option, aggregator_alias: &str, join_alias: &str, - previous_join: Option<&AliasedJoin>, + previous_join: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { let (left_fields, right_fields) = if rf.is_inlined_on_enclosing_model() { @@ -77,9 +77,9 @@ fn compute_aggr_join_one2m( ) }; let select_columns = right_fields.iter().map(|f| f.as_column(ctx)); - let conditions: ConditionTree = filter - .map(|f| f.aliased_condition_from(None, false, ctx)) - .unwrap_or(ConditionTree::NoCondition); + let (conditions, joins) = filter + .map(|f| FilterBuilder::with_top_level_joins().visit_filter(f, ctx)) + .unwrap_or((ConditionTree::NoCondition, None)); // + SELECT Child. FROM Child WHERE let query = Select::from_table(rf.related_model().as_table(ctx)) @@ -98,11 +98,17 @@ fn compute_aggr_join_one2m( // + GROUP BY Child. 
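+    // (Any top-level joins produced while rendering the filter above are attached
+    // to this subquery right after the GROUP BY below.)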
let query = right_fields.iter().fold(query, |acc, f| acc.group_by(f.as_column(ctx))); + let query = if let Some(joins) = joins { + joins.into_iter().fold(query, |acc, join| acc.join(join.data)) + } else { + query + }; + let pairs = left_fields.into_iter().zip(right_fields); let on_conditions: Vec = pairs .map(|(a, b)| { let col_a = match previous_join { - Some(prev_join) => Column::from((prev_join.alias.to_owned(), a.db_name().to_owned())), + Some(prev_join) => Column::from((prev_join.to_owned(), a.db_name().to_owned())), None => a.as_column(ctx), }; let col_b = Column::from((join_alias.to_owned(), b.db_name().to_owned())); @@ -120,7 +126,7 @@ fn compute_aggr_join_one2m( .on(ConditionTree::And(on_conditions)); AliasedJoin { - data: join, + data: Join::Left(join), alias: join_alias.to_owned(), } } @@ -141,7 +147,7 @@ fn compute_aggr_join_m2m( filter: Option, aggregator_alias: &str, join_alias: &str, - previous_join: Option<&AliasedJoin>, + previous_join: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { // m2m join table (_ParentToChild) @@ -155,15 +161,21 @@ fn compute_aggr_join_m2m( // Parent primary identifiers let parent_ids: ModelProjection = rf.model().primary_identifier().into(); // Rendered filters - let conditions: ConditionTree = filter - .map(|f| f.aliased_condition_from(None, false, ctx)) - .unwrap_or(ConditionTree::NoCondition); + let (conditions, joins) = filter + .map(|f| FilterBuilder::with_top_level_joins().visit_filter(f, ctx)) + .unwrap_or((ConditionTree::NoCondition, None)); // + SELECT _ParentToChild.ChildId FROM Child WHERE let query = Select::from_table(child_model.as_table(ctx)) .columns(m2m_child_columns.clone()) .so_that(conditions); + let query = if let Some(joins) = joins { + joins.into_iter().fold(query, |acc, join| acc.join(join.data)) + } else { + query + }; + let aggr_expr = match aggregation { AggregationType::Count => count(m2m_child_columns.clone()), }; @@ -196,7 +208,7 @@ fn compute_aggr_join_m2m( let on_conditions: Vec = pairs .map(|(a, b)| { let col_a = match previous_join { - Some(prev_join) => Column::from((prev_join.alias.to_owned(), a.db_name().to_owned())), + Some(prev_join) => Column::from((prev_join.to_owned(), a.db_name().to_owned())), None => a.as_column(ctx), }; let col_b = Column::from((join_alias.to_owned(), b.name.to_string())); @@ -216,48 +228,33 @@ fn compute_aggr_join_m2m( AliasedJoin { alias: join_alias.to_owned(), - data: join, + data: Join::Left(join), } } pub(crate) fn compute_one2m_join( - rf: &RelationFieldRef, - join_prefix: &str, - previous_join: Option<&AliasedJoin>, + field: &RelationFieldRef, + alias: &str, + parent_alias: Option<&str>, ctx: &Context<'_>, ) -> AliasedJoin { - let (left_fields, right_fields) = if rf.is_inlined_on_enclosing_model() { - (rf.scalar_fields(), rf.referenced_fields()) - } else { - ( - rf.related_field().referenced_fields(), - rf.related_field().scalar_fields(), - ) - }; - - let right_table_alias = format!("{}_{}", join_prefix, rf.related_model().name()); - - let related_model = rf.related_model(); - let pairs = left_fields.into_iter().zip(right_fields); - - let on_conditions: Vec = pairs - .map(|(a, b)| { - let a_col = match previous_join { - Some(prev_join) => Column::from((prev_join.alias.to_owned(), a.db_name().to_owned())), - None => a.as_column(ctx), - }; + let join_columns: Vec = field + .join_columns(ctx) + .map(|c| c.opt_table(parent_alias.map(ToOwned::to_owned))) + .collect(); - let b_col = Column::from((right_table_alias.clone(), b.db_name().to_owned())); + let related_table = 
field.related_model().as_table(ctx); + let related_join_columns: Vec<_> = ModelProjection::from(field.related_field().linking_fields()) + .as_columns(ctx) + .map(|col| col.table(alias.to_owned())) + .collect(); - a_col.equals(b_col).into() - }) - .collect::>(); + let join = related_table + .alias(alias.to_owned()) + .on(Row::from(related_join_columns).equals(Row::from(join_columns))); AliasedJoin { - alias: right_table_alias.to_owned(), - data: related_model - .as_table(ctx) - .alias(right_table_alias) - .on(ConditionTree::And(on_conditions)), + alias: alias.to_owned(), + data: Join::Left(join), } } diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index d98f87d9a92e..ed1528ded6b5 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -6,7 +6,7 @@ mod context; mod cursor_condition; mod database; mod error; -mod filter_conversion; +mod filter; mod join_utils; mod model_extensions; mod nested_aggregations; @@ -19,7 +19,7 @@ mod sql_trace; mod value; mod value_ext; -use self::{column_metadata::*, context::Context, filter_conversion::*, query_ext::QueryExt, row::*}; +use self::{column_metadata::*, context::Context, query_ext::QueryExt, row::*}; use quaint::prelude::Queryable; #[cfg(feature = "driver-adapters")] diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/connectors/sql-query-connector/src/ordering.rs index 72485bacb2cb..7ab1bc03d3ce 100644 --- a/query-engine/connectors/sql-query-connector/src/ordering.rs +++ b/query-engine/connectors/sql-query-connector/src/ordering.rs @@ -142,11 +142,12 @@ impl OrderByBuilder { .expect("An order by relation aggregation has to have at least one hop"); // Unwraps are safe because the SQL connector doesn't yet support any other type of orderBy hop but the relation hop. 
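+        // Only the alias of the previous hop's join is threaded through now:
+        // `compute_one2m_join` takes the parent alias as a plain `&str` rather
+        // than borrowing the whole previous `AliasedJoin`.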
-        let mut joins = vec![];
+        let mut joins: Vec<AliasedJoin> = vec![];
+
         for (i, hop) in rest_hops.iter().enumerate() {
             let previous_join = if i > 0 { joins.get(i - 1) } else { None };
-
-            let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_join, ctx);
+            let previous_alias = previous_join.map(|j| j.alias.as_str());
+            let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_alias, ctx);
             joins.push(join);
         }
@@ -156,6 +157,8 @@ impl OrderByBuilder {
             _ => unreachable!("Order by relation aggregation other than count are not supported"),
         };
 
+        let previous_alias = joins.last().map(|j| j.alias.as_str());
+
         // We perform the aggregation on the last join
         let last_aggr_join = compute_aggr_join(
             last_hop.as_relation_hop().unwrap(),
@@ -163,7 +166,7 @@
             None,
             ORDER_AGGREGATOR_ALIAS,
             &self.join_prefix(),
-            joins.last(),
+            previous_alias,
             ctx,
         );
@@ -181,11 +184,12 @@
         order_by: &OrderByScalar,
         ctx: &Context<'_>,
     ) -> (Vec<AliasedJoin>, Column<'static>) {
-        let mut joins = vec![];
+        let mut joins: Vec<AliasedJoin> = vec![];
 
         for (i, hop) in order_by.path.iter().enumerate() {
             let previous_join = if i > 0 { joins.get(i - 1) } else { None };
-            let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_join, ctx);
+            let previous_alias = previous_join.map(|j| j.alias.as_str());
+            let join = compute_one2m_join(hop.as_relation_hop().unwrap(), &self.join_prefix(), previous_alias, ctx);
             joins.push(join);
         }
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs
index 2aa0a80169de..eba36d394282 100644
--- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs
@@ -1,6 +1,6 @@
 use crate::{
-    cursor_condition, filter_conversion::AliasedCondition, model_extensions::*, nested_aggregations,
-    ordering::OrderByBuilder, sql_trace::SqlTraceComment, Context,
+    cursor_condition, filter::FilterBuilder, model_extensions::*, nested_aggregations, ordering::OrderByBuilder,
+    sql_trace::SqlTraceComment, Context,
 };
 use connector_interface::{filter::Filter, AggregationSelection, QueryArguments, RelAggregationSelection};
 use itertools::Itertools;
@@ -65,10 +65,10 @@ impl SelectDefinition for QueryArguments {
         let limit = if self.ignore_take { None } else { self.take_abs() };
         let skip = if self.ignore_skip { 0 } else { self.skip.unwrap_or(0) };
 
-        let filter: ConditionTree<'static> = self
+        let (filter, filter_joins) = self
             .filter
-            .map(|f| f.aliased_condition_from(None, false, ctx))
-            .unwrap_or(ConditionTree::NoCondition);
+            .map(|f| FilterBuilder::with_top_level_joins().visit_filter(f, ctx))
+            .unwrap_or((ConditionTree::NoCondition, None));
 
         let conditions = match (filter, cursor_condition) {
             (ConditionTree::NoCondition, cursor) => cursor,
@@ -80,13 +80,21 @@
         let joined_table = order_by_definitions
             .iter()
             .flat_map(|j| &j.joins)
-            .fold(model.as_table(ctx), |acc, join| acc.left_join(join.clone().data));
+            .fold(model.as_table(ctx), |acc, join| acc.join(join.clone().data));
 
         // Add joins necessary to the nested aggregations
         let joined_table = aggregation_joins
             .joins
            .into_iter()
-            .fold(joined_table, |acc, join| acc.left_join(join.data));
+            .fold(joined_table, |acc, join| acc.join(join.data));
+
+        let joined_table = if let Some(filter_joins) = filter_joins {
+            filter_joins
+                .into_iter()
+
.fold(joined_table, |acc, join| acc.join(join.data)) + } else { + joined_table + }; let select_ast = Select::from_table(joined_table) .so_that(conditions) @@ -247,7 +255,11 @@ pub(crate) fn group_by_aggregate( ); match having { - Some(filter) => grouped.having(filter.aliased_condition_from(None, false, ctx)), + Some(filter) => { + let cond = FilterBuilder::without_top_level_joins().visit_filter(filter, ctx); + + grouped.having(cond) + } None => grouped, } } diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index 8194a70fd6fa..2dba40dcb7fc 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -1,6 +1,7 @@ +use crate::filter::FilterBuilder; use crate::{ column_metadata, error::*, model_extensions::*, sql_trace::trace_parent_to_string, sql_trace::SqlTraceComment, - value_ext::IntoTypedJsonExtension, AliasedCondition, ColumnMetadata, Context, SqlRow, ToSqlRow, + value_ext::IntoTypedJsonExtension, ColumnMetadata, Context, SqlRow, ToSqlRow, }; use async_trait::async_trait; use connector_interface::{filter::Filter, RecordFilter}; @@ -126,12 +127,13 @@ impl QueryExt for Q { ) -> crate::Result> { let model_id: ModelProjection = model.primary_identifier().into(); let id_cols: Vec> = model_id.as_columns(ctx).collect(); + let condition = FilterBuilder::without_top_level_joins().visit_filter(filter, ctx); let select = Select::from_table(model.as_table(ctx)) .columns(id_cols) .append_trace(&Span::current()) .add_trace_id(ctx.trace_id) - .so_that(filter.aliased_condition_from(None, false, ctx)); + .so_that(condition); self.select_ids(select, model_id, ctx).await } From 4dbb2527e259e781d2b8a701f496d34f93383926 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 27 Sep 2023 16:02:18 +0200 Subject: [PATCH 042/128] driver-adapters: close tx on drop (#4286) * driver-adapters: close tx on drop * Review feedback, agreed by Oleksii * Change discard -> dispose in ts * Bump driver-adapter-utils * Bump prisma and @prisma/client * Bump all package versions --------- Co-authored-by: Miguel Fernandez --- .../js/adapter-libsql/package.json | 2 +- .../js/adapter-libsql/src/libsql.ts | 14 +++++++++ .../js/adapter-neon/package.json | 2 +- .../js/adapter-neon/src/neon.ts | 10 ++++++- .../js/adapter-pg/package.json | 2 +- .../driver-adapters/js/adapter-pg/src/pg.ts | 5 ++++ .../js/adapter-planetscale/package.json | 2 +- .../js/adapter-planetscale/src/planetscale.ts | 11 +++++++ .../js/driver-adapter-utils/package.json | 2 +- .../js/driver-adapter-utils/src/binder.ts | 15 ++++++++++ .../js/driver-adapter-utils/src/types.ts | 7 +++++ .../driver-adapters/js/pnpm-lock.yaml | 30 +++++++++---------- .../js/smoke-test-js/package.json | 4 +-- query-engine/driver-adapters/src/proxy.rs | 16 ++++++++++ 14 files changed, 99 insertions(+), 23 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json index 20817e5a7d9d..fb672669ecb7 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ b/query-engine/driver-adapters/js/adapter-libsql/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-libsql", - "version": "0.2.3", + "version": "0.3.0", "description": "Prisma's driver adapter for libsql and Turso", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts 
b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts index 1f119ac4937d..cbe7d34d40c9 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts @@ -72,6 +72,8 @@ class LibSqlQueryable implements } class LibSqlTransaction extends LibSqlQueryable implements Transaction { + finished = false + constructor( client: TransactionClient, readonly options: TransactionOptions, @@ -82,6 +84,8 @@ class LibSqlTransaction extends LibSqlQueryable implements Tr async commit(): Promise> { debug(`[js::commit]`) + this.finished = true + await this.client.commit() return ok(undefined) } @@ -89,6 +93,8 @@ class LibSqlTransaction extends LibSqlQueryable implements Tr async rollback(): Promise> { debug(`[js::rollback]`) + this.finished = true + try { await this.client.rollback() } catch (error) { @@ -97,6 +103,14 @@ class LibSqlTransaction extends LibSqlQueryable implements Tr return ok(undefined) } + + dispose(): Result { + if (!this.finished) { + this.finished = true + this.rollback().catch(console.error) + } + return ok(undefined) + } } export class PrismaLibSQL extends LibSqlQueryable implements DriverAdapter { diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 04291500ce57..281c37918b76 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.3.4", + "version": "0.4.0", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index e2dac37a911c..3ed0460cf858 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -80,7 +80,10 @@ class NeonWsQueryable extends NeonQ } class NeonTransaction extends NeonWsQueryable implements Transaction { - constructor(client: neon.PoolClient, readonly options: TransactionOptions) { + constructor( + client: neon.PoolClient, + readonly options: TransactionOptions, + ) { super(client) } @@ -97,6 +100,11 @@ class NeonTransaction extends NeonWsQueryable implements Transa this.client.release() return Promise.resolve(ok(undefined)) } + + dispose(): Result { + this.client.release() + return ok(undefined) + } } export class PrismaNeon extends NeonWsQueryable implements DriverAdapter { diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index ab19ba1a341d..4274e4f4b6fd 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.3.4", + "version": "0.4.0", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index 5c574460b49b..b315dabc8273 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -93,6 +93,11 @@ class PgTransaction extends PgQueryable implements Transactio this.client.release() return ok(undefined) } + + dispose(): Result { + 
this.client.release() + return ok(undefined) + } } export class PrismaPg extends PgQueryable implements DriverAdapter { diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index a34f9b52b4fc..2d1e59bdd612 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-planetscale", - "version": "0.3.4", + "version": "0.4.0", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index b5dffb89272f..cffb00482003 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -84,6 +84,8 @@ class PlanetScaleQueryable implements Transaction { + finished = false + constructor( tx: planetScale.Transaction, readonly options: TransactionOptions, @@ -96,6 +98,7 @@ class PlanetScaleTransaction extends PlanetScaleQueryable> { debug(`[js::commit]`) + this.finished = true this.txDeferred.resolve() return Promise.resolve(ok(await this.txResultPromise)) } @@ -103,9 +106,17 @@ class PlanetScaleTransaction extends PlanetScaleQueryable> { debug(`[js::rollback]`) + this.finished = true this.txDeferred.reject(new RollbackError()) return Promise.resolve(ok(await this.txResultPromise)) } + + dispose(): Result { + if (!this.finished) { + this.rollback().catch(console.error) + } + return ok(undefined) + } } export class PrismaPlanetScale extends PlanetScaleQueryable implements DriverAdapter { diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 22130b4ff536..0f6afd2ce2b4 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.4.0", + "version": "0.5.0", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts index 5f0e055ec3a6..aee18197e291 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts @@ -47,6 +47,7 @@ const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Tran executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), + dispose: wrapSync(errorRegistry, transaction.dispose.bind(transaction)), } } @@ -63,3 +64,17 @@ function wrapAsync( } } } + +function wrapSync( + registry: ErrorRegistryInternal, + fn: (...args: A) => Result, +): (...args: A) => Result { + return (...args) => { + try { + return fn(...args) + } catch (error) { + const id = registry.registerNewError(error) + return err({ kind: 'GenericJsError', id }) + } + } +} diff --git 
a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 409f3958bcd5..65fa002dcc3a 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -96,6 +96,13 @@ export interface Transaction extends Queryable { * Rolls back the transaction. */ rollback(): Promise> + /** + * Discards and closes the transaction which may or may not have been committed or rolled back. + * This operation must be synchronous. If the implementation requires calling creating new + * asynchronous tasks on the event loop, the driver is responsible for handling the errors + * appropriately to ensure they don't crash the application. + */ + dispose(): Result } export interface ErrorCapturingDriverAdapter extends DriverAdapter { diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 78e9e94da329..ad56abb645a0 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -128,8 +128,8 @@ importers: specifier: workspace:* version: link:../adapter-planetscale '@prisma/client': - specifier: 5.4.0-integration-libsql-adapter.7 - version: 5.4.0-integration-libsql-adapter.7(prisma@5.4.0-integration-libsql-adapter.7) + specifier: 5.4.0-integration-dispose-tx.2 + version: 5.4.0-integration-dispose-tx.2(prisma@5.4.0-integration-dispose-tx.2) '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils @@ -153,8 +153,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.4.0-integration-libsql-adapter.7 - version: 5.4.0-integration-libsql-adapter.7 + specifier: 5.4.0-integration-dispose-tx.2 + version: 5.4.0-integration-dispose-tx.2 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -518,8 +518,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.4.0-integration-libsql-adapter.7(prisma@5.4.0-integration-libsql-adapter.7): - resolution: {integrity: sha512-0WNHV37C16IPoE0cgBtX4heYDHaLXQhq4IIB50RMw/WJyzozhigEIizaFKbBdZ4PjxpekohHvPgTH6r4QfiTrA==} + /@prisma/client@5.4.0-integration-dispose-tx.2(prisma@5.4.0-integration-dispose-tx.2): + resolution: {integrity: sha512-MShiYnvIUS/5ThfLRjyGaKGrhtzj69f38EqEksph7KckbLzfPQ7VWAJ2ZwKoi5DGJXEPaeb3S0lQpXl2KyuGxA==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -528,16 +528,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.4.0-24.libsql-adapter-8337c6b372b0f4eb2500403a7cf450885aee4cdc - prisma: 5.4.0-integration-libsql-adapter.7 + '@prisma/engines-version': 5.4.0-26.4bf3cce422a49f49c661da32d4016a5be81d28b4 + prisma: 5.4.0-integration-dispose-tx.2 dev: false - /@prisma/engines-version@5.4.0-24.libsql-adapter-8337c6b372b0f4eb2500403a7cf450885aee4cdc: - resolution: {integrity: sha512-Yr2GeXHTK2FdxF5o0lLyZk0oJC8L1QMADZyPn+wTNcG9kfMCCs3cvQwPLDdvsMUHfwJ0c31r6w0mEpM4c37Ejw==} + /@prisma/engines-version@5.4.0-26.4bf3cce422a49f49c661da32d4016a5be81d28b4: + resolution: {integrity: sha512-6yhw/P2lWJOljh3QIkqeBNgLPBLVca08YjKPTyOlQ771vnA3pH+EYpIi2VOb2+3NsIM9zlX1NvFadd4qSbtubA==} dev: false - /@prisma/engines@5.4.0-integration-libsql-adapter.7: - resolution: {integrity: sha512-QRNhAeLw4EqSE+N6tzpOSlkqW9XO1Zf3aUO4wNH3LJTjG153oIJDnGfahijF93PjuyIOSHEFGZ7mfKeAaq7FiA==} + 
/@prisma/engines@5.4.0-integration-dispose-tx.2: + resolution: {integrity: sha512-3kYPptQRiyDARcJIZudak7naHlTo0qYB/8ObxlIyw9IjbKax2m4MiPZuVasVpdcspXYj+ayzomFmCDptjZrjzg==} requiresBuild: true /@types/debug@4.1.8: @@ -1244,13 +1244,13 @@ packages: /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - /prisma@5.4.0-integration-libsql-adapter.7: - resolution: {integrity: sha512-B7nkAnHFAxEMPS/o3jpUeUOp97Js3HlRThfXMfYILrVML/MMy18HwjQrVzxfF/QSq7UxbUQAyGLFrqSypTPAzw==} + /prisma@5.4.0-integration-dispose-tx.2: + resolution: {integrity: sha512-FBI46emn8rBapyTN6cwM0KNtmK94D9mucnQh2g+VhjWqD1SpFwFTVLXiT25tOFwEK0M/UQQ+eBsXn65BNBoisQ==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.4.0-integration-libsql-adapter.7 + '@prisma/engines': 5.4.0-integration-dispose-tx.2 /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index f54e717dbbd6..3dbad884541a 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -51,7 +51,7 @@ "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.0-integration-libsql-adapter.7", + "@prisma/client": "5.4.0-integration-dispose-tx.2", "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", @@ -61,7 +61,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.4.0-integration-libsql-adapter.7", + "prisma": "5.4.0-integration-dispose-tx.2", "tsx": "^3.12.7" } } diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 7e30f73c29a3..6352f3a96f29 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -5,6 +5,7 @@ use crate::async_js_function::AsyncJsFunction; use crate::conversion::JSArg; use crate::transaction::JsTransaction; use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; +use napi::threadsafe_function::{ErrorStrategy, ThreadsafeFunction}; use napi::{JsObject, JsString}; use napi_derive::napi; use quaint::connector::ResultSet as QuaintResultSet; @@ -46,6 +47,10 @@ pub(crate) struct TransactionProxy { /// rollback transaction rollback: AsyncJsFunction<(), ()>, + + /// dispose transaction, cleanup logic executed at the end of the transaction lifecycle + /// on drop. + dispose: ThreadsafeFunction<(), ErrorStrategy::Fatal>, } /// This result set is more convenient to be manipulated from both Rust and NodeJS. 
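The hunk below wires `dispose` into the proxy constructor and invokes it from `Drop`. Because Rust's `Drop` cannot await anything, the JS side has to finish (or fire and forget) its cleanup synchronously. A minimal sketch of a conforming implementation, mirroring the PlanetScale and pg adapters above (`MyTransaction` and its rollback body are illustrative, not part of the interface):

import { ok, type Result } from '@prisma/driver-adapter-utils'

class MyTransaction {
  finished = false

  async rollback(): Promise<Result<void>> {
    this.finished = true
    // driver-specific rollback work would happen here
    return ok(undefined)
  }

  dispose(): Result<void> {
    // Called synchronously when the engine drops the transaction. If neither
    // commit nor rollback ran, start a rollback without awaiting it, and
    // swallow errors: nothing upstream can handle them at this point.
    if (!this.finished) {
      this.rollback().catch(console.error)
    }
    return ok(undefined)
  }
}
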
@@ -387,11 +392,13 @@ impl TransactionProxy { pub fn new(js_transaction: &JsObject) -> napi::Result { let commit = js_transaction.get_named_property("commit")?; let rollback = js_transaction.get_named_property("rollback")?; + let dispose = js_transaction.get_named_property("dispose")?; let options = js_transaction.get_named_property("options")?; Ok(Self { commit, rollback, + dispose, options, }) } @@ -403,11 +410,20 @@ impl TransactionProxy { pub async fn commit(&self) -> quaint::Result<()> { self.commit.call(()).await } + pub async fn rollback(&self) -> quaint::Result<()> { self.rollback.call(()).await } } +impl Drop for TransactionProxy { + fn drop(&mut self) { + _ = self + .dispose + .call((), napi::threadsafe_function::ThreadsafeFunctionCallMode::NonBlocking); + } +} + /// Coerce a `f64` to a `f32`, asserting that the conversion is lossless. /// Note that, when overflow occurs during conversion, the result is `infinity`. fn f64_to_f32(x: f64) -> f32 { From 5c9823b6fe757fb47fc4665815b2cbc4d7920806 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 27 Sep 2023 17:34:40 +0200 Subject: [PATCH 043/128] driver-adapters: libsql: serialize all operations (#4288) --- .../js/adapter-libsql/package.json | 5 +-- .../js/adapter-libsql/src/libsql.ts | 34 ++++++++++++++++--- .../driver-adapters/js/pnpm-lock.yaml | 13 +++++++ 3 files changed, 46 insertions(+), 6 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json index fb672669ecb7..f99e7bb26226 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ b/query-engine/driver-adapters/js/adapter-libsql/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-libsql", - "version": "0.3.0", + "version": "0.3.1", "description": "Prisma's driver adapter for libsql and Turso", "main": "dist/index.js", "module": "dist/index.mjs", @@ -19,7 +19,8 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*", + "async-mutex": "0.4.0" }, "devDependencies": { "@libsql/client": "0.3.5" diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts index cbe7d34d40c9..fab2480ea5b8 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts @@ -9,6 +9,7 @@ import type { TransactionOptions, } from '@prisma/driver-adapter-utils' import type { InStatement, Client as LibSqlClientRaw, Transaction as LibSqlTransactionRaw } from '@libsql/client' +import { Mutex } from 'async-mutex' import { getColumnTypes, mapRow } from './conversion' const debug = Debug('prisma:driver-adapter:libsql') @@ -16,8 +17,12 @@ const debug = Debug('prisma:driver-adapter:libsql') type StdClient = LibSqlClientRaw type TransactionClient = LibSqlTransactionRaw +const LOCK_TAG = Symbol() + class LibSqlQueryable implements Queryable { - readonly flavour = 'sqlite' + readonly flavour = 'sqlite'; + + [LOCK_TAG] = new Mutex() constructor(protected readonly client: ClientT) {} @@ -60,6 +65,7 @@ class LibSqlQueryable implements * marked as unhealthy. 
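*
   * Note that each call also acquires the instance's mutex, so at most one
   * statement runs on the underlying libsql client at a time.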
*/ private async performIO(query: Query) { + const release = await this[LOCK_TAG].acquire() try { const result = await this.client.execute(query as InStatement) return result @@ -67,6 +73,8 @@ const error = e as Error debug('Error in performIO: %O', error) throw error + } finally { + release() } } } @@ -77,6 +85,7 @@ class LibSqlTransaction extends LibSqlQueryable implements Tr constructor( client: TransactionClient, readonly options: TransactionOptions, + readonly unlockParent: () => void, ) { super(client) } @@ -86,7 +95,12 @@ class LibSqlTransaction extends LibSqlQueryable implements Tr this.finished = true - await this.client.commit() + try { + await this.client.commit() + } finally { + this.unlockParent() + } + return ok(undefined) } @@ -99,6 +113,8 @@ class LibSqlTransaction extends LibSqlQueryable implements Tr await this.client.rollback() } catch (error) { debug('error in rollback:', error) + } finally { + this.unlockParent() } return ok(undefined) @@ -126,11 +142,21 @@ export class PrismaLibSQL extends LibSqlQueryable implements DriverAd const tag = '[js::startTransaction]' debug(`${tag} options: %O`, options) - const tx = await this.client.transaction('deferred') - return ok(new LibSqlTransaction(tx, options)) + const release = await this[LOCK_TAG].acquire() + + try { + const tx = await this.client.transaction('deferred') + return ok(new LibSqlTransaction(tx, options, release)) + } catch (e) { + // note: we only release the lock if creating the transaction fails; otherwise it must stay locked, + // hence the `catch` that rethrows the error instead of a `finally` block. + release() + throw e + } } async close(): Promise> { + await this[LOCK_TAG].acquire() this.client.close() return ok(undefined) } diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index ad56abb645a0..bc36246d5c9d 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -26,6 +26,9 @@ importers: '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils + async-mutex: + specifier: 0.4.0 + version: 0.4.0 devDependencies: '@libsql/client': specifier: 0.3.5 @@ -602,6 +605,12 @@ packages: engines: {node: '>=8'} dev: true + /async-mutex@0.4.0: + resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} + dependencies: + tslib: 2.6.2 + dev: false + /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} @@ -1409,6 +1418,10 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true + /tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + dev: false + /tsup@7.2.0(typescript@5.1.6): resolution: {integrity: sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} engines: {node: '>=16.14'} From 6039b1c04cb6a240c24047d50ebf000e9722ad47 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko <alex@aqrln.net> Date: Wed, 27 Sep 2023 20:11:32 +0200 Subject: [PATCH 044/128] driver-adapters: add example of loading the parent .envrc (#4226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * driver-adapters: load parent .envrc The example `.envrc` file was missing the `source_up` call to make it inherit
the parent `.envrc` from the root of the repo. This caused everything from the root `.envrc` to be unloaded and unset. * Apply suggestion Co-authored-by: Miguel Fernández * Make source_up commented out in the example --------- Co-authored-by: Miguel Fernández --- .../driver-adapters/js/smoke-test-js/.envrc.example | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example index 9b2e18373163..15a286787cbd 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example +++ b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example @@ -1,3 +1,9 @@ +# Uncomment "source_up" if you need to load the .envrc at the root of the +# `prisma-engines` repository before loading this one (for example, if you +# are using Nix). +# +# source_up + export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict" export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10" From 89d54c2a997c4665c8e31bad5b4ffb16fb0ab524 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 27 Sep 2023 20:36:37 +0200 Subject: [PATCH 045/128] nix: update flake (#4294) * Rust 1.72.1 * Node.js 20.6.1 * etc --- flake.lock | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/flake.lock b/flake.lock index 6f11c280dd3d..48c36ddc3bde 100644 --- a/flake.lock +++ b/flake.lock @@ -14,11 +14,11 @@ ] }, "locked": { - "lastModified": 1693787605, - "narHash": "sha256-rwq5U8dy+a9JFny/73L0SJu1GfWwATMPMTp7D+mjHy8=", + "lastModified": 1695511445, + "narHash": "sha256-mnE14re43v3/Jc50Jv0BKPMtEk7FEtDSligP6B5HwlI=", "owner": "ipetkov", "repo": "crane", - "rev": "8b4f7a4dab2120cf41e7957a28a853f45016bd9d", + "rev": "3de322e06fc88ada5e3589dc8a375b73e749f512", "type": "github" }, "original": { @@ -70,11 +70,11 @@ ] }, "locked": { - "lastModified": 1692799911, - "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=", + "lastModified": 1694529238, + "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", "owner": "numtide", "repo": "flake-utils", - "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44", + "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", "type": "github" }, "original": { @@ -105,11 +105,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1694422566, - "narHash": "sha256-lHJ+A9esOz9vln/3CJG23FV6Wd2OoOFbDeEs4cMGMqc=", + "lastModified": 1695644571, + "narHash": "sha256-asS9dCCdlt1lPq0DLwkVBbVoEKuEuz+Zi3DG7pR/RxA=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "3a2786eea085f040a66ecde1bc3ddc7099f6dbeb", + "rev": "6500b4580c2a1f3d0f980d32d285739d8e156d92", "type": "github" }, "original": { @@ -139,11 +139,11 @@ ] }, "locked": { - "lastModified": 1694484610, - "narHash": "sha256-aeSDkp7fkAqtVjW3QUn7vq7BKNlFul/BiGgdv7rK+mA=", + "lastModified": 1695780708, + "narHash": "sha256-+0difm874E5ra98MeLxW8SfoxfL+Wzn3cLzKGGd2I4M=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "c5b977a7e6a295697fa1f9c42174fd6313b38df4", + "rev": "e04538a3e155ebe4d15a281559119f63d33116bb", "type": "github" }, "original": { From e3ff754ecf45fbf80f28be5cf24655d056f77736 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Wed, 27 Sep 2023 20:59:14 +0200 Subject: [PATCH 046/128] Fix JsonNull parsing from neon and pg drivers (#4291) * Fix JsonNull conversion errors * Update package versions * Use $__prisma_null as a marker value * qe: add 
json test failing with driver adapters (#4293) --------- Co-authored-by: Alexey Orlenko --- .../tests/queries/data_types/json.rs | 38 +++++++++++++++++++ .../js/adapter-neon/package.json | 2 +- .../js/adapter-neon/src/conversion.ts | 19 +++++++++- .../js/adapter-pg/package.json | 2 +- .../js/adapter-pg/src/conversion.ts | 19 +++++++++- .../js/driver-adapter-utils/package.json | 2 +- .../js/driver-adapter-utils/src/const.ts | 5 +++ .../js/driver-adapter-utils/src/index.ts | 2 +- query-engine/driver-adapters/src/proxy.rs | 13 +++++-- 9 files changed, 92 insertions(+), 10 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs index acc4fb0ec3b0..5a2ddc350d06 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/json.rs @@ -122,6 +122,44 @@ mod json { Ok(()) } + #[connector_test(capabilities(AdvancedJsonNullability))] + async fn json_null_must_not_be_confused_with_literal_string(runner: Runner) -> TestResult<()> { + create_row(&runner, r#"{ id: 1, json: "\"null\"" }"#).await?; + + match runner.protocol() { + query_engine_tests::EngineProtocol::Graphql => { + let res = run_query!(runner, r#"{ findManyTestModel { json } }"#); + + insta::assert_snapshot!( + res, + @r###"{"data":{"findManyTestModel":[{"json":"\"null\""}]}}"### + ); + } + query_engine_tests::EngineProtocol::Json => { + let res = runner + .query_json( + r#"{ + "modelName": "TestModel", + "action": "findMany", + "query": { + "selection": { + "json": true + } + } + }"#, + ) + .await?; + + insta::assert_snapshot!( + res.to_string(), + @r###"{"data":{"findManyTestModel":[{"json":{"$type":"Json","value":"\"null\""}}]}}"### + ); + } + } + + Ok(()) + } + async fn create_test_data(runner: &Runner) -> TestResult<()> { create_row(runner, r#"{ id: 1, json: "{}" }"#).await?; create_row(runner, r#"{ id: 2, json: "{\"a\":\"b\"}" }"#).await?; diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 281c37918b76..ec8b3d696136 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.4.0", + "version": "0.4.1", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index c05ad1f65108..7b08ba5bd1f8 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' import { types } from '@neondatabase/serverless' const NeonColumnType = types.builtins @@ -45,7 +45,24 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { } } +/** + * JsonNull are stored in JSON strings as the string "null", distinguishable from + * the `null` value which is used by the driver to represent the database NULL. 
+ * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value + * and this will lead to serde_json::Value::Null in Rust, which will be interpreted + * as DbNull. + * + * By converting "null" to JsonNullMarker, we can signal JsonNull on the Rust side and + * convert it to QuaintValue::Json(Some(Null)). + */ +function convertJson(json: string): unknown { + return (json === 'null') ? JsonNullMarker : JSON.parse(json) +} + // return string instead of JavaScript Date object types.setTypeParser(NeonColumnType.DATE, date => date) types.setTypeParser(NeonColumnType.TIME, date => date) types.setTypeParser(NeonColumnType.TIMESTAMP, date => date) + +types.setTypeParser(NeonColumnType.JSONB, convertJson) +types.setTypeParser(NeonColumnType.JSON, convertJson) diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 4274e4f4b6fd..4baf86068379 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.4.0", + "version": "0.4.1", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index 8943ae2d16b2..da145a5eb663 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -1,4 +1,4 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' +import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' import { types } from 'pg' const PgColumnType = types.builtins @@ -45,7 +45,24 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { } } +/** + * JsonNull are stored in JSON strings as the string "null", distinguishable from + * the `null` value which is used by the driver to represent the database NULL. + * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value + * and this will lead to serde_json::Value::Null in Rust, which will be interpreted + * as DbNull. + * + * By converting "null" to JsonNullMarker, we can signal JsonNull on the Rust side and + * convert it to QuaintValue::Json(Some(Null)). + */ +function convertJson(json: string): unknown { + return (json === 'null') ? 
JsonNullMarker : JSON.parse(json) +} + // return string instead of JavaScript Date object types.setTypeParser(PgColumnType.DATE, date => date) types.setTypeParser(PgColumnType.TIME, date => date) types.setTypeParser(PgColumnType.TIMESTAMP, date => date) + +types.setTypeParser(PgColumnType.JSONB, convertJson) +types.setTypeParser(PgColumnType.JSON, convertJson) diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 0f6afd2ce2b4..b52a24dfb7a9 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.5.0", + "version": "0.6.0", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts index f65104d5bf55..d4f418e47935 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts @@ -21,3 +21,8 @@ export const ColumnTypeEnum = { // 'Set': 14, // ... } as const + +// This string value paired with `ColumnType.Json` will be treated as JSON `null` +// when converting to a quaint value. This is to work around JS/JSON null values +// already being used to represent database NULLs. +export const JsonNullMarker = '$__prisma_null' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts index ee851d6961c6..e7c13be99966 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts @@ -1,5 +1,5 @@ export { bindAdapter } from './binder' -export { ColumnTypeEnum } from './const' +export { ColumnTypeEnum, JsonNullMarker } from './const' export { Debug } from './debug' export { ok, err, type Result } from './result' export type * from './types' diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 6352f3a96f29..466d18cd901d 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -279,10 +279,15 @@ fn js_value_to_quaint( serde_json::Value::Null => QuaintValue::DateTime(None), mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), }, - ColumnType::Json => match json_value { - serde_json::Value::Null => QuaintValue::Json(None), - json => QuaintValue::json(json), - }, + ColumnType::Json => { + match json_value { + // DbNull + serde_json::Value::Null => QuaintValue::Json(None), + // JsonNull + serde_json::Value::String(s) if s == "$__prisma_null" => QuaintValue::json(serde_json::Value::Null), + json => QuaintValue::json(json), + } + } ColumnType::Enum => match json_value { serde_json::Value::String(s) => QuaintValue::enum_variant(s), serde_json::Value::Null => QuaintValue::Enum(None), From 802670c29b2fcbf39740c9ba94842dc90483060c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= <miguel@prisma.io> Date: Wed, 27 Sep 2023 23:36:31 +0200 Subject: [PATCH 047/128] fix(driver-adapters): libsql queries containing aggregations (#4292) * Lenient parsing of numbers to work in turso. 
* Fix aggregation tests in turso * Update @prisma/adapter-libsql version * Update @prisma/driver-adapter-utils version --------- Co-authored-by: Alexey Orlenko --- .../js/adapter-libsql/package.json | 2 +- .../js/adapter-libsql/src/conversion.ts | 10 +--- .../js/driver-adapter-utils/package.json | 2 +- .../js/driver-adapter-utils/src/const.ts | 1 + query-engine/driver-adapters/src/proxy.rs | 50 +++++++++++++------ 5 files changed, 39 insertions(+), 26 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json index f99e7bb26226..163cbb35a1d0 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ b/query-engine/driver-adapters/js/adapter-libsql/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-libsql", - "version": "0.3.1", + "version": "0.4.0", "description": "Prisma's driver adapter for libsql and Turso", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts index 2ef0386df020..cf1af825db5e 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts @@ -101,7 +101,7 @@ function inferColumnType(value: NonNullable): ColumnType { case 'boolean': return ColumnTypeEnum.Boolean case 'number': - return inferNumericType(value) + return ColumnTypeEnum.UnknownNumber case 'object': return inferObjectType(value) default: @@ -109,14 +109,6 @@ function inferColumnType(value: NonNullable): ColumnType { } } -function inferNumericType(value: number): ColumnType { - if (Number.isInteger(value)) { - return ColumnTypeEnum.Int64 - } else { - return ColumnTypeEnum.Double - } -} - function inferObjectType(value: {}): ColumnType { if (isArrayBuffer(value)) { return ColumnTypeEnum.Bytes diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index b52a24dfb7a9..ee3487053d0e 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.6.0", + "version": "0.7.0", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts index d4f418e47935..a313812eaf5b 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts @@ -20,6 +20,7 @@ export const ColumnTypeEnum = { // 'Set': 14, // 'Array': 15, // ... + 'UnknownNumber': 128 } as const // This string value paired with `ColumnType.Json` will be treated as JSON `null` diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 466d18cd901d..7244d88db393 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -92,69 +92,78 @@ pub enum ColumnType { /// - INT16 (SMALLINT) -> e.g. `32767` /// - INT24 (MEDIUMINT) -> e.g. `8388607` /// - INT32 (INT) -> e.g. `2147483647` - Int32, + Int32 = 0, /// The following PlanetScale type IDs are mapped into Int64: /// - INT64 (BIGINT) -> e.g. 
`"9223372036854775807"` (String-encoded) - Int64, + Int64 = 1, /// The following PlanetScale type IDs are mapped into Float: /// - FLOAT32 (FLOAT) -> e.g. `3.402823466` - Float, + Float = 2, /// The following PlanetScale type IDs are mapped into Double: /// - FLOAT64 (DOUBLE) -> e.g. `1.7976931348623157` - Double, + Double = 3, /// The following PlanetScale type IDs are mapped into Numeric: /// - DECIMAL (DECIMAL) -> e.g. `"99999999.99"` (String-encoded) - Numeric, + Numeric = 4, /// The following PlanetScale type IDs are mapped into Boolean: /// - BOOLEAN (BOOLEAN) -> e.g. `1` - Boolean, + Boolean = 5, /// The following PlanetScale type IDs are mapped into Char: /// - CHAR (CHAR) -> e.g. `"c"` (String-encoded) - Char, + Char = 6, /// The following PlanetScale type IDs are mapped into Text: /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) /// - VARCHAR (VARCHAR) -> e.g. `"foo"` (String-encoded) - Text, + Text = 7, /// The following PlanetScale type IDs are mapped into Date: /// - DATE (DATE) -> e.g. `"2023-01-01"` (String-encoded, yyyy-MM-dd) - Date, + Date = 8, /// The following PlanetScale type IDs are mapped into Time: /// - TIME (TIME) -> e.g. `"23:59:59"` (String-encoded, HH:mm:ss) - Time, + Time = 9, /// The following PlanetScale type IDs are mapped into DateTime: /// - DATETIME (DATETIME) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) /// - TIMESTAMP (TIMESTAMP) -> e.g. `"2023-01-01 23:59:59"` (String-encoded, yyyy-MM-dd HH:mm:ss) - DateTime, + DateTime = 10, /// The following PlanetScale type IDs are mapped into Json: /// - JSON (JSON) -> e.g. `"{\"key\": \"value\"}"` (String-encoded) - Json, + Json = 11, /// The following PlanetScale type IDs are mapped into Enum: /// - ENUM (ENUM) -> e.g. `"foo"` (String-encoded) - Enum, + Enum = 12, /// The following PlanetScale type IDs are mapped into Bytes: /// - BLOB (BLOB) -> e.g. `"\u0012"` (String-encoded) /// - VARBINARY (VARBINARY) -> e.g. `"\u0012"` (String-encoded) /// - BINARY (BINARY) -> e.g. `"\u0012"` (String-encoded) /// - GEOMETRY (GEOMETRY) -> e.g. `"\u0012"` (String-encoded) - Bytes, + Bytes = 13, /// The following PlanetScale type IDs are mapped into Set: /// - SET (SET) -> e.g. `"foo,bar"` (String-encoded, comma-separated) /// This is currently unhandled, and will panic if encountered. - Set, + Set = 14, + + // Below there are custom types that don't have a 1:1 translation with a quaint::Value. + // enum variant. + /// UnknownNumber is used when the type of the column is a number but of unknown particular type + /// and precision. 
+ /// + /// It's used by some driver adapters, like libsql, to return aggregation values like AVG or + /// COUNT, and it can be mapped to either Int64 or Double. + UnknownNumber = 128, } #[napi(object)] @@ -308,6 +317,17 @@ fn js_value_to_quaint( column_name, mismatch ), }, + ColumnType::UnknownNumber => match json_value { + serde_json::Value::Number(n) => n + .as_i64() + .map(|v| QuaintValue::Int64(Some(v))) + .or(n.as_f64().map(|v| QuaintValue::Double(Some(v)))) + .expect("number must be an i64 or f64"), + mismatch => panic!( + "Expected either an i64 or an f64 in column {}, found {}", + column_name, mismatch + ), + }, unimplemented => { todo!("support column type {:?} in column {}", unimplemented, column_name) } From 743c426239730872bc6753d737e705f80619fd97 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko <alex@aqrln.net> Date: Wed, 27 Sep 2023 23:38:38 +0200 Subject: [PATCH 048/128] driver-adapters: fix Transaction.dispose for pg and neon (#4295) --- query-engine/driver-adapters/js/adapter-neon/package.json | 2 +- query-engine/driver-adapters/js/adapter-neon/src/neon.ts | 8 +++++++- query-engine/driver-adapters/js/adapter-pg/package.json | 2 +- query-engine/driver-adapters/js/adapter-pg/src/pg.ts | 8 +++++++- 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index ec8b3d696136..3c0089a4757d 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.4.1", + "version": "0.4.2", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index 3ed0460cf858..cd0731240488 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -80,6 +80,8 @@ class NeonWsQueryable extends NeonQ } class NeonTransaction extends NeonWsQueryable implements Transaction { + finished = false + constructor( client: neon.PoolClient, readonly options: TransactionOptions, ) { super(client) } @@ -90,6 +92,7 @@ class NeonTransaction extends NeonWsQueryable implements Transa async commit(): Promise> { debug(`[js::commit]`) + this.finished = true this.client.release() return Promise.resolve(ok(undefined)) } @@ -97,12 +100,15 @@ class NeonTransaction extends NeonWsQueryable implements Transa async rollback(): Promise> { debug(`[js::rollback]`) + this.finished = true this.client.release() return Promise.resolve(ok(undefined)) } dispose(): Result { - this.client.release() + if (!this.finished) { + this.client.release() + } return ok(undefined) } } diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 4baf86068379..68cdad5ea715 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.4.1", + "version": "0.4.2", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index b315dabc8273..ae2df4e7a6b6 100644 --- 
a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -76,6 +76,8 @@ class PgQueryable implements Quer } class PgTransaction extends PgQueryable implements Transaction { + finished = false + constructor(client: pg.PoolClient, readonly options: TransactionOptions) { super(client) } @@ -83,6 +85,7 @@ class PgTransaction extends PgQueryable implements Transactio async commit(): Promise> { debug(`[js::commit]`) + this.finished = true this.client.release() return ok(undefined) } @@ -90,12 +93,15 @@ class PgTransaction extends PgQueryable implements Transactio async rollback(): Promise> { debug(`[js::rollback]`) + this.finished = true this.client.release() return ok(undefined) } dispose(): Result { - this.client.release() + if (!this.finished) { + this.client.release() + } return ok(undefined) } } From f5cd0c134750cfe64ba6d7ef185e01d65a193bf7 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 28 Sep 2023 09:22:32 +0200 Subject: [PATCH 049/128] driver-adapters: fix getLogs rpc handler in test executor (#4297) Fixes 4 tests: * `new::native_upsert::native_upsert::should_upsert_on_compound_id` * `new::native_upsert::native_upsert::should_upsert_on_id` * `new::native_upsert::native_upsert::should_upsert_on_single_unique` * `new::native_upsert::native_upsert::should_upsert_on_unique_list` --- .../js/connector-test-kit-executor/src/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index 9e0c38678a6f..68664272a6ce 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -67,7 +67,7 @@ async function main(): Promise { const state: Record = {} async function handleRequest(method: string, params: unknown): Promise { @@ -177,7 +177,7 @@ async function handleRequest(method: string, params: unknown): Promise } const castParams = params as GetLogsPayload - return state[castParams.schemaId].queryLogs ?? [] + return state[castParams.schemaId].logs } default: { throw new Error(`Unknown method: \`${method}\``) From b6fe3c09d21cb7b5250ea6637f9ae1c027a1458c Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 28 Sep 2023 10:07:42 +0200 Subject: [PATCH 050/128] adapter-libsql: fix failing apply_number_ops_for_int test (#4296) --- .../js/adapter-libsql/src/conversion.ts | 12 +++++++++++- .../driver-adapters/js/adapter-libsql/src/libsql.ts | 2 +- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts index cf1af825db5e..b2fa4b5b4095 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts @@ -125,7 +125,7 @@ class UnexpectedTypeError extends Error { } } -export function mapRow(row: Row): unknown[] { +export function mapRow(row: Row, columnTypes: ColumnType[]): unknown[] { // `Row` doesn't have map, so we copy the array once and modify it in-place // to avoid allocating and copying twice if we used `Array.from(row).map(...)`. 
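// Entries whose raw libsql representation does not match the declared column
  // type (e.g. ArrayBuffers, or fractional numbers in integer columns) are
  // patched in place below.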
const result: unknown[] = Array.from(row) @@ -145,6 +145,16 @@ export function mapRow(row: Row): unknown[] { if (isArrayBuffer(value)) { result[i] = Array.from(new Uint8Array(value)) } + + // If an integer is required and the current number isn't one, + // discard the fractional part. + if ( + typeof value === 'number' && + (columnTypes[i] === ColumnTypeEnum.Int32 || columnTypes[i] === ColumnTypeEnum.Int64) && + !Number.isInteger(value) + ) { + result[i] = Math.trunc(value) + } } return result diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts index fab2480ea5b8..5d104e8e2949 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts @@ -40,7 +40,7 @@ class LibSqlQueryable implements const resultSet: ResultSet = { columnNames: columns, columnTypes, - rows: rows.map(mapRow), + rows: rows.map((row) => mapRow(row, columnTypes)), } return ok(resultSet) From 147b77a820feb1786449562fdffa0c015193eebc Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 28 Sep 2023 10:49:53 +0200 Subject: [PATCH 051/128] adapter-libsql: update version (#4298) --- query-engine/driver-adapters/js/adapter-libsql/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json index 163cbb35a1d0..900ea148320d 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ b/query-engine/driver-adapters/js/adapter-libsql/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-libsql", - "version": "0.4.0", + "version": "0.4.1", "description": "Prisma's driver adapter for libsql and Turso", "main": "dist/index.js", "module": "dist/index.mjs", From 763a9b9ea9fd485cd836b7cf93a6186c1d2a1756 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Thu, 28 Sep 2023 13:36:10 +0200 Subject: [PATCH 052/128] ci(driver-adapter-smoke-tests): Update job name to make adapter visible in job popover (#4299) --- .github/workflows/driver-adapter-smoke-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml index ac4bc4c4fae3..eb80a9ef9ce8 100644 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ b/.github/workflows/driver-adapter-smoke-tests.yml @@ -13,7 +13,7 @@ on: jobs: driver-adapter-smoke-tests: - name: Smoke tests for adapter ${{ matrix.adapter }} + name: ${{ matrix.adapter }} strategy: fail-fast: false From 109b8f121d606613e20abfe67c2348a5d636b6df Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 28 Sep 2023 17:30:48 +0200 Subject: [PATCH 053/128] driver-adapters: get rid of panics in conversions (#4300) * Use TryFrom instead of From * Return errors instead of panics * Don't ignore int32 overflows * Update tests to unwrap results --- query-engine/driver-adapters/src/proxy.rs | 333 ++++++++++-------- query-engine/driver-adapters/src/queryable.rs | 3 +- 2 files changed, 193 insertions(+), 143 deletions(-) diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 7244d88db393..4bf4bc56ad7f 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -1,4 +1,4 @@ -use core::panic; +use std::borrow::Cow; use std::str::FromStr; use crate::async_js_function::AsyncJsFunction; @@ -9,7 +9,10 @@ use 
napi::threadsafe_function::{ErrorStrategy, ThreadsafeFunction}; use napi::{JsObject, JsString}; use napi_derive::napi; use quaint::connector::ResultSet as QuaintResultSet; -use quaint::Value as QuaintValue; +use quaint::{ + error::{Error as QuaintError, ErrorKind}, + Value as QuaintValue, +}; // TODO(jkomyno): import these 3rd-party crates from the `quaint-core` crate. use bigdecimal::{BigDecimal, FromPrimitive}; @@ -173,13 +176,27 @@ pub struct Query { pub args: Vec, } +fn conversion_error(args: &std::fmt::Arguments) -> QuaintError { + let msg = match args.as_str() { + Some(s) => Cow::Borrowed(s), + None => Cow::Owned(args.to_string()), + }; + QuaintError::builder(ErrorKind::ConversionError(msg)).build() +} + +macro_rules! conversion_error { + ($($arg:tt)*) => { + conversion_error(&format_args!($($arg)*)) + }; +} + /// Handle data-type conversion from a JSON value to a Quaint value. /// This is used for most data types, except those that require connector-specific handling, e.g., `ColumnType::Boolean`. fn js_value_to_quaint( json_value: serde_json::Value, column_type: ColumnType, column_name: &str, -) -> QuaintValue<'static> { +) -> quaint::Result> { // Note for the future: it may be worth revisiting how much bloat so many panics with different static // strings add to the compiled artefact, and in case we should come up with a restricted set of panic // messages, or even find a way of removing them altogether. @@ -187,146 +204,174 @@ fn js_value_to_quaint( ColumnType::Int32 => match json_value { serde_json::Value::Number(n) => { // n.as_i32() is not implemented, so we need to downcast from i64 instead - QuaintValue::int32(n.as_i64().expect("number must be an i32") as i32) - } - serde_json::Value::String(s) => { - let n = s.parse::().expect("string-encoded number must be an i32"); - QuaintValue::int32(n) + n.as_i64() + .ok_or(conversion_error!("number must be an integer")) + .and_then(|n| -> quaint::Result { + n.try_into() + .map_err(|e| conversion_error!("cannot convert {n} to i32: {e}")) + }) + .map(QuaintValue::int32) } - serde_json::Value::Null => QuaintValue::Int32(None), - mismatch => panic!("Expected an i32 number in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => s + .parse::() + .map(QuaintValue::int32) + .map_err(|e| conversion_error!("string-encoded number must be an i32, got {s}: {e}")), + serde_json::Value::Null => Ok(QuaintValue::Int32(None)), + mismatch => Err(conversion_error!( + "expected an i32 number in column {column_name}, found {mismatch}" + )), }, ColumnType::Int64 => match json_value { - serde_json::Value::Number(n) => QuaintValue::int64(n.as_i64().expect("number must be an i64")), - serde_json::Value::String(s) => { - let n = s.parse::().expect("string-encoded number must be an i64"); - QuaintValue::int64(n) - } - serde_json::Value::Null => QuaintValue::Int64(None), - mismatch => panic!( - "Expected a string or number in column {}, found {}", - column_name, mismatch - ), + serde_json::Value::Number(n) => n + .as_i64() + .map(QuaintValue::int64) + .ok_or(conversion_error!("number must be an i64, got {n}")), + serde_json::Value::String(s) => s + .parse::() + .map(QuaintValue::int64) + .map_err(|e| conversion_error!("string-encoded number must be an i64, got {s}: {e}")), + serde_json::Value::Null => Ok(QuaintValue::Int64(None)), + mismatch => Err(conversion_error!( + "expected a string or number in column {column_name}, found {mismatch}" + )), }, ColumnType::Float => match json_value { // n.as_f32() is not implemented, so we need to 
downcast from f64 instead. // We assume that the JSON value is a valid f32 number, but we check for overflows anyway. - serde_json::Value::Number(n) => QuaintValue::float(f64_to_f32(n.as_f64().expect("number must be a f64"))), - serde_json::Value::Null => QuaintValue::Float(None), - mismatch => panic!("Expected a f32 number in column {}, found {}", column_name, mismatch), + serde_json::Value::Number(n) => n + .as_f64() + .ok_or(conversion_error!("number must be a float, got {n}")) + .and_then(f64_to_f32) + .map(QuaintValue::float), + serde_json::Value::Null => Ok(QuaintValue::Float(None)), + mismatch => Err(conversion_error!( + "expected an f32 number in column {column_name}, found {mismatch}" + )), }, ColumnType::Double => match json_value { - serde_json::Value::Number(n) => QuaintValue::double(n.as_f64().expect("number must be a f64")), - serde_json::Value::Null => QuaintValue::Double(None), - mismatch => panic!("Expected a f64 number in column {}, found {}", column_name, mismatch), + serde_json::Value::Number(n) => n + .as_f64() + .map(QuaintValue::double) + .ok_or(conversion_error!("number must be a f64, got {n}")), + serde_json::Value::Null => Ok(QuaintValue::Double(None)), + mismatch => Err(conversion_error!( + "expected an f64 number in column {column_name}, found {mismatch}" + )), }, ColumnType::Numeric => match json_value { - serde_json::Value::String(s) => { - let decimal = BigDecimal::from_str(&s).expect("invalid numeric value"); - QuaintValue::numeric(decimal) - } - serde_json::Value::Number(n) => QuaintValue::numeric( - n.as_f64() - .and_then(BigDecimal::from_f64) - .expect("number must be an f64"), - ), - serde_json::Value::Null => QuaintValue::Numeric(None), - mismatch => panic!( - "Expected a string-encoded number in column {}, found {}", - column_name, mismatch - ), + serde_json::Value::String(s) => BigDecimal::from_str(&s) + .map(QuaintValue::numeric) + .map_err(|e| conversion_error!("invalid numeric value when parsing {s}: {e}")), + serde_json::Value::Number(n) => n + .as_f64() + .and_then(BigDecimal::from_f64) + .ok_or(conversion_error!("number must be an f64, got {n}")) + .map(QuaintValue::numeric), + serde_json::Value::Null => Ok(QuaintValue::Numeric(None)), + mismatch => Err(conversion_error!( + "expected a string-encoded number in column {column_name}, found {mismatch}", + )), }, ColumnType::Boolean => match json_value { - serde_json::Value::Bool(b) => QuaintValue::boolean(b), - serde_json::Value::Null => QuaintValue::Boolean(None), - serde_json::Value::Number(n) => QuaintValue::boolean(match n.as_i64() { - Some(0) => false, - Some(1) => true, - _ => panic!("expected number-encoded boolean to be 0 or 1, got {n}"), - }), - serde_json::Value::String(s) => QuaintValue::boolean(match s.as_str() { - "false" | "FALSE" | "0" => false, - "true" | "TRUE" | "1" => true, - _ => panic!("expected string-encoded boolean, got \"{s}\""), - }), - mismatch => panic!("Expected a boolean in column {}, found {}", column_name, mismatch), + serde_json::Value::Bool(b) => Ok(QuaintValue::boolean(b)), + serde_json::Value::Null => Ok(QuaintValue::Boolean(None)), + serde_json::Value::Number(n) => match n.as_i64() { + Some(0) => Ok(QuaintValue::boolean(false)), + Some(1) => Ok(QuaintValue::boolean(true)), + _ => Err(conversion_error!( + "expected number-encoded boolean to be 0 or 1, got {n}" + )), + }, + serde_json::Value::String(s) => match s.as_str() { + "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), + "true" | "TRUE" | "1" => Ok(QuaintValue::boolean(true)), + _ => 
Err(conversion_error!("expected string-encoded boolean, got {s}")), + }, + mismatch => Err(conversion_error!( + "expected a boolean in column {column_name}, found {mismatch}" + )), }, ColumnType::Char => match json_value { - serde_json::Value::String(s) => QuaintValue::Char(s.chars().next()), - serde_json::Value::Null => QuaintValue::Char(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => Ok(QuaintValue::Char(s.chars().next())), + serde_json::Value::Null => Ok(QuaintValue::Char(None)), + mismatch => Err(conversion_error!( + "expected a string in column {column_name}, found {mismatch}" + )), }, ColumnType::Text => match json_value { - serde_json::Value::String(s) => QuaintValue::text(s), - serde_json::Value::Null => QuaintValue::Text(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => Ok(QuaintValue::text(s)), + serde_json::Value::Null => Ok(QuaintValue::Text(None)), + mismatch => Err(conversion_error!( + "expected a string in column {column_name}, found {mismatch}" + )), }, ColumnType::Date => match json_value { - serde_json::Value::String(s) => { - let date = NaiveDate::parse_from_str(&s, "%Y-%m-%d").expect("Expected a date string"); - QuaintValue::date(date) - } - serde_json::Value::Null => QuaintValue::Date(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") + .map(QuaintValue::date) + .map_err(|_| conversion_error!("expected a date string, got {s}")), + serde_json::Value::Null => Ok(QuaintValue::Date(None)), + mismatch => Err(conversion_error!( + "expected a string in column {column_name}, found {mismatch}" + )), }, ColumnType::Time => match json_value { - serde_json::Value::String(s) => { - let time = NaiveTime::parse_from_str(&s, "%H:%M:%S").expect("Expected a time string"); - QuaintValue::time(time) - } - serde_json::Value::Null => QuaintValue::Time(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S") + .map(QuaintValue::time) + .map_err(|_| conversion_error!("expected a time string, got {s}")), + serde_json::Value::Null => Ok(QuaintValue::Time(None)), + mismatch => Err(conversion_error!( + "expected a string in column {column_name}, found {mismatch}" + )), }, ColumnType::DateTime => match json_value { - serde_json::Value::String(s) => { - let datetime = chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") - .map(|dt| DateTime::from_utc(dt, Utc)) - .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) - .unwrap_or_else(|_| panic!("Expected a datetime string, found {:?}", &s)); - QuaintValue::datetime(datetime) - } - serde_json::Value::Null => QuaintValue::DateTime(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") + .map(|dt| DateTime::from_utc(dt, Utc)) + .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) + .map(QuaintValue::datetime) + .map_err(|_| conversion_error!("expected a datetime string, found {s}")), + serde_json::Value::Null => Ok(QuaintValue::DateTime(None)), + mismatch => Err(conversion_error!( + "expected a string in column {column_name}, found {mismatch}" + )), }, ColumnType::Json => { match 
json_value { // DbNull - serde_json::Value::Null => QuaintValue::Json(None), + serde_json::Value::Null => Ok(QuaintValue::Json(None)), // JsonNull - serde_json::Value::String(s) if s == "$__prisma_null" => QuaintValue::json(serde_json::Value::Null), - json => QuaintValue::json(json), + serde_json::Value::String(s) if s == "$__prisma_null" => Ok(QuaintValue::json(serde_json::Value::Null)), + json => Ok(QuaintValue::json(json)), } } ColumnType::Enum => match json_value { - serde_json::Value::String(s) => QuaintValue::enum_variant(s), - serde_json::Value::Null => QuaintValue::Enum(None), - mismatch => panic!("Expected a string in column {}, found {}", column_name, mismatch), + serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), + serde_json::Value::Null => Ok(QuaintValue::Enum(None)), + mismatch => Err(conversion_error!( + "expected a string in column {column_name}, found {mismatch}" + )), }, ColumnType::Bytes => match json_value { - serde_json::Value::String(s) => QuaintValue::Bytes(Some(s.into_bytes().into())), - serde_json::Value::Array(array) => { - let bytes: Option> = array - .iter() - .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) - .collect(); - QuaintValue::Bytes(Some(bytes.expect("elements of the array must be u8"))) - } - serde_json::Value::Null => QuaintValue::Bytes(None), - mismatch => panic!( - "Expected a string or an array in column {}, found {}", - column_name, mismatch - ), + serde_json::Value::String(s) => Ok(QuaintValue::Bytes(Some(s.into_bytes().into()))), + serde_json::Value::Array(array) => array + .iter() + .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) + .collect::>>() + .map(QuaintValue::bytes) + .ok_or(conversion_error!("elements of the array must be u8")), + serde_json::Value::Null => Ok(QuaintValue::Bytes(None)), + mismatch => Err(conversion_error!( + "expected a string or an array in column {column_name}, found {mismatch}", + )), }, ColumnType::UnknownNumber => match json_value { serde_json::Value::Number(n) => n .as_i64() - .map(|v| QuaintValue::Int64(Some(v))) - .or(n.as_f64().map(|v| QuaintValue::Double(Some(v)))) - .expect("number must be an i64 or f64"), - mismatch => panic!( - "Expected either an i64 or an f64 in column {}, found {}", - column_name, mismatch - ), + .map(QuaintValue::int64) + .or(n.as_f64().map(QuaintValue::double)) + .ok_or(conversion_error!("number must be an i64 or f64, got {n}")), + mismatch => Err(conversion_error!( + "expected either an i64 or an f64 in column {column_name}, found {mismatch}", + )), }, unimplemented => { todo!("support column type {:?} in column {}", unimplemented, column_name) @@ -334,8 +379,10 @@ fn js_value_to_quaint( } } -impl From for QuaintResultSet { - fn from(js_result_set: JSResultSet) -> Self { +impl TryFrom for QuaintResultSet { + type Error = quaint::error::Error; + + fn try_from(js_result_set: JSResultSet) -> Result { let JSResultSet { rows, column_names, @@ -352,7 +399,7 @@ impl From for QuaintResultSet { let column_type = column_types[i]; let column_name = column_names[i].as_str(); - quaint_row.push(js_value_to_quaint(row, column_type, column_name)); + quaint_row.push(js_value_to_quaint(row, column_type, column_name)?); } quaint_rows.push(quaint_row); @@ -368,7 +415,7 @@ impl From for QuaintResultSet { quaint_result_set.set_last_insert_id(last_insert_id); } - quaint_result_set + Ok(quaint_result_set) } } @@ -451,12 +498,14 @@ impl Drop for TransactionProxy { /// Coerce a `f64` to a `f32`, asserting that the conversion is 
lossless. /// Note that, when overflow occurs during conversion, the result is `infinity`. -fn f64_to_f32(x: f64) -> f32 { +fn f64_to_f32(x: f64) -> quaint::Result { let y = x as f32; - assert_eq!(x.is_finite(), y.is_finite(), "f32 overflow during conversion"); - - y + if x.is_finite() == y.is_finite() { + Ok(y) + } else { + Err(conversion_error!("f32 overflow during conversion")) + } } #[cfg(test)] @@ -469,7 +518,7 @@ mod proxy_test { #[track_caller] fn test_null(quaint_none: QuaintValue, column_type: ColumnType) { let json_value = serde_json::Value::Null; - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, quaint_none); } @@ -483,25 +532,25 @@ mod proxy_test { // 0 let n: i32 = 0; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); // max let n: i32 = i32::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); // min let n: i32 = i32::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); // string-encoded let n = i32::MAX; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); } @@ -515,25 +564,25 @@ mod proxy_test { // 0 let n: i64 = 0; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); // max let n: i64 = i64::MAX; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); // min let n: i64 = i64::MIN; let json_value = serde_json::Value::String(n.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); // number-encoded let n: i64 = (1 << 53) - 1; // max JS safe integer let json_value = serde_json::Value::Number(serde_json::Number::from(n)); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); } @@ -547,19 +596,19 @@ mod proxy_test { // 0 let n: f32 = 0.0; let json_value = 
serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Float(Some(n))); // max let n: f32 = f32::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Float(Some(n))); // min let n: f32 = f32::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Float(Some(n))); } @@ -573,19 +622,19 @@ mod proxy_test { // 0 let n: f64 = 0.0; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Double(Some(n))); // max let n: f64 = f64::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Double(Some(n))); // min let n: f64 = f64::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Double(Some(n))); } @@ -600,14 +649,14 @@ mod proxy_test { let decimal = BigDecimal::new(BigInt::parse_bytes(b"123499", 10).unwrap(), 2); let json_value = serde_json::Value::String(n_as_string.into()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Numeric(Some(decimal))); let n_as_string = "1234.999999"; let decimal = BigDecimal::new(BigInt::parse_bytes(b"1234999999", 10).unwrap(), 6); let json_value = serde_json::Value::String(n_as_string.into()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Numeric(Some(decimal))); } @@ -620,13 +669,13 @@ mod proxy_test { // true for truthy_value in [json!(true), json!(1), json!("true"), json!("TRUE"), json!("1")] { - let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Boolean(Some(true))); } // false for falsy_value in [json!(false), json!(0), json!("false"), json!("FALSE"), json!("0")] { - let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name").unwrap(); 
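        // As in the truthy loop above, each accepted falsy encoding (JSON
        // false, 0, "false", "FALSE", "0") coerces to `false`; with the now
        // fallible conversion, any other input is expected to surface as a
        // conversion error rather than a panic.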
assert_eq!(quaint_value, QuaintValue::Boolean(Some(false))); } } @@ -640,7 +689,7 @@ mod proxy_test { let c = 'c'; let json_value = serde_json::Value::String(c.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Char(Some(c))); } @@ -653,7 +702,7 @@ mod proxy_test { let s = "some text"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Text(Some(s.into()))); } @@ -666,7 +715,7 @@ mod proxy_test { let s = "2023-01-01"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); assert_eq!(quaint_value, QuaintValue::Date(Some(date))); @@ -681,7 +730,7 @@ mod proxy_test { let s = "23:59:59"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); assert_eq!(quaint_value, QuaintValue::Time(Some(time))); @@ -696,7 +745,7 @@ mod proxy_test { let s = "2023-01-01 23:59:59.415"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) .unwrap() @@ -707,7 +756,7 @@ mod proxy_test { let s = "2023-01-01 23:59:59.123456"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) .unwrap() @@ -718,7 +767,7 @@ mod proxy_test { let s = "2023-01-01 23:59:59"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let datetime = NaiveDate::from_ymd_opt(2023, 1, 1) .unwrap() @@ -745,7 +794,7 @@ mod proxy_test { ] }); let json_value = json.clone(); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Json(Some(json.clone()))); } @@ -758,7 +807,7 @@ mod proxy_test { let s = "some enum variant"; let json_value = serde_json::Value::String(s.to_string()); - let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!(quaint_value, QuaintValue::Enum(Some(s.into()))); } } diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index 310f69641696..398286c8ca05 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ 
b/query-engine/driver-adapters/src/queryable.rs
@@ -150,7 +150,8 @@ impl JsBaseQueryable {
         let len = result_set.len();
         let _deserialization_span = info_span!("js:query:result", user_facing = true, "length" = %len).entered();

-        Ok(ResultSet::from(result_set))
+
+        result_set.try_into()
     }

     async fn do_execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result<u64> {

From d26d585c0f5e948a8363df95c1afa4f349f732c8 Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Fri, 29 Sep 2023 11:26:26 +0200
Subject: [PATCH 054/128] adapter-libsql: update README.md and include link to
 post (#4303)

* adapter-libsql: update README.md and include link to post

* adapter-libsql: update version and description
---
 query-engine/driver-adapters/js/adapter-libsql/README.md    | 4 +++-
 query-engine/driver-adapters/js/adapter-libsql/package.json | 4 ++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md
index 2a869cbf5c78..219200af2080 100644
--- a/query-engine/driver-adapters/js/adapter-libsql/README.md
+++ b/query-engine/driver-adapters/js/adapter-libsql/README.md
@@ -1,3 +1,5 @@
 # @prisma/adapter-libsql

-**INTERNAL PACKAGE, DO NOT USE**
+Prisma driver adapter for Turso and libSQL.
+
+See https://prisma.io/turso for details.
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json
index 900ea148320d..dc4466d826fa 100644
--- a/query-engine/driver-adapters/js/adapter-libsql/package.json
+++ b/query-engine/driver-adapters/js/adapter-libsql/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@prisma/adapter-libsql",
-  "version": "0.4.1",
-  "description": "Prisma's driver adapter for libsql and Turso",
+  "version": "0.4.2",
+  "description": "Prisma's driver adapter for libSQL and Turso",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
   "types": "dist/index.d.ts",

From 2b2ffb48c15c4644d39591fed77b43f8af89ad56 Mon Sep 17 00:00:00 2001
From: Sophie <29753584+Druue@users.noreply.github.com>
Date: Fri, 29 Sep 2023 11:57:30 +0200
Subject: [PATCH 055/128] feat(fmt): Add quick-fix to add schemas defined in
 `@@schema` to the datasource `schemas` property (#4254)

---
 prisma-fmt/src/code_actions.rs              | 23 ++++++-
 prisma-fmt/src/code_actions/multi_schema.rs | 68 +++++++++++++++++++
 .../result.json                             | 41 +++++++++++
 .../schema.prisma                           | 17 +++++
 .../result.json                             | 41 +++++++++++
 .../schema.prisma                           | 16 +++++
 prisma-fmt/tests/code_actions/tests.rs      |  2 +
 7 files changed, 207 insertions(+), 1 deletion(-)
 create mode 100644 prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json
 create mode 100644 prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma
 create mode 100644 prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json
 create mode 100644 prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma

diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs
index 1037192b1a93..27fdeddad159 100644
--- a/prisma-fmt/src/code_actions.rs
+++ b/prisma-fmt/src/code_actions.rs
@@ -48,7 +48,9 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec<CodeActionOrCommand> {
                 validated_schema.db.source(),
                 config,
                 model,
-            )
+            );
+
+            multi_schema::add_schema_to_schemas(&mut actions, &params, validated_schema.db.source(), config, model);
         }

         if matches!(datasource, Some(ds) if ds.active_provider == "mongodb") {
@@ -205,6 +207,25 @@ fn format_field_attribute(attribute: &str) -> String {
     format!(" {attribute}\n")
 }

+fn format_block_property(
+    property: &str,
+    value: &str,
+    indentation: IndentationType,
+    newline: NewlineType,
+    has_properties: bool,
+) -> String {
+    let separator = if has_properties { newline.as_ref() } else { "" };
+
+    // * (soph) I don't super like needing to prefix this with ')' but
+    // * it would require further updating how we parse spans
+    // todo: update so that we have concepts for:
+    // todo:   - The entire url span
+    // todo:   - The url arg span :: currently, url_span only represents this.
+    let formatted_attribute = format!(r#"){separator}{indentation}{property} = ["{value}"]"#);
+
+    formatted_attribute
+}
+
 fn format_block_attribute(
     attribute: &str,
     indentation: IndentationType,
diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs
index de117a867b06..c797245b87e9 100644
--- a/prisma-fmt/src/code_actions/multi_schema.rs
+++ b/prisma-fmt/src/code_actions/multi_schema.rs
@@ -1,5 +1,6 @@
 use lsp_types::{CodeAction, CodeActionKind, CodeActionOrCommand, CodeActionParams};
 use psl::{
+    diagnostics::Span,
     parser_database::walkers::{EnumWalker, ModelWalker},
     schema_ast::ast::WithSpan,
     Configuration,
@@ -108,3 +109,70 @@ pub(super) fn add_schema_block_attribute_enum(

     actions.push(CodeActionOrCommand::CodeAction(action))
 }
+
+pub(super) fn add_schema_to_schemas(
+    actions: &mut Vec<CodeActionOrCommand>,
+    params: &CodeActionParams,
+    schema: &str,
+    config: &Configuration,
+    model: ModelWalker<'_>,
+) {
+    let datasource = match config.datasources.first() {
+        Some(ds) => ds,
+        None => return,
+    };
+
+    let span_diagnostics =
+        match super::diagnostics_for_span(schema, &params.context.diagnostics, model.ast_model().span()) {
+            Some(sd) => sd,
+            None => return,
+        };
+
+    let diagnostics = match super::filter_diagnostics(span_diagnostics, "This schema is not defined in the datasource.")
+    {
+        Some(value) => value,
+        None => return,
+    };
+
+    let edit = match datasource.schemas_span {
+        Some(span) => {
+            let formatted_attribute = format!(r#", "{}""#, model.schema_name().unwrap());
+
+            super::create_text_edit(
+                schema,
+                formatted_attribute,
+                true,
+                // todo: update spans so that we can just append to the end of the _inside_ of the array.
Instead of needing to re-append the `]` or taking the span end -1 + Span::new(span.start, span.end - 1), + params, + ) + } + None => { + let has_properties = datasource.provider_defined() + || datasource.url_defined() + || datasource.direct_url_defined() + || datasource.shadow_url_defined() + || datasource.relation_mode_defined() + || datasource.schemas_defined(); + + let formatted_attribute = super::format_block_property( + "schemas", + model.schema_name().unwrap(), + model.indentation(), + model.newline(), + has_properties, + ); + + super::create_text_edit(schema, formatted_attribute, true, datasource.url_span, params) + } + }; + + let action = CodeAction { + title: String::from("Add schema to schemas"), + kind: Some(CodeActionKind::QUICKFIX), + edit: Some(edit), + diagnostics: Some(diagnostics), + ..Default::default() + }; + + actions.push(CodeActionOrCommand::CodeAction(action)) +} diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json new file mode 100644 index 000000000000..26a243e4f105 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json @@ -0,0 +1,41 @@ +[ + { + "title": "Add schema to schemas", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { + "line": 15, + "character": 13 + }, + "end": { + "line": 15, + "character": 19 + } + }, + "severity": 1, + "message": "This schema is not defined in the datasource. Read more on `@@schema` at https://pris.ly/d/multi-schema" + } + ], + "edit": { + "changes": { + "file:///path/to/schema.prisma": [ + { + "range": { + "start": { + "line": 8, + "character": 27 + }, + "end": { + "line": 8, + "character": 28 + } + }, + "newText": ", \"base\"" + } + ] + } + } + } +] \ No newline at end of file diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma new file mode 100644 index 000000000000..fc555c0756f5 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/schema.prisma @@ -0,0 +1,17 @@ +generator client { + provider = "prisma-client-js" + previewFeatures = ["multiSchema"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + schemas = ["a", "b"] + relationMode = "prisma" +} + +model A { + id Int @id + + @@schema("base") +} diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json new file mode 100644 index 000000000000..64f2acdb8098 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/result.json @@ -0,0 +1,41 @@ +[ + { + "title": "Add schema to schemas", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { + "line": 14, + "character": 13 + }, + "end": { + "line": 14, + "character": 19 + } + }, + "severity": 1, + "message": "This schema is not defined in the datasource. 
Read more on `@@schema` at https://pris.ly/d/multi-schema" + } + ], + "edit": { + "changes": { + "file:///path/to/schema.prisma": [ + { + "range": { + "start": { + "line": 7, + "character": 37 + }, + "end": { + "line": 7, + "character": 38 + } + }, + "newText": ")\n schemas = [\"base\"]" + } + ] + } + } + } +] \ No newline at end of file diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma new file mode 100644 index 000000000000..aba6b13dc237 --- /dev/null +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_nonexisting_schemas/schema.prisma @@ -0,0 +1,16 @@ +generator client { + provider = "prisma-client-js" + previewFeatures = ["multiSchema"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + relationMode = "prisma" +} + +model A { + id Int @id + + @@schema("base") +} diff --git a/prisma-fmt/tests/code_actions/tests.rs b/prisma-fmt/tests/code_actions/tests.rs index e76179204d92..dbd5ff44e96b 100644 --- a/prisma-fmt/tests/code_actions/tests.rs +++ b/prisma-fmt/tests/code_actions/tests.rs @@ -29,6 +29,8 @@ scenarios! { multi_schema_one_model multi_schema_one_model_one_enum multi_schema_two_models + multi_schema_add_to_existing_schemas + multi_schema_add_to_nonexisting_schemas mongodb_at_map mongodb_at_map_with_validation_errors } From 18f3e4017174e7e4731539d4d97cadb72b10ae73 Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Fri, 29 Sep 2023 14:19:04 +0200 Subject: [PATCH 056/128] perf: remove select before deleteMany (#4249) * perf: remove select before deleteMany * fix nested delete many * fix: set selectors in graph to avoid double read --- .../src/database/operations/write.rs | 28 ++++++------- .../src/query_builder/write.rs | 19 ++++++--- .../src/query_graph_builder/write/delete.rs | 39 +++++++++++++------ 3 files changed, 55 insertions(+), 31 deletions(-) diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index 8463a710b67f..425f4ac1d4b3 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -362,23 +362,23 @@ pub(crate) async fn delete_records( ctx: &Context<'_>, ) -> crate::Result { let filter_condition = FilterBuilder::without_top_level_joins().visit_filter(record_filter.clone().filter, ctx); - let ids = conn.filter_selectors(model, record_filter, ctx).await?; - let ids: Vec<&SelectionResult> = ids.iter().collect(); - let count = ids.len(); - if count == 0 { - return Ok(count); - } + // If we have selectors, then we must chunk the mutation into multiple if necessary and add the ids to the filter. + let row_count = if record_filter.has_selectors() { + let ids: Vec<_> = record_filter.selectors.as_ref().unwrap().iter().collect(); + let mut row_count = 0; - let mut row_count = 0; - for delete in write::delete_many(model, ids.as_slice(), filter_condition, ctx) { - row_count += conn.execute(delete).await?; - } + for delete in write::delete_many_from_ids_and_filter(model, ids.as_slice(), filter_condition, ctx) { + row_count += conn.execute(delete).await?; + } - match usize::try_from(row_count) { - Ok(row_count) => Ok(row_count), - Err(_) => Ok(count), - } + row_count + } else { + conn.execute(write::delete_many_from_filter(model, filter_condition, ctx)) + .await? 
+ }; + + Ok(row_count as usize) } /// Connect relations defined in `child_ids` to a parent defined in `parent_id`. diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index c18cb9bd6613..de8b6e78ef8d 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -190,7 +190,19 @@ pub(crate) fn chunk_update_with_ids( Ok(query) } -pub(crate) fn delete_many( +pub(crate) fn delete_many_from_filter( + model: &Model, + filter_condition: ConditionTree<'static>, + ctx: &Context<'_>, +) -> Query<'static> { + Delete::from_table(model.as_table(ctx)) + .so_that(filter_condition) + .append_trace(&Span::current()) + .add_trace_id(ctx.trace_id) + .into() +} + +pub(crate) fn delete_many_from_ids_and_filter( model: &Model, ids: &[&SelectionResult], filter_condition: ConditionTree<'static>, @@ -201,10 +213,7 @@ pub(crate) fn delete_many( .collect(); super::chunked_conditions(&columns, ids, |conditions| { - Delete::from_table(model.as_table(ctx)) - .so_that(conditions.and(filter_condition.clone())) - .append_trace(&Span::current()) - .add_trace_id(ctx.trace_id) + delete_many_from_filter(model, conditions.and(filter_condition.clone()), ctx) }) } diff --git a/query-engine/core/src/query_graph_builder/write/delete.rs b/query-engine/core/src/query_graph_builder/write/delete.rs index 91cfe088a089..a5bca5af7758 100644 --- a/query-engine/core/src/query_graph_builder/write/delete.rs +++ b/query-engine/core/src/query_graph_builder/write/delete.rs @@ -1,7 +1,7 @@ use super::*; use crate::{ query_ast::*, - query_graph::{QueryGraph, QueryGraphDependency}, + query_graph::{Node, QueryGraph, QueryGraphDependency}, ArgumentListLookup, FilteredQuery, ParsedField, }; use connector::filter::Filter; @@ -52,6 +52,7 @@ pub(crate) fn delete_record( )?; graph.add_result_node(&read_node); + Ok(()) } @@ -62,31 +63,45 @@ pub fn delete_many_records( model: Model, mut field: ParsedField<'_>, ) -> QueryGraphBuilderResult<()> { - graph.flag_transactional(); - let filter = match field.arguments.lookup(args::WHERE) { Some(where_arg) => extract_filter(where_arg.value.try_into()?, &model)?, None => Filter::empty(), }; let model_id = model.primary_identifier(); - let read_query = utils::read_ids_infallible(model.clone(), model_id, filter.clone()); - let record_filter = filter.into(); + let record_filter = filter.clone().into(); let delete_many = WriteQuery::DeleteManyRecords(DeleteManyRecords { model: model.clone(), record_filter, }); - let read_query_node = graph.create_node(read_query); let delete_many_node = graph.create_node(Query::Write(delete_many)); - utils::insert_emulated_on_delete(graph, query_schema, &model, &read_query_node, &delete_many_node)?; + if query_schema.relation_mode().is_prisma() { + graph.flag_transactional(); - graph.create_edge( - &read_query_node, - &delete_many_node, - QueryGraphDependency::ExecutionOrder, - )?; + let read_query = utils::read_ids_infallible(model.clone(), model_id.clone(), filter); + let read_query_node = graph.create_node(read_query); + + utils::insert_emulated_on_delete(graph, query_schema, &model, &read_query_node, &delete_many_node)?; + + graph.create_edge( + &read_query_node, + &delete_many_node, + QueryGraphDependency::ProjectedDataDependency( + model_id, + Box::new(|mut delete_many_node, ids| { + if let Node::Query(Query::Write(WriteQuery::DeleteManyRecords(ref mut dmr))) = delete_many_node 
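+                    // The IDs produced by the read node are injected into the
+                    // DeleteManyRecords filter below, so the delete reuses the
+                    // selectors already fetched for emulation instead of
+                    // issuing a second read.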
{ + dmr.record_filter = ids.into(); + } + + Ok(delete_many_node) + }), + ), + )?; + } + + graph.add_result_node(&delete_many_node); Ok(()) } From fa67c78e3f101106cb5b5850cfae0872ccf791c3 Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Fri, 29 Sep 2023 14:59:04 +0200 Subject: [PATCH 057/128] perf: avoid all internal queries for enums (#4280) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * perf: avoid all internal queries for enums * fix tests + support enum arrays * support empty enum arrays * cleanup --------- Co-authored-by: Miguel Fernández --- quaint/src/ast.rs | 2 + quaint/src/ast/column.rs | 33 +++++ quaint/src/ast/enums.rs | 84 ++++++++++++ quaint/src/ast/values.rs | 47 ++++++- quaint/src/connector/mssql/conversion.rs | 4 +- quaint/src/connector/mysql/conversion.rs | 6 +- quaint/src/connector/postgres/conversion.rs | 22 ++- quaint/src/connector/sqlite/conversion.rs | 4 +- quaint/src/serde.rs | 19 ++- quaint/src/tests/query.rs | 20 ++- quaint/src/tests/types/mysql.rs | 2 +- quaint/src/visitor.rs | 29 +++- quaint/src/visitor/mssql.rs | 4 +- quaint/src/visitor/mysql.rs | 4 +- quaint/src/visitor/postgres.rs | 127 +++++++++++++++++- quaint/src/visitor/sqlite.rs | 4 +- .../src/model_extensions/column.rs | 7 +- .../src/model_extensions/scalar_field.rs | 25 +++- .../src/query_builder/read.rs | 6 +- .../src/query_builder/write.rs | 4 +- .../connectors/sql-query-connector/src/row.rs | 2 +- .../sql-query-connector/src/value.rs | 13 +- .../sql-query-connector/src/value_ext.rs | 4 +- query-engine/driver-adapters/src/proxy.rs | 7 +- query-engine/prisma-models/src/field/mod.rs | 5 + .../prisma-models/src/internal_enum.rs | 4 + .../src/assertions/quaint_result_set_ext.rs | 3 +- .../tests/migrations/sql.rs | 4 +- 28 files changed, 440 insertions(+), 55 deletions(-) create mode 100644 quaint/src/ast/enums.rs diff --git a/quaint/src/ast.rs b/quaint/src/ast.rs index 03f9bc234cd2..ae2a19960b2d 100644 --- a/quaint/src/ast.rs +++ b/quaint/src/ast.rs @@ -11,6 +11,7 @@ mod conditions; mod conjunctive; mod cte; mod delete; +mod enums; mod expression; mod function; mod grouping; @@ -35,6 +36,7 @@ pub use conditions::ConditionTree; pub use conjunctive::Conjunctive; pub use cte::{CommonTableExpression, IntoCommonTableExpression}; pub use delete::Delete; +pub use enums::{EnumName, EnumVariant}; pub use expression::*; pub use function::*; pub use grouping::*; diff --git a/quaint/src/ast/column.rs b/quaint/src/ast/column.rs index 87342bd56bcb..7e6fbb71c9b7 100644 --- a/quaint/src/ast/column.rs +++ b/quaint/src/ast/column.rs @@ -32,6 +32,12 @@ pub struct Column<'a> { pub(crate) alias: Option>, pub(crate) default: Option>, pub(crate) type_family: Option, + /// Whether the column is an enum. + pub(crate) is_enum: bool, + /// Whether the column is a (scalar) list. + pub(crate) is_list: bool, + /// Whether the column is part of a SELECT or RETURNING clause. + pub(crate) is_selected: bool, } /// Defines a default value for a `Column`. @@ -89,6 +95,33 @@ impl<'a> Column<'a> { self } + /// Sets whether the column points to an enum type. + pub fn set_is_enum(mut self, is_enum: bool) -> Self { + self.is_enum = is_enum; + self + } + + /// Sets whether the column points to an scalar list. + pub fn set_is_list(mut self, is_list: bool) -> Self { + self.is_list = is_list; + self + } + + /// Sets whether the column is selected. + /// + /// On Postgres, this defines whether an enum column should be casted to `TEXT` when rendered. 
+    ///
+    /// Since enums are user-defined custom types, `tokio-postgres` fires an additional query
+    /// when selecting columns of type enum to know which custom type the column refers to.
+    /// Casting the enum column to `TEXT` avoids this roundtrip since `TEXT` is a builtin type.
+    ///
+    /// We don't want to cast every single enum column to text though, as this would prevent indexes from being used,
+    /// so we use this additional field to granularly pick which columns we cast.
+    pub fn set_is_selected(mut self, is_selected: bool) -> Self {
+        self.is_selected = is_selected;
+        self
+    }
+
     /// True when the default value is set and automatically generated in the
     /// database.
     pub fn default_autogen(&self) -> bool {
diff --git a/quaint/src/ast/enums.rs b/quaint/src/ast/enums.rs
new file mode 100644
index 000000000000..4b9798ee9704
--- /dev/null
+++ b/quaint/src/ast/enums.rs
@@ -0,0 +1,84 @@
+use crate::Value;
+use std::{borrow::Cow, fmt};
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct EnumVariant<'a>(Cow<'a, str>);
+
+impl<'a> EnumVariant<'a> {
+    pub fn new(variant: impl Into<Cow<'a, str>>) -> Self {
+        Self(variant.into())
+    }
+
+    pub fn into_owned(self) -> String {
+        self.0.into_owned()
+    }
+
+    pub fn into_text(self) -> Value<'a> {
+        Value::Text(Some(self.0))
+    }
+
+    pub fn into_enum(self, name: Option<EnumName<'a>>) -> Value<'a> {
+        Value::Enum(Some(self), name)
+    }
+}
+
+impl<'a> AsRef<str> for EnumVariant<'a> {
+    fn as_ref(&self) -> &str {
+        self.0.as_ref()
+    }
+}
+
+impl<'a> std::ops::Deref for EnumVariant<'a> {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'a> fmt::Display for EnumVariant<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(self.as_ref())
+    }
+}
+
+impl<'a> From<Cow<'a, str>> for EnumVariant<'a> {
+    fn from(value: Cow<'a, str>) -> Self {
+        Self(value)
+    }
+}
+
+impl<'a> From<String> for EnumVariant<'a> {
+    fn from(value: String) -> Self {
+        Self(value.into())
+    }
+}
+
+impl<'a> From<&'a str> for EnumVariant<'a> {
+    fn from(value: &'a str) -> Self {
+        Self(value.into())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct EnumName<'a>(Cow<'a, str>);
+
+impl<'a> EnumName<'a> {
+    pub fn new(name: impl Into<Cow<'a, str>>) -> Self {
+        Self(name.into())
+    }
+}
+
+impl<'a> AsRef<str> for EnumName<'a> {
+    fn as_ref(&self) -> &str {
+        self.0.as_ref()
+    }
+}
+
+impl<'a> std::ops::Deref for EnumName<'a> {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs
index 874e6f4cb0b7..89a8bfb02760 100644
--- a/quaint/src/ast/values.rs
+++ b/quaint/src/ast/values.rs
@@ -50,7 +50,13 @@ pub enum Value<'a> {
     /// String value.
     Text(Option<Cow<'a, str>>),
     /// Database enum value.
-    Enum(Option<Cow<'a, str>>),
+    /// The optional `EnumName` is only used on PostgreSQL.
+    /// Read more about it here: https://github.com/prisma/prisma-engines/pull/4280
+    Enum(Option<EnumVariant<'a>>, Option<EnumName<'a>>),
+    /// Database enum array (PostgreSQL specific).
+    /// We use a different variant than `Value::Array` to uplift the `EnumName`
+    /// and have it available even for empty enum arrays.
+    EnumArray(Option<Vec<EnumVariant<'a>>>, Option<EnumName<'a>>),
     /// Bytes value.
     Bytes(Option<Cow<'a, [u8]>>),
     /// Boolean value.
@@ -106,7 +112,20 @@ impl<'a> fmt::Display for Value<'a> {
             Value::Double(val) => val.map(|v| write!(f, "{v}")),
             Value::Text(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")),
             Value::Bytes(val) => val.as_ref().map(|v| write!(f, "<{} bytes blob>", v.len())),
-            Value::Enum(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")),
+            Value::Enum(val, _) => val.as_ref().map(|v| write!(f, "\"{v}\"")),
+            Value::EnumArray(vals, _) => vals.as_ref().map(|vals| {
+                let len = vals.len();
+
+                write!(f, "[")?;
+                for (i, val) in vals.iter().enumerate() {
+                    write!(f, "{val}")?;
+
+                    if i < (len - 1) {
+                        write!(f, ",")?;
+                    }
+                }
+                write!(f, "]")
+            }),
             Value::Boolean(val) => val.map(|v| write!(f, "{v}")),
             Value::Char(val) => val.map(|v| write!(f, "'{v}'")),
             Value::Array(vals) => vals.as_ref().map(|vals| {
@@ -155,7 +174,15 @@ impl<'a> From<Value<'a>> for serde_json::Value {
             }),
             Value::Text(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())),
             Value::Bytes(bytes) => bytes.map(|bytes| serde_json::Value::String(base64::encode(bytes))),
-            Value::Enum(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())),
+            Value::Enum(cow, _) => cow.map(|cow| serde_json::Value::String(cow.into_owned())),
+            Value::EnumArray(values, _) => values.map(|values| {
+                serde_json::Value::Array(
+                    values
+                        .into_iter()
+                        .map(|value| serde_json::Value::String(value.into_owned()))
+                        .collect(),
+                )
+            }),
             Value::Boolean(b) => b.map(serde_json::Value::Bool),
             Value::Char(c) => c.map(|c| {
                 let bytes = [c as u8];
@@ -240,7 +267,16 @@ impl<'a> Value<'a> {
     where
         T: Into<Cow<'a, str>>,
     {
-        Value::Enum(Some(value.into()))
+        Value::Enum(Some(EnumVariant::new(value)), None)
     }
+
+    /// Creates a new enum value with the name of the enum attached.
+    pub fn enum_variant_with_name<T, U>(value: T, name: U) -> Self
+    where
+        T: Into<Cow<'a, str>>,
+        U: Into<Cow<'a, str>>,
+    {
+        Value::Enum(Some(EnumVariant::new(value)), Some(EnumName::new(name)))
+    }

     /// Creates a new bytes value.
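For illustration only (this usage sketch is not part of the patch; the `Mood` enum name and its variants are assumed examples), the new constructors compose as follows:

use quaint::ast::{EnumName, EnumVariant, Value};

// A single enum value tagged with the name of its database enum type.
let happy = Value::enum_variant_with_name("HAPPY", "Mood");

// An enum array keeps the type name even when the list is empty, which is
// why `EnumArray` exists as a variant separate from `Value::Array`.
let moods = Value::EnumArray(
    Some(vec![EnumVariant::new("HAPPY"), EnumVariant::new("SAD")]),
    Some(EnumName::new("Mood")),
);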
@@ -319,7 +355,8 @@ impl<'a> Value<'a> {
             Value::Float(i) => i.is_none(),
             Value::Double(i) => i.is_none(),
             Value::Text(t) => t.is_none(),
-            Value::Enum(e) => e.is_none(),
+            Value::Enum(e, _) => e.is_none(),
+            Value::EnumArray(e, _) => e.is_none(),
             Value::Bytes(b) => b.is_none(),
             Value::Boolean(b) => b.is_none(),
             Value::Char(c) => c.is_none(),
diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs
index d80c0aa6b0dc..682e75b44760 100644
--- a/quaint/src/connector/mssql/conversion.rs
+++ b/quaint/src/connector/mssql/conversion.rs
@@ -17,11 +17,11 @@ impl<'a> IntoSql<'a> for &'a Value<'a> {
             Value::Double(val) => val.into_sql(),
             Value::Text(val) => val.as_deref().into_sql(),
             Value::Bytes(val) => val.as_deref().into_sql(),
-            Value::Enum(val) => val.as_deref().into_sql(),
+            Value::Enum(val, _) => val.as_deref().into_sql(),
             Value::Boolean(val) => val.into_sql(),
             Value::Char(val) => val.as_ref().map(|val| format!("{val}")).into_sql(),
             Value::Xml(val) => val.as_deref().into_sql(),
-            Value::Array(_) => panic!("Arrays are not supported on SQL Server."),
+            Value::Array(_) | Value::EnumArray(_, _) => panic!("Arrays are not supported on SQL Server."),
             #[cfg(feature = "bigdecimal")]
             Value::Numeric(val) => (*val).to_sql(),
             Value::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(),
diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs
index 41e08f5a4416..c9b1e812873d 100644
--- a/quaint/src/connector/mysql/conversion.rs
+++ b/quaint/src/connector/mysql/conversion.rs
@@ -26,11 +26,11 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result<my::Params> {
             Value::Double(f) => f.map(my::Value::Double),
             Value::Text(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())),
             Value::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())),
-            Value::Enum(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())),
+            Value::Enum(s, _) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())),
             Value::Boolean(b) => b.map(|b| my::Value::Int(b as i64)),
             Value::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])),
             Value::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())),
-            Value::Array(_) => {
+            Value::Array(_) | Value::EnumArray(_, _) => {
                 let msg = "Arrays are not supported in MySQL.";
                 let kind = ErrorKind::conversion(msg);

@@ -304,7 +304,7 @@ impl TakeRow for my::Row {
             }
             my::Value::NULL => match column {
                 t if t.is_bool() => Value::Boolean(None),
-                t if t.is_enum() => Value::Enum(None),
+                t if t.is_enum() => Value::Enum(None, None),
                 t if t.is_null() => Value::Int32(None),
                 t if t.is_int64() => Value::Int64(None),
                 t if t.is_int32() => Value::Int32(None),
diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs
index 4760f436fdcd..f321e1829529 100644
--- a/quaint/src/connector/postgres/conversion.rs
+++ b/quaint/src/connector/postgres/conversion.rs
@@ -44,8 +44,8 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
         Value::Float(_) => PostgresType::FLOAT4,
         Value::Double(_) => PostgresType::FLOAT8,
         Value::Text(_) => PostgresType::TEXT,
-        // Enums are special types, we can't statically infer them, so we let PG infer it
-        Value::Enum(_) => PostgresType::UNKNOWN,
+        // Enums are user-defined types, we can't statically infer them, so we let PG infer it
+        Value::Enum(_, _) | Value::EnumArray(_, _) => PostgresType::UNKNOWN,
         Value::Bytes(_) => PostgresType::BYTEA,
         Value::Boolean(_) => PostgresType::BOOL,
         Value::Char(_) => PostgresType::CHAR,
@@ -83,7 +83,7 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
             Value::Double(_) => PostgresType::FLOAT8_ARRAY,
             Value::Text(_) => PostgresType::TEXT_ARRAY,
             // Enums are special types, we can't statically infer them, so we let PG infer it
-            Value::Enum(_) => PostgresType::UNKNOWN,
+            Value::Enum(_, _) | Value::EnumArray(_, _) => PostgresType::UNKNOWN,
             Value::Bytes(_) => PostgresType::BYTEA_ARRAY,
             Value::Boolean(_) => PostgresType::BOOL_ARRAY,
             Value::Char(_) => PostgresType::CHAR_ARRAY,
@@ -525,13 +525,13 @@ impl GetRow for PostgresRow {
                         Value::enum_variant(val.value)
                     }
-                    None => Value::Enum(None),
+                    None => Value::Enum(None, None),
                 },
                 Kind::Array(inner) => match inner.kind() {
                     Kind::Enum => match row.try_get(i)? {
                         Some(val) => {
                             let val: Vec<Option<EnumString>> = val;
-                            let variants = val.into_iter().map(|x| Value::Enum(x.map(|x| x.value.into())));
+                            let variants = val.into_iter().map(|x| Value::Enum(x.map(|x| x.value.into()), None));

                             Ok(Value::array(variants))
                         }
@@ -839,7 +839,7 @@ impl<'a> ToSql for Value<'a> {
                 })
             }
             (Value::Bytes(bytes), _) => bytes.as_ref().map(|bytes| bytes.as_ref().to_sql(ty, out)),
-            (Value::Enum(string), _) => string.as_ref().map(|string| {
+            (Value::Enum(string, _), _) => string.as_ref().map(|string| {
                 out.extend_from_slice(string.as_bytes());
                 Ok(IsNull::No)
             }),
@@ -848,6 +848,16 @@ impl<'a> ToSql for Value<'a> {
             (Value::Array(vec), typ) if matches!(typ.kind(), Kind::Array(_)) => {
                 vec.as_ref().map(|vec| vec.to_sql(ty, out))
             }
+            (Value::EnumArray(variants, _), typ) if matches!(typ.kind(), Kind::Array(_)) => variants
+                .as_ref()
+                .map(|vec| vec.iter().map(|val| val.as_ref()).collect::<Vec<_>>().to_sql(ty, out)),
+            (Value::EnumArray(variants, _), typ) => {
+                let kind = ErrorKind::conversion(format!(
+                    "Couldn't serialize value `{variants:?}` into a `{typ}`. Value is a list but `{typ}` is not."
+                ));
+
+                return Err(Error::builder(kind).build().into());
+            }
             (Value::Array(vec), typ) => {
                 let kind = ErrorKind::conversion(format!(
                     "Couldn't serialize value `{vec:?}` into a `{typ}`. Value is a list but `{typ}` is not."
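To make the parameter-typing rule above concrete, a minimal sketch (an assumed crate-internal test, since `params_to_types` is `pub(crate)`; the `Mood` enum is illustrative): enums and enum arrays are sent as `UNKNOWN` so Postgres infers the user-defined type, while builtin scalars keep their static types.

let types = params_to_types(&[
    Value::enum_variant_with_name("HAPPY", "Mood"),
    Value::text("hello"),
]);

assert_eq!(types, vec![PostgresType::UNKNOWN, PostgresType::TEXT]);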
diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs index ab62992e7a67..dade118596c6 100644 --- a/quaint/src/connector/sqlite/conversion.rs +++ b/quaint/src/connector/sqlite/conversion.rs @@ -251,11 +251,11 @@ impl<'a> ToSql for Value<'a> { Value::Float(float) => float.map(|f| f as f64).map(ToSqlOutput::from), Value::Double(double) => double.map(ToSqlOutput::from), Value::Text(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), - Value::Enum(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), + Value::Enum(cow, _) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), Value::Boolean(boo) => boo.map(ToSqlOutput::from), Value::Char(c) => c.map(|c| ToSqlOutput::from(c as u8)), Value::Bytes(bytes) => bytes.as_ref().map(|bytes| ToSqlOutput::from(bytes.as_ref())), - Value::Array(_) => { + Value::Array(_) | Value::EnumArray(_, _) => { let msg = "Arrays are not supported in SQLite."; let kind = ErrorKind::conversion(msg); diff --git a/quaint/src/serde.rs b/quaint/src/serde.rs index 85b605aec034..c88ff4ae520b 100644 --- a/quaint/src/serde.rs +++ b/quaint/src/serde.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use crate::{ - ast::Value, + ast::{EnumVariant, Value}, connector::{ResultRow, ResultSet}, error::{Error, ErrorKind}, }; @@ -100,6 +100,14 @@ impl<'de> IntoDeserializer<'de, DeserializeError> for Value<'de> { } } +impl<'de> IntoDeserializer<'de, DeserializeError> for EnumVariant<'de> { + type Deserializer = ValueDeserializer<'de>; + + fn into_deserializer(self) -> Self::Deserializer { + ValueDeserializer(self.into_text()) + } +} + #[derive(Debug)] pub struct ValueDeserializer<'a>(Value<'a>); @@ -112,8 +120,13 @@ impl<'de> Deserializer<'de> for ValueDeserializer<'de> { Value::Text(None) => visitor.visit_none(), Value::Bytes(Some(bytes)) => visitor.visit_bytes(bytes.as_ref()), Value::Bytes(None) => visitor.visit_none(), - Value::Enum(Some(s)) => visitor.visit_string(s.into_owned()), - Value::Enum(None) => visitor.visit_none(), + Value::Enum(Some(s), _) => visitor.visit_string(s.into_owned()), + Value::Enum(None, _) => visitor.visit_none(), + Value::EnumArray(Some(variants), _) => { + let deserializer = serde::de::value::SeqDeserializer::new(variants.into_iter()); + visitor.visit_seq(deserializer) + } + Value::EnumArray(None, _) => visitor.visit_none(), Value::Int32(Some(i)) => visitor.visit_i32(i), Value::Int32(None) => visitor.visit_none(), Value::Int64(Some(i)) => visitor.visit_i64(i), diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index dc286307cf1d..ae3666e80a6b 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -1663,15 +1663,27 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { .await?; api.conn() - .insert(Insert::single_into(&table).value("value", "A").into()) + .insert( + Insert::single_into(&table) + .value("value", Value::enum_variant_with_name("A", &type_name)) + .into(), + ) .await?; api.conn() - .insert(Insert::single_into(&table).value("value", "B").into()) + .insert( + Insert::single_into(&table) + .value("value", Value::enum_variant_with_name("B", &type_name)) + .into(), + ) .await?; api.conn() - .insert(Insert::single_into(&table).value("value", Value::Enum(None)).into()) + .insert( + Insert::single_into(&table) + .value("value", Value::Enum(None, None)) + .into(), + ) .await?; let select = Select::from_table(&table).column("value").order_by("id".ascend()); @@ -1684,7 +1696,7 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { 
    assert_eq!(Some(&Value::enum_variant("B")), row.at(0));

     let row = res.get(2).unwrap();
-    assert_eq!(Some(&Value::Enum(None)), row.at(0));
+    assert_eq!(Some(&Value::Enum(None, None)), row.at(0));

     Ok(())
 }
diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs
index b69d736d4a3c..fc3d86a30bcb 100644
--- a/quaint/src/tests/types/mysql.rs
+++ b/quaint/src/tests/types/mysql.rs
@@ -203,7 +203,7 @@ test_type!(blob(mysql, "blob", Value::Bytes(None), Value::bytes(vec![1, 2, 3])))
 test_type!(enum(
     mysql,
     "enum('pollicle_dogs','jellicle_cats')",
-    Value::Enum(None),
+    Value::Enum(None, None),
     Value::enum_variant("jellicle_cats"),
     Value::enum_variant("pollicle_dogs")
 ));
diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs
index a77e86533174..29ca3d5ccbaa 100644
--- a/quaint/src/visitor.rs
+++ b/quaint/src/visitor.rs
@@ -148,10 +148,35 @@ pub trait Visitor<'a> {
     #[cfg(any(feature = "postgresql", feature = "mysql"))]
     fn visit_text_search_relevance(&mut self, text_search_relevance: TextSearchRelevance<'a>) -> Result;

+    fn visit_parameterized_enum(&mut self, variant: EnumVariant<'a>, name: Option<EnumName<'a>>) -> Result {
+        self.add_parameter(Value::Enum(Some(variant), name));
+        self.parameter_substitution()?;
+
+        Ok(())
+    }
+
+    fn visit_parameterized_enum_array(&mut self, variants: Vec<EnumVariant<'a>>, name: Option<EnumName<'a>>) -> Result {
+        let enum_variants: Vec<_> = variants
+            .into_iter()
+            .map(|variant| variant.into_enum(name.clone()))
+            .collect();
+
+        self.add_parameter(Value::Array(Some(enum_variants)));
+        self.parameter_substitution()?;
+
+        Ok(())
+    }
+
     /// A visit to a value we parameterize
     fn visit_parameterized(&mut self, value: Value<'a>) -> Result {
-        self.add_parameter(value);
-        self.parameter_substitution()
+        match value {
+            Value::Enum(Some(variant), name) => self.visit_parameterized_enum(variant, name),
+            Value::EnumArray(Some(variants), name) => self.visit_parameterized_enum_array(variants, name),
+            _ => {
+                self.add_parameter(value);
+                self.parameter_substitution()
+            }
+        }
     }

     /// The join statements in the query
diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs
index a5975e6249fa..7e8249f369e0 100644
--- a/quaint/src/visitor/mssql.rs
+++ b/quaint/src/visitor/mssql.rs
@@ -326,11 +326,11 @@ impl<'a> Visitor<'a> for Mssql<'a> {
                 v => self.write(format!("{v:?}")),
             }),
             Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))),
-            Value::Enum(e) => e.map(|e| self.write(e)),
+            Value::Enum(e, _) => e.map(|e| self.write(e)),
             Value::Bytes(b) => b.map(|b| self.write(format!("0x{}", hex::encode(b)))),
             Value::Boolean(b) => b.map(|b| self.write(if b { 1 } else { 0 })),
             Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))),
-            Value::Array(_) => {
+            Value::Array(_) | Value::EnumArray(_, _) => {
                 let msg = "Arrays are not supported in T-SQL.";
                 let kind = ErrorKind::conversion(msg);
diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs
index bbe4591575ef..d4587753f8f8 100644
--- a/quaint/src/visitor/mysql.rs
+++ b/quaint/src/visitor/mysql.rs
@@ -136,11 +136,11 @@ impl<'a> Visitor<'a> for Mysql<'a> {
                 v => self.write(format!("{v:?}")),
             }),
             Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))),
-            Value::Enum(e) => e.map(|e| self.write(e)),
+            Value::Enum(e, _) => e.map(|e| self.write(e)),
             Value::Bytes(b) => b.map(|b| self.write(format!("x'{}'", hex::encode(b)))),
             Value::Boolean(b) => b.map(|b| self.write(b)),
             Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))),
-            Value::Array(_) => {
+            Value::Array(_) | Value::EnumArray(_, _) => {
                 let msg = "Arrays are not supported in MySQL.";
                 let kind = ErrorKind::conversion(msg);
diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs
index f18114ba5888..4ba6b99b5123 100644
--- a/quaint/src/visitor/postgres.rs
+++ b/quaint/src/visitor/postgres.rs
@@ -2,7 +2,10 @@ use crate::{
     ast::*,
     visitor::{self, Visitor},
 };
-use std::fmt::{self, Write};
+use std::{
+    fmt::{self, Write},
+    ops::Deref,
+};

 /// A visitor to generate queries for the PostgreSQL database.
 ///
@@ -47,6 +50,93 @@ impl<'a> Visitor<'a> for Postgres<'a> {
         self.write(self.parameters.len())
     }

+    fn visit_parameterized_enum(&mut self, variant: EnumVariant<'a>, name: Option<EnumName<'a>>) -> visitor::Result {
+        self.add_parameter(variant.into_text());
+
+        // Since enums are user-defined custom types, tokio-postgres fires an additional query
+        // when parameterizing values of type enum to know which custom type the value refers to.
+        // Casting the enum value to `TEXT` avoids this roundtrip since `TEXT` is a builtin type.
+        if let Some(enum_name) = name {
+            self.surround_with("CAST(", ")", |ref mut s| {
+                s.parameter_substitution()?;
+                s.write("::text")?;
+                s.write(" AS ")?;
+                s.surround_with_backticks(enum_name.deref())
+            })?;
+        } else {
+            self.parameter_substitution()?;
+        }
+
+        Ok(())
+    }
+
+    fn visit_parameterized_enum_array(
+        &mut self,
+        variants: Vec<EnumVariant<'a>>,
+        name: Option<EnumName<'a>>,
+    ) -> visitor::Result {
+        let len = variants.len();
+
+        // Since enums are user-defined custom types, tokio-postgres fires an additional query
+        // when parameterizing values of type enum to know which custom type the value refers to.
+        // Casting the enum value to `TEXT` avoids this roundtrip since `TEXT` is a builtin type.
+        if let Some(enum_name) = name.clone() {
+            self.surround_with("ARRAY[", "]", |s| {
+                for (i, variant) in variants.into_iter().enumerate() {
+                    s.add_parameter(variant.into_text());
+                    s.parameter_substitution()?;
+                    s.write("::text")?;
+
+                    if i < (len - 1) {
+                        s.write(", ")?;
+                    }
+                }
+
+                Ok(())
+            })?;
+
+            self.write("::")?;
+            self.surround_with_backticks(enum_name.deref())?;
+            self.write("[]")?;
+        } else {
+            self.visit_parameterized(Value::Array(Some(
+                variants
+                    .into_iter()
+                    .map(|variant| variant.into_enum(name.clone()))
+                    .collect(),
+            )))?;
+        }
+
+        Ok(())
+    }
+
+    /// A database column identifier
+    fn visit_column(&mut self, column: Column<'a>) -> visitor::Result {
+        match column.table {
+            Some(table) => {
+                self.visit_table(table, false)?;
+                self.write(".")?;
+                self.delimited_identifiers(&[&*column.name])?;
+            }
+            _ => self.delimited_identifiers(&[&*column.name])?,
+        };
+
+        if column.is_enum && column.is_selected {
+            if column.is_list {
+                self.write("::text[]")?;
+            } else {
+                self.write("::text")?;
+            }
+        }
+
+        if let Some(alias) = column.alias {
+            self.write(" AS ")?;
+            self.delimited_identifiers(&[&*alias])?;
+        }
+
+        Ok(())
+    }
+
     fn visit_limit_and_offset(&mut self, limit: Option<Value<'a>>, offset: Option<Value<'a>>) -> visitor::Result {
         match (limit, offset) {
             (Some(limit), Some(offset)) => {
@@ -73,7 +163,7 @@
             Value::Int32(i) => i.map(|i| self.write(i)),
             Value::Int64(i) => i.map(|i| self.write(i)),
             Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))),
-            Value::Enum(e) => e.map(|e| self.write(e)),
+            Value::Enum(e, _) => e.map(|e| self.write(e)),
             Value::Bytes(b) => b.map(|b| self.write(format!("E'{}'", hex::encode(b)))),
             Value::Boolean(b) => b.map(|b| self.write(b)),
             Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))),
             Value::Array(vals) => vals.as_ref().map(|vals| {
@@ -105,7 +195,28 @@
                 Ok(())
             })
             }),
Value::EnumArray(variants, name) => variants.map(|variants| { + self.surround_with("ARRAY[", "]", |ref mut s| { + let len = variants.len(); + + for (i, item) in variants.into_iter().enumerate() { + s.surround_with("'", "'", |t| t.write(item))?; + if i < len - 1 { + s.write(",")?; + } + } + + Ok(()) + })?; + + if let Some(name) = name { + self.write("::")?; + self.surround_with_backticks(name.as_ref())?; + } + + Ok(()) + }), Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), #[cfg(feature = "bigdecimal")] Value::Numeric(r) => r.map(|r| self.write(r)), @@ -925,6 +1036,18 @@ mod tests { assert_eq!(r#"SELECT "foo".* FROM "foo" WHERE "bar" ILIKE $1"#, sql); } + #[test] + fn test_raw_enum_array() { + let enum_array = Value::EnumArray( + Some(vec![EnumVariant::new("A"), EnumVariant::new("B")]), + Some(EnumName::new("Alphabet")), + ); + let (sql, params) = Postgres::build(Select::default().value(enum_array.raw())).unwrap(); + + assert_eq!("SELECT ARRAY['A','B']::\"Alphabet\"", sql); + assert!(params.is_empty()); + } + #[test] fn test_like_cast_to_string() { let expected = expected_values( diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 838ead467571..209758bbeb20 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -78,7 +78,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> { Value::Int32(i) => i.map(|i| self.write(i)), Value::Int64(i) => i.map(|i| self.write(i)), Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e) => e.map(|e| self.write(e)), + Value::Enum(e, _) => e.map(|e| self.write(e)), Value::Bytes(b) => b.map(|b| self.write(format!("x'{}'", hex::encode(b)))), Value::Boolean(b) => b.map(|b| self.write(b)), Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), @@ -94,7 +94,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> { f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Array(_) => { + Value::Array(_) | Value::EnumArray(_, _) => { let msg = "Arrays are not supported in SQLite."; let kind = ErrorKind::conversion(msg); diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs index 02ed3776b147..445bada9c45c 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs @@ -96,7 +96,10 @@ impl AsColumn for ScalarField { let full_table_name = super::table::db_name_with_schema(&self.container().as_model().unwrap(), ctx); let col = self.db_name().to_string(); - let column = Column::from((full_table_name, col)).type_family(self.type_family()); - column.default(quaint::ast::DefaultValue::Generated) + Column::from((full_table_name, col)) + .type_family(self.type_family()) + .set_is_enum(self.type_identifier().is_enum()) + .set_is_list(self.is_list()) + .default(quaint::ast::DefaultValue::Generated) } } diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index 429cff058241..bddb0aacc333 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -2,8 +2,8 @@ use chrono::Utc; use prisma_models::{ScalarField, TypeIdentifier}; use prisma_value::PrismaValue; use quaint::{ - ast::Value, 
- prelude::{TypeDataLength, TypeFamily}, + ast::{EnumName, Value}, + prelude::{EnumVariant, TypeDataLength, TypeFamily}, }; pub trait ScalarFieldExt { @@ -18,6 +18,21 @@ impl ScalarFieldExt for ScalarField { (PrismaValue::Float(f), _) => f.into(), (PrismaValue::Boolean(b), _) => b.into(), (PrismaValue::DateTime(d), _) => d.with_timezone(&Utc).into(), + (PrismaValue::Enum(e), TypeIdentifier::Enum(enum_id)) => { + let enum_walker = self.dm.clone().zip(enum_id); + + Value::enum_variant_with_name(e, enum_walker.db_name().to_owned()) + } + (PrismaValue::List(vals), TypeIdentifier::Enum(enum_id)) => { + let enum_walker = self.dm.clone().zip(enum_id); + let variants: Vec<_> = vals + .into_iter() + .map(|val| val.into_string().unwrap()) + .map(EnumVariant::new) + .collect(); + + Value::EnumArray(Some(variants), Some(EnumName::new(enum_walker.db_name().to_owned()))) + } (PrismaValue::Enum(e), _) => e.into(), (PrismaValue::Int(i), _) => i.into(), (PrismaValue::BigInt(i), _) => i.into(), @@ -31,7 +46,11 @@ impl ScalarFieldExt for ScalarField { TypeIdentifier::Float => Value::Numeric(None), TypeIdentifier::Decimal => Value::Numeric(None), TypeIdentifier::Boolean => Value::Boolean(None), - TypeIdentifier::Enum(_) => Value::Enum(None), + TypeIdentifier::Enum(enum_id) => { + let enum_walker = self.dm.clone().zip(enum_id); + + Value::Enum(None, Some(EnumName::new(enum_walker.db_name().to_owned()))) + } TypeIdentifier::Json => Value::Json(None), TypeIdentifier::DateTime => Value::DateTime(None), TypeIdentifier::UUID => Value::Uuid(None), diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs index eba36d394282..a5385f1dd56a 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/read.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/read.rs @@ -124,7 +124,9 @@ where T: SelectDefinition, { let (select, additional_selection_set) = query.into_select(model, aggr_selections, ctx); - let select = columns.fold(select, |acc, col| acc.column(col)); + let select = columns + .map(|c| c.set_is_selected(true)) + .fold(select, |acc, col| acc.column(col)); let select = select.append_trace(&Span::current()).add_trace_id(ctx.trace_id); @@ -218,7 +220,7 @@ pub(crate) fn group_by_aggregate( let (base_query, _) = args.into_select(model, &[], ctx); let select_query = selections.iter().fold(base_query, |select, next_op| match next_op { - AggregationSelection::Field(field) => select.column(field.as_column(ctx)), + AggregationSelection::Field(field) => select.column(field.as_column(ctx).set_is_selected(true)), AggregationSelection::Count { all, fields } => { let select = fields.iter().fold(select, |select, next_field| { diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index de8b6e78ef8d..f0a4fb08e237 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -32,7 +32,7 @@ pub(crate) fn create_record( }); Insert::from(insert) - .returning(selected_fields.as_columns(ctx)) + .returning(selected_fields.as_columns(ctx).map(|c| c.set_is_selected(true))) .append_trace(&Span::current()) .add_trace_id(ctx.trace_id) } @@ -164,7 +164,7 @@ pub(crate) fn build_update_and_set_query( let query = query.append_trace(&Span::current()).add_trace_id(ctx.trace_id); let query = if let 
Some(selected_fields) = selected_fields {
-        query.returning(selected_fields.as_columns(ctx))
+        query.returning(selected_fields.as_columns(ctx).map(|c| c.set_is_selected(true)))
     } else {
         query
     };
diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs
index 9dfd05751c56..9f0b69e73ff7 100644
--- a/query-engine/connectors/sql-query-connector/src/row.rs
+++ b/query-engine/connectors/sql-query-connector/src/row.rs
@@ -151,7 +151,7 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result<PrismaValue>
         },
         TypeIdentifier::Enum(_) => match p_value {
             value if value.is_null() => PrismaValue::Null,
-            Value::Enum(Some(cow)) => PrismaValue::Enum(cow.into_owned()),
+            Value::Enum(Some(cow), _) => PrismaValue::Enum(cow.into_owned()),
             Value::Text(Some(cow)) => PrismaValue::Enum(cow.into_owned()),
             _ => return Err(create_error(&p_value)),
         },
diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs
index 086314ed7419..8ccf89288071 100644
--- a/query-engine/connectors/sql-query-connector/src/value.rs
+++ b/query-engine/connectors/sql-query-connector/src/value.rs
@@ -33,7 +33,7 @@ pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result<PrismaValue> {
             .map(|s| PrismaValue::String(s.into_owned()))
             .unwrap_or(PrismaValue::Null),

-        Value::Enum(s) => s
+        Value::Enum(s, _) => s
             .map(|s| PrismaValue::Enum(s.into_owned()))
             .unwrap_or(PrismaValue::Null),

@@ -51,6 +51,17 @@ pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result<PrismaValue> {

         Value::Array(None) => PrismaValue::Null,

+        Value::EnumArray(Some(v), name) => {
+            let mut res = Vec::with_capacity(v.len());
+
+            for v in v.into_iter() {
+                res.push(to_prisma_value(Value::Enum(Some(v), name.clone()))?);
+            }
+
+            PrismaValue::List(res)
+        }
+        Value::EnumArray(None, _) => PrismaValue::Null,
+
         Value::Json(val) => val
             .map(|val| PrismaValue::Json(val.to_string()))
             .unwrap_or(PrismaValue::Null),
diff --git a/query-engine/connectors/sql-query-connector/src/value_ext.rs b/query-engine/connectors/sql-query-connector/src/value_ext.rs
index 1d9a82427592..b0c42e5af38c 100644
--- a/query-engine/connectors/sql-query-connector/src/value_ext.rs
+++ b/query-engine/connectors/sql-query-connector/src/value_ext.rs
@@ -17,7 +17,7 @@ impl<'a> IntoTypedJsonExtension for quaint::Value<'a> {
             quaint::Value::Float(_) => "float",
             quaint::Value::Double(_) => "double",
             quaint::Value::Text(_) => "string",
-            quaint::Value::Enum(_) => "enum",
+            quaint::Value::Enum(_, _) => "enum",
             quaint::Value::Bytes(_) => "bytes",
             quaint::Value::Boolean(_) => "bool",
             quaint::Value::Char(_) => "char",
@@ -28,7 +28,7 @@ impl<'a> IntoTypedJsonExtension for quaint::Value<'a> {
             quaint::Value::DateTime(_) => "datetime",
             quaint::Value::Date(_) => "date",
             quaint::Value::Time(_) => "time",
-            quaint::Value::Array(_) => "array",
+            quaint::Value::Array(_) | quaint::Value::EnumArray(_, _) => "array",
         };

         type_name.to_owned()
diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs
index 4bf4bc56ad7f..d1ab2b361d69 100644
--- a/query-engine/driver-adapters/src/proxy.rs
+++ b/query-engine/driver-adapters/src/proxy.rs
@@ -345,7 +345,7 @@ fn js_value_to_quaint(
         }
         ColumnType::Enum => match json_value {
             serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)),
-            serde_json::Value::Null => Ok(QuaintValue::Enum(None)),
+            serde_json::Value::Null => Ok(QuaintValue::Enum(None, None)),
             mismatch => Err(conversion_error!(
                 "expected a string in column {column_name}, found {mismatch}"
             )),
@@ -803,11 +803,12 @@ mod proxy_test {
         let column_type = ColumnType::Enum;

         // null
-        test_null(QuaintValue::Enum(None), column_type);
+        test_null(QuaintValue::Enum(None, None), column_type);

         let s = "some enum variant";
         let json_value = serde_json::Value::String(s.to_string());
+
         let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap();
-        assert_eq!(quaint_value, QuaintValue::Enum(Some(s.into())));
+        assert_eq!(quaint_value, QuaintValue::Enum(Some(s.into()), None));
     }
 }
diff --git a/query-engine/prisma-models/src/field/mod.rs b/query-engine/prisma-models/src/field/mod.rs
index d05b6d7d6361..45d529c56abf 100644
--- a/query-engine/prisma-models/src/field/mod.rs
+++ b/query-engine/prisma-models/src/field/mod.rs
@@ -178,6 +178,11 @@ impl TypeIdentifier {
             TypeIdentifier::Unsupported => "Unsupported".into(),
         }
     }
+
+    /// Returns `true` if the type identifier is [`Enum`].
+    pub fn is_enum(&self) -> bool {
+        matches!(self, Self::Enum(..))
+    }
 }

 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
diff --git a/query-engine/prisma-models/src/internal_enum.rs b/query-engine/prisma-models/src/internal_enum.rs
index 1ae3459f2356..e393f82509cd 100644
--- a/query-engine/prisma-models/src/internal_enum.rs
+++ b/query-engine/prisma-models/src/internal_enum.rs
@@ -9,6 +9,10 @@ impl InternalEnum {
     pub fn name(&self) -> &str {
         self.dm.walk(self.id).name()
     }
+
+    pub fn db_name(&self) -> &str {
+        self.dm.walk(self.id).database_name()
+    }
 }

 impl std::fmt::Debug for InternalEnum {
diff --git a/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs b/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs
index 3acb21c9af25..2aaf1f9801db 100644
--- a/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs
+++ b/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs
@@ -88,7 +88,8 @@ impl<'a> RowAssertion<'a> {
     pub fn assert_text_value(self, column_name: &str, expected_value: &str) -> Self {
         let value = self.0.get(column_name).expect("Expected a value, found none");
         let value_text: &str = match value {
-            Value::Text(val) | Value::Enum(val) => val.as_deref(),
+            Value::Text(val) => val.as_deref(),
+            Value::Enum(val, _) => val.as_deref(),
             _ => None,
         }
         .expect("Expected a string value");
diff --git a/schema-engine/sql-migration-tests/tests/migrations/sql.rs b/schema-engine/sql-migration-tests/tests/migrations/sql.rs
index 36486e7c18a6..f8a9d0ef3202 100644
--- a/schema-engine/sql-migration-tests/tests/migrations/sql.rs
+++ b/schema-engine/sql-migration-tests/tests/migrations/sql.rs
@@ -245,7 +245,7 @@ fn enum_defaults_must_work(api: TestApi) {
     assert_eq!(row.get("id").unwrap().to_string().unwrap(), "the-id");
     assert_eq!(
         match row.get("mood").unwrap() {
-            quaint::Value::Enum(Some(enm)) => enm.as_ref(),
+            quaint::Value::Enum(Some(enm), _) => enm.as_ref(),
             quaint::Value::Text(Some(enm)) => enm.as_ref(),
             _ => panic!("mood is not an enum value"),
         },
     );
@@ -253,7 +253,7 @@ fn enum_defaults_must_work(api: TestApi) {
     assert_eq!(
         match row.get("previousMood").unwrap() {
-            quaint::Value::Enum(Some(enm)) => enm.as_ref(),
+            quaint::Value::Enum(Some(enm), _) => enm.as_ref(),
             quaint::Value::Text(Some(enm)) => enm.as_ref(),
             _ => panic!("previousMood is not an enum value"),
         },

From a437f71ab038893c0001b09743862e841bedca01 Mon Sep 17 00:00:00 2001
From: Miguel Fernández
Date: Fri, 29 Sep 2023 15:00:09 +0200
Subject: [PATCH 058/128] driver(adapters): More test fixes focused on neon/pg
 (#4301)

* fix(driver-adapter): add support for UUID type

* Add conversion of UUID to neon

* Add conversion of OID to neon and pg

Fixes writes::data_types::native_types::postgres::postgres::native_int_types

* Prepare adapter-side transformations

* Fix money

* Isolate quaint conversion to their own set of modules

* polymorphic, per flavor conversion. (prep refactoring)

* Revert "polymorphic, per flavor conversion. (prep refactoring)"

This reverts commit 07bddd35466f8c8a05419db386c9e9769c735b99.

* Revert "Isolate quaint conversion to their own set of modules"

This reverts commit 2f8439d7a57bf3c27eacce2f7abb8d323bde4113.

* Update query-engine/driver-adapters/src/proxy.rs

Co-authored-by: Alexey Orlenko

* Update query-engine/driver-adapters/src/proxy.rs

Co-authored-by: Alexey Orlenko

* Remove unused conversions

* Update query-engine/driver-adapters/src/proxy.rs

Co-authored-by: Alexey Orlenko

* fixup! Remove unused conversions

* Update driver-adapter-utils version

* Update drivers versions

* Fix typos in proxy.rs

* formatting

---------

Co-authored-by: jkomyno
Co-authored-by: Alexey Orlenko
---
 Cargo.lock                                         |  1 +
 query-engine/driver-adapters/Cargo.toml            |  1 +
 .../js/adapter-neon/package.json                   |  2 +-
 .../js/adapter-neon/src/conversion.ts              | 18 +++++++++++++-----
 .../js/adapter-neon/src/neon.ts                    |  4 +++-
 .../driver-adapters/js/adapter-pg/package.json     |  2 +-
 .../js/adapter-pg/src/conversion.ts                | 18 +++++++++++++-----
 .../driver-adapters/js/adapter-pg/src/pg.ts        |  1 +
 .../js/driver-adapter-utils/package.json           |  2 +-
 .../js/driver-adapter-utils/src/const.ts           |  4 ++--
 query-engine/driver-adapters/src/proxy.rs          | 18 ++++++++++++++++--
 11 files changed, 53 insertions(+), 18 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 803c2ccda82c..5f4df8b9a2f9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1068,6 +1068,7 @@ dependencies = [
  "tokio",
  "tracing",
  "tracing-core",
+ "uuid",
 ]

 [[package]]
diff --git a/query-engine/driver-adapters/Cargo.toml b/query-engine/driver-adapters/Cargo.toml
index 39e6804eca28..4c0b55bb0a92 100644
--- a/query-engine/driver-adapters/Cargo.toml
+++ b/query-engine/driver-adapters/Cargo.toml
@@ -13,6 +13,7 @@ psl.workspace = true
 tracing = "0.1"
 tracing-core = "0.1"
 metrics = "0.18"
+uuid = { version = "1", features = ["v4"] }

 # Note: these deps are temporarily specified here to avoid importing them from tiberius (the SQL server driver).
 # They will be imported from quaint-core instead in a future PR.
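The type-mapping changes in the two conversion.ts files below follow one rule:
the Postgres type OID reported for a result column is mapped to a
driver-adapter ColumnType, and pg type parsers are registered so that temporal
and money values reach the engine as plain strings. A minimal TypeScript
sketch of that rule, for orientation (a sketch, not the shipped code; the OIDs
are the standard Postgres type OIDs, and fieldToColumnTypeSketch /
parseMoneySketch are made-up names):

// Illustrative sketch, not the adapter source. OIDs are standard Postgres
// type OIDs: MONEY = 790, NUMERIC = 1700, UUID = 2950, OID = 26, JSONB = 3802.
type SketchColumnType = 'Int64' | 'Numeric' | 'Text' | 'Json' | 'Uuid'

function fieldToColumnTypeSketch(fieldTypeId: number): SketchColumnType {
  switch (fieldTypeId) {
    case 790:  // MONEY is surfaced as Numeric once its currency prefix is stripped
    case 1700: // NUMERIC
      return 'Numeric'
    case 2950: // UUID gets a dedicated column type in this patch
      return 'Uuid'
    case 26:   // OID is an unsigned 32-bit integer, so Int64 is wide enough
      return 'Int64'
    case 3802: // JSONB
      return 'Json'
    default:   // BPCHAR, BIT, VARBIT, INET, CIDR, ... all degrade to Text
      return 'Text'
  }
}

// MONEY arrives as e.g. '$12.34'; dropping the first character assumes the
// one-character '$' prefix of the default lc_monetary locale.
const parseMoneySketch = (money: string): string => money.slice(1)

console.log(fieldToColumnTypeSketch(790), parseMoneySketch('$12.34')) // Numeric 12.34

The shipped conversion.ts tables use the NeonColumnType / PgColumnType OID
constants rather than bare numbers, and OIDs at or above 10000 take a separate
branch for Postgres custom types.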
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json
index 3c0089a4757d..a891c0147969 100644
--- a/query-engine/driver-adapters/js/adapter-neon/package.json
+++ b/query-engine/driver-adapters/js/adapter-neon/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/adapter-neon",
-  "version": "0.4.2",
+  "version": "0.5.0",
   "description": "Prisma's driver adapter for \"@neondatabase/serverless\"",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts
index 7b08ba5bd1f8..99db8870de98 100644
--- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts
+++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts
@@ -28,14 +28,22 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType {
     case NeonColumnType['TIMESTAMP']:
       return ColumnTypeEnum.DateTime
     case NeonColumnType['NUMERIC']:
+    case NeonColumnType['MONEY']:
       return ColumnTypeEnum.Numeric
+    case NeonColumnType['JSONB']:
+      return ColumnTypeEnum.Json
+    case NeonColumnType['UUID']:
+      return ColumnTypeEnum.Uuid
+    case NeonColumnType['OID']:
+      return ColumnTypeEnum.Int64
     case NeonColumnType['BPCHAR']:
-      return ColumnTypeEnum.Char
     case NeonColumnType['TEXT']:
     case NeonColumnType['VARCHAR']:
+    case NeonColumnType['BIT']:
+    case NeonColumnType['VARBIT']:
+    case NeonColumnType['INET']:
+    case NeonColumnType['CIDR']:
       return ColumnTypeEnum.Text
-    case NeonColumnType['JSONB']:
-      return ColumnTypeEnum.Json
     default:
       if (fieldTypeId >= 10000) {
         // Postgres Custom Types
@@ -60,9 +68,9 @@ function convertJson(json: string): unknown {
 }

 // return string instead of JavaScript Date object
-types.setTypeParser(NeonColumnType.DATE, date => date)
 types.setTypeParser(NeonColumnType.TIME, date => date)
+types.setTypeParser(NeonColumnType.DATE, date => date)
 types.setTypeParser(NeonColumnType.TIMESTAMP, date => date)
-
 types.setTypeParser(NeonColumnType.JSONB, convertJson)
 types.setTypeParser(NeonColumnType.JSON, convertJson)
+types.setTypeParser(NeonColumnType.MONEY, (money: string) => money.slice(1))
\ No newline at end of file
diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts
index cd0731240488..c86b8d88bef0 100644
--- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts
+++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts
@@ -29,9 +29,11 @@ abstract class NeonQueryable implements Queryable {
     return (await this.performIO(query)).map(({ fields, rows }) => {
       const columns = fields.map((field) => field.name)
+      const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID))
+
       return {
         columnNames: columns,
-        columnTypes: fields.map((field) => fieldToColumnType(field.dataTypeID)),
+        columnTypes,
         rows,
       }
     })
diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json
index 68cdad5ea715..6e17419ae85f 100644
--- a/query-engine/driver-adapters/js/adapter-pg/package.json
+++ b/query-engine/driver-adapters/js/adapter-pg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/adapter-pg",
-  "version": "0.4.2",
+  "version": "0.5.0",
   "description": "Prisma's driver adapter for \"pg\"",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts
index da145a5eb663..a8102f95ad0b 100644
--- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts
+++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts
@@ -28,14 +28,22 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType {
     case PgColumnType['TIMESTAMP']:
       return ColumnTypeEnum.DateTime
     case PgColumnType['NUMERIC']:
+    case PgColumnType['MONEY']:
       return ColumnTypeEnum.Numeric
+    case PgColumnType['JSONB']:
+      return ColumnTypeEnum.Json
+    case PgColumnType['UUID']:
+      return ColumnTypeEnum.Uuid
+    case PgColumnType['OID']:
+      return ColumnTypeEnum.Int64
     case PgColumnType['BPCHAR']:
-      return ColumnTypeEnum.Char
     case PgColumnType['TEXT']:
     case PgColumnType['VARCHAR']:
+    case PgColumnType['BIT']:
+    case PgColumnType['VARBIT']:
+    case PgColumnType['INET']:
+    case PgColumnType['CIDR']:
       return ColumnTypeEnum.Text
-    case PgColumnType['JSONB']:
-      return ColumnTypeEnum.Json
     default:
       if (fieldTypeId >= 10000) {
         // Postgres Custom Types
@@ -60,9 +68,9 @@ function convertJson(json: string): unknown {
 }

 // return string instead of JavaScript Date object
-types.setTypeParser(PgColumnType.DATE, date => date)
 types.setTypeParser(PgColumnType.TIME, date => date)
+types.setTypeParser(PgColumnType.DATE, date => date)
 types.setTypeParser(PgColumnType.TIMESTAMP, date => date)
-
 types.setTypeParser(PgColumnType.JSONB, convertJson)
 types.setTypeParser(PgColumnType.JSON, convertJson)
+types.setTypeParser(PgColumnType.MONEY, (money: string) => money.slice(1))
\ No newline at end of file
diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts
index ae2df4e7a6b6..a049b59a0740 100644
--- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts
+++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts
@@ -32,6 +32,7 @@ class PgQueryable implements Queryable {
       const columns = fields.map((field) => field.name)
       const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID))
+
       const resultSet: ResultSet = {
         columnNames: columns,
         columnTypes,
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json
index ee3487053d0e..ffa1d8209750 100644
--- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json
+++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/driver-adapter-utils",
-  "version": "0.7.0",
+  "version": "0.8.0",
   "description": "Internal set of utilities and types for Prisma's driver adapters.",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
index a313812eaf5b..09fa4b3ad6e1 100644
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
+++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
@@ -17,8 +17,8 @@ export const ColumnTypeEnum = {
   'Json': 11,
   'Enum': 12,
   'Bytes': 13,
-  // 'Set': 14,
-  // 'Array': 15,
+  'Set': 14,
+  'Uuid': 15,
   // ...
   'UnknownNumber': 128
 } as const
diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs
index d1ab2b361d69..a71742d3f282 100644
--- a/query-engine/driver-adapters/src/proxy.rs
+++ b/query-engine/driver-adapters/src/proxy.rs
@@ -159,8 +159,13 @@ pub enum ColumnType {
     /// This is currently unhandled, and will panic if encountered.
    Set = 14,
-    // Below there are custom types that don't have a 1:1 translation with a quaint::Value.
-    // enum variant.
+    /// UUID from postgres-flavored driver adapters is mapped to this type.
+    Uuid = 15,
+
+    /*
+     * Below there are custom types that don't have a 1:1 translation with a quaint::Value.
+     * enum variant.
+     */
     /// UnknownNumber is used when the type of the column is a number but of unknown particular type
     /// and precision.
     ///
@@ -363,6 +368,15 @@ fn js_value_to_quaint(
             "expected a string or an array in column {column_name}, found {mismatch}",
         )),
     },
+    ColumnType::Uuid => match json_value {
+        serde_json::Value::String(s) => uuid::Uuid::parse_str(&s)
+            .map(QuaintValue::uuid)
+            .map_err(|_| conversion_error!("Expected a UUID string")),
+        serde_json::Value::Null => Ok(QuaintValue::Bytes(None)),
+        mismatch => Err(conversion_error!(
+            "Expected a UUID string in column {column_name}, found {mismatch}"
+        )),
+    },
     ColumnType::UnknownNumber => match json_value {
         serde_json::Value::Number(n) => n
             .as_i64()

From 6df6e1939776a7b0ca8d05857b6235f684b5935f Mon Sep 17 00:00:00 2001
From: Miguel Fernández
Date: Fri, 29 Sep 2023 18:59:43 +0200
Subject: [PATCH 059/128] fix(qe): Namespace references to enum type names
 with the schema name. (#4305)

* Add schema name to enum name for casting

* Escape schema name

* push down context

* Use context to grab the schema name

* Remove logging trace

* Fix tests

* Add cargo lock

* Check the lock file in to avoid divergences between envs
---
 quaint/.gitignore                                  |    3 +-
 quaint/Cargo.lock                                  | 2928 +++++++++++++++++
 quaint/src/ast/enums.rs                            |   26 +-
 quaint/src/ast/values.rs                           |    5 +-
 quaint/src/tests/query.rs                          |   10 +-
 quaint/src/visitor/postgres.rs                     |   24 +-
 .../src/cursor_condition.rs                        |    2 +-
 .../src/database/operations/read.rs                |    2 +-
 .../sql-query-connector/src/filter/visitor.rs      |   42 +-
 .../src/model_extensions/scalar_field.rs           |   31 +-
 .../src/model_extensions/selection_result.rs       |    7 +-
 .../src/query_builder/mod.rs                       |    7 +-
 .../src/query_builder/write.rs                     |   30 +-
 .../prisma-models/src/internal_enum.rs             |    4 +
 14 files changed, 3042 insertions(+), 79 deletions(-)
 create mode 100644 quaint/Cargo.lock

diff --git a/quaint/.gitignore b/quaint/.gitignore
index d14afc776cbd..10667362c4a3 100644
--- a/quaint/.gitignore
+++ b/quaint/.gitignore
@@ -1,9 +1,8 @@
 **/target
 **/*.rs.bk
-Cargo.lock
 .direnv/
 .vscode/

 docker-compose.override.yml

 db/
-!db/.gitkeep
\ No newline at end of file
+!db/.gitkeep
diff --git a/quaint/Cargo.lock b/quaint/Cargo.lock
new file mode 100644
index 000000000000..9bf69f731916
--- /dev/null
+++ b/quaint/Cargo.lock
@@ -0,0 +1,2928 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3 + +[[package]] +name = "addr2line" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "version_check", +] + +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8f9420f797f2d9e935edf629310eb938a0d839f984e25327f3c7eed22300c" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "async-native-tls" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d57d4cec3c647232e1094dc013546c0b33ce785d8aeb251e1f20dfaf8a9a13fe" +dependencies = [ + "futures-util", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-trait" +version = "0.1.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "asynchronous-codec" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" +dependencies = [ + "bytes", + "futures-sink", + "futures-util", + "memchr", + "pin-project-lite", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "bigdecimal" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "borsh" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" +dependencies = [ + "borsh-derive", + "hashbrown 0.13.2", +] + +[[package]] +name = "borsh-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" +dependencies = [ + "borsh-derive-internal", + "borsh-schema-derive-internal", + "proc-macro-crate", + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "borsh-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "borsh-schema-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "bumpalo" +version = "3.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" + +[[package]] +name = "bytecheck" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "cc" +version = "1.0.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" +dependencies = [ + "libc", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "time 0.1.45", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "clang-sys" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "cmake" +version = "0.1.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" +dependencies = [ + "cc", +] + +[[package]] +name = "connection-string" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "510ca239cf13b7f8d16a2b48f263de7b4f8c566f0af58d901031473c76afb1e3" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d706e75d87e35569db781a9b5e2416cff1236a47ed380831f959382ccd5f858" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" +dependencies = [ + "darling_core", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "deranged" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7684a49fb1af197853ef7b2ee694bc1f5b4179556f1e5710e1760c5db6f5e929" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "encoding" +version = "0.2.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" +dependencies = [ + "encoding-index-japanese", + 
"encoding-index-korean", + "encoding-index-simpchinese", + "encoding-index-singlebyte", + "encoding-index-tradchinese", +] + +[[package]] +name = "encoding-index-japanese" +version = "1.20141219.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" +dependencies = [ + "encoding_index_tests", +] + +[[package]] +name = "encoding-index-korean" +version = "1.20141219.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81" +dependencies = [ + "encoding_index_tests", +] + +[[package]] +name = "encoding-index-simpchinese" +version = "1.20141219.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7" +dependencies = [ + "encoding_index_tests", +] + +[[package]] +name = "encoding-index-singlebyte" +version = "1.20141219.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" +dependencies = [ + "encoding_index_tests", +] + +[[package]] +name = "encoding-index-tradchinese" +version = "1.20141219.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" +dependencies = [ + "encoding_index_tests", +] + +[[package]] +name = "encoding_index_tests" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" + +[[package]] +name = "enumflags2" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c041f5090df68b32bcd905365fd51769c8b9d553fe87fde0b683534f10c01bd2" +dependencies = [ + "enumflags2_derive", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e9a1f9f7d83e59740248a6e14ecf93929ade55027844dfcea78beafccc15745" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "errno" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "fallible-streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" + +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] +name = "flate2" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +dependencies = [ + "crc32fast", + 
"libz-sys", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "frunk" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11a351b59e12f97b4176ee78497dff72e4276fb1ceb13e19056aca7fa0206287" +dependencies = [ + "frunk_core", + "frunk_derives", + "frunk_proc_macros", +] + +[[package]] +name = "frunk_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af2469fab0bd07e64ccf0ad57a1438f63160c69b2e57f04a439653d68eb558d6" + +[[package]] +name = "frunk_derives" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fa992f1656e1707946bbba340ad244f0814009ef8c0118eb7b658395f19a2e" +dependencies = [ + "frunk_proc_macro_helpers", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "frunk_proc_macro_helpers" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35b54add839292b743aeda6ebedbd8b11e93404f902c56223e51b9ec18a13d2c" +dependencies = [ + "frunk_core", + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "frunk_proc_macros" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71b85a1d4a9a6b300b41c05e8e13ef2feca03e0334127f29eca9506a7fe13a93" +dependencies = [ + "frunk_core", + "frunk_proc_macro_helpers", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = 
"0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.6", +] + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.3", +] + +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +dependencies = [ + "ahash 0.8.3", + "allocator-api2", +] + +[[package]] +name = "hashlink" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" +dependencies = [ + "hashbrown 0.14.0", +] + +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indoc" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", + "unindent", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "js-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version 
= "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "lexical" +version = "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7aefb36fd43fef7003334742cbf77b243fcd36418a1d1bdd480d613a67968f6" +dependencies = [ + "lexical-core", +] + +[[package]] +name = "lexical-core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cde5de06e8d4c2faabc400238f9ae1c74d5412d03a7bd067645ccbc47070e46" +dependencies = [ + "lexical-parse-float", + "lexical-parse-integer", + "lexical-util", + "lexical-write-float", + "lexical-write-integer", +] + +[[package]] +name = "lexical-parse-float" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f" +dependencies = [ + "lexical-parse-integer", + "lexical-util", + "static_assertions", +] + +[[package]] +name = "lexical-parse-integer" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9" +dependencies = [ + "lexical-util", + "static_assertions", +] + +[[package]] +name = "lexical-util" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc" +dependencies = [ + "static_assertions", +] + +[[package]] +name = "lexical-write-float" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accabaa1c4581f05a3923d1b4cfd124c329352288b7b9da09e766b0668116862" +dependencies = [ + "lexical-util", + "lexical-write-integer", + "static_assertions", +] + +[[package]] +name = "lexical-write-integer" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b6f3d1f4422866b68192d62f77bc5c700bee84f3069f2469d7bc8c77852446" +dependencies = [ + "lexical-util", + "static_assertions", +] + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" + +[[package]] +name = "lock_api" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" + +[[package]] +name = "lru" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6e8aaa3f231bb4bd57b84b2d5dc3ae7f350265df8aa96492e0bc394a1571909" +dependencies = [ + "hashbrown 0.12.3", +] + +[[package]] +name = "lru-cache" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "md-5" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +dependencies = [ + "digest", +] + +[[package]] +name = "md5" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e6bcd6433cff03a4bfc3d9834d504467db1f1cf6d0ea765d37d330249ed629d" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "metrics" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e52eb6380b6d2a10eb3434aec0885374490f5b82c8aaf5cd487a183c98be834" +dependencies = [ + "ahash 0.7.6", + "metrics-macros", +] + +[[package]] +name = "metrics-macros" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49e30813093f757be5cf21e50389a24dc7dbb22c49f23b7e8f51d69b508a5ffa" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys", +] + +[[package]] +name = "mobc" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc79c4a77e312fee9c7bd4b957c12ad1196db73c4a81e5c0b13f02083c4f7f2f" +dependencies = [ + "async-trait", + "futures-channel", + "futures-core", + "futures-timer", + "futures-util", + "log", + "metrics", + "thiserror", + "tokio", + "tracing", + "tracing-subscriber", +] + 
+[[package]] +name = "mysql_async" +version = "0.31.3" +source = "git+https://github.com/prisma/mysql_async?branch=vendored-openssl#dad187b50dc7e8ce2b61fec126822e8e172a9c8a" +dependencies = [ + "bytes", + "crossbeam", + "flate2", + "futures-core", + "futures-sink", + "futures-util", + "lazy_static", + "lru", + "mio", + "mysql_common", + "native-tls", + "once_cell", + "pem", + "percent-encoding", + "pin-project", + "priority-queue", + "serde", + "serde_json", + "socket2 0.4.9", + "thiserror", + "tokio", + "tokio-native-tls", + "tokio-util 0.7.8", + "twox-hash", + "url", +] + +[[package]] +name = "mysql_common" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9006c95034ccf7b903d955f210469119f6c3477fc9c9e7a7845ce38a3e665c2a" +dependencies = [ + "base64 0.13.1", + "bigdecimal", + "bindgen", + "bitflags 1.3.2", + "bitvec", + "byteorder", + "bytes", + "cc", + "cmake", + "crc32fast", + "flate2", + "frunk", + "lazy_static", + "lexical", + "num-bigint", + "num-traits", + "rand 0.8.5", + "regex", + "rust_decimal", + "saturating", + "serde", + "serde_json", + "sha1", + "sha2", + "smallvec", + "subprocess", + "thiserror", + "time 0.3.25", + "uuid", +] + +[[package]] +name = "names" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" +dependencies = [ + "rand 0.3.23", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +dependencies = [ + "memchr", 
+] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "openssl" +version = "0.10.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "111.27.0+1.1.1v" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06e8f197c82d7511c5b014030c9b1efeda40d7d5f99d23b4ceed3524a5e63f02" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentls" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f561874f8d6ecfb674fc08863414040c93cc90c0b6963fe679895fab8b65560" +dependencies = [ + "futures-util", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +dependencies = [ + "base64 0.13.1", +] + +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + +[[package]] +name = "phf" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "postgres-native-tls" +version = "0.5.0" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" +dependencies = [ + "native-tls", + "tokio", + "tokio-native-tls", + "tokio-postgres", +] + +[[package]] +name = "postgres-protocol" +version = "0.6.4" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" +dependencies = [ + "base64 0.13.1", + "byteorder", + "bytes", + "fallible-iterator", + "hmac", + "md-5", + "memchr", + "rand 0.8.5", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.4" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" +dependencies = [ + "bit-vec", + "bytes", + "chrono", + "fallible-iterator", + "postgres-protocol", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "pretty-hex" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6fa0831dd7cc608c38a5e323422a0077678fa5744aa2be4ad91c4ece8eec8d5" + +[[package]] +name = "priority-queue" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61" +dependencies = [ + "autocfg", + "indexmap", +] + +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = 
"proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "quaint" +version = "0.2.0-alpha.13" +dependencies = [ + "async-trait", + "base64 0.12.3", + "bigdecimal", + "bit-vec", + "byteorder", + "bytes", + "chrono", + "connection-string", + "either", + "futures", + "hex", + "indoc", + "lru-cache", + "metrics", + "mobc", + "mysql_async", + "names", + "native-tls", + "num_cpus", + "once_cell", + "paste", + "percent-encoding", + "postgres-native-tls", + "postgres-types", + "rusqlite", + "serde", + "serde_json", + "sqlformat", + "test-macros", + "test-setup", + "thiserror", + "tiberius", + "tokio", + "tokio-postgres", + "tokio-util 0.6.10", + "tracing", + "tracing-core", + "url", + "uuid", +] + +[[package]] +name = "quote" +version = "1.0.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +dependencies = [ + "libc", + "rand 0.4.6", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" + +[[package]] +name = "rend" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "rkyv" +version = "0.7.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58" +dependencies = [ + "bitvec", + "bytecheck", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rusqlite" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2" +dependencies = [ + "bitflags 2.3.3", + "chrono", + "fallible-iterator", + "fallible-streaming-iterator", + "hashlink", + "libsqlite3-sys", + "smallvec", +] + +[[package]] 
+name = "rust_decimal" +version = "1.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a2ab0025103a60ecaaf3abf24db1db240a4e1c15837090d2c32f625ac98abea" +dependencies = [ + "arrayvec", + "borsh", + "byteorder", + "bytes", + "num-traits", + "rand 0.8.5", + "rkyv", + "serde", + "serde_json", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustix" +version = "0.38.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "172891ebdceb05aa0005f533a6cbfca599ddd7d966f6f5d4d9b2e70478e70399" +dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "saturating" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ece8e78b2f38ec51c51f5d475df0a7187ba5111b2a28bdc761ee05b075d40a71" + +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32ac8da02677876d532745a130fc9d8e6edfa81a269b107c5b00829b91d8eb3c" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aafe972d60b0b9bee71a91b92fee2d4fb3c9d7e8f6b179aa99f27203d99a4816" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "serde_json" +version = "1.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "simdutf8" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" + +[[package]] +name = "siphasher" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" + +[[package]] +name = "slab" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "sqlformat" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" +dependencies = [ + "itertools", + "nom", + "unicode_categories", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stringprep" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" + +[[package]] +name = "subprocess" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c2e86926081dda636c546d8c5e641661049d7562a68f5488be4a1f7f66f6086" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = 
"1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys", +] + +[[package]] +name = "test-macros" +version = "0.1.0" +dependencies = [ + "darling", + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", + "test-setup", +] + +[[package]] +name = "test-setup" +version = "0.1.0" +dependencies = [ + "async-trait", + "bitflags 1.3.2", + "names", + "once_cell", + "quaint", + "tokio", +] + +[[package]] +name = "thiserror" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "tiberius" +version = "0.11.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66303a42b7c5daffb95c10cd8f3007a9c29b3e90128cf42b3738f58102aa2516" +dependencies = [ + "async-native-tls", + "async-trait", + "asynchronous-codec", + "bigdecimal", + "byteorder", + "bytes", + "chrono", + "connection-string", + "encoding", + "enumflags2", + "futures", + "futures-sink", + "futures-util", + "num-traits", + "once_cell", + "opentls", + "pin-project-lite", + "pretty-hex", + "thiserror", + "tokio", + "tokio-util 0.7.8", + "tracing", + "uuid", + "winauth", +] + +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fdd63d58b18d663fbdf70e049f00a22c8e42be082203be7f26589213cd75ea" +dependencies = [ + "deranged", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" + +[[package]] +name = "time-macros" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"eb71511c991639bb078fd5bf97757e03914361c48100d52878b8e52b46fb92cd" +dependencies = [ + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + "socket2 0.5.3", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.7" +source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "socket2 0.5.3", + "tokio", + "tokio-util 0.7.8", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "rand 0.3.23", + "static_assertions", +] + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "unindent" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" + +[[package]] +name = "url" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "uuid" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +dependencies = [ + "getrandom 0.2.10", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.28", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "winauth" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f820cd208ce9c6b050812dc2d724ba98c6c1e9db5ce9b3f58d925ae5723a5e6" +dependencies = [ + "bitflags 1.3.2", + "byteorder", + "md5", + "rand 0.7.3", + "winapi", +] + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] \ No newline at end of file diff --git a/quaint/src/ast/enums.rs b/quaint/src/ast/enums.rs index 4b9798ee9704..d301df25cc13 100644 --- a/quaint/src/ast/enums.rs +++ b/quaint/src/ast/enums.rs @@ -61,24 +61,16 @@ impl<'a> From<&'a str> for EnumVariant<'a> { } #[derive(Debug, Clone, PartialEq)] -pub struct EnumName<'a>(Cow<'a, str>); - -impl<'a> EnumName<'a> { - pub fn new(name: impl Into>) -> Self { - Self(name.into()) - } +pub struct EnumName<'a> { + pub name: Cow<'a, str>, + pub schema_name: Option>, } -impl<'a> AsRef for EnumName<'a> { - fn as_ref(&self) -> &str { - self.0.as_ref() - } -} - -impl<'a> std::ops::Deref for EnumName<'a> { - type Target = str; - - fn deref(&self) -> &Self::Target { - &self.0 +impl<'a> EnumName<'a> { + pub fn new(name: impl Into>, schema_name: Option>>) -> Self { + Self { + name: name.into(), + schema_name: schema_name.map(|s| s.into()), + } } } diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 89a8bfb02760..05acccd77fff 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -271,12 +271,13 @@ impl<'a> Value<'a> { } /// Creates a new enum value with the name of the enum attached. - pub fn enum_variant_with_name(value: T, name: U) -> Self + pub fn enum_variant_with_name(value: T, name: U, schema_name: Option) -> Self where T: Into>, U: Into>, + V: Into>, { - Value::Enum(Some(EnumVariant::new(value)), Some(EnumName::new(name))) + Value::Enum(Some(EnumVariant::new(value)), Some(EnumName::new(name, schema_name))) } /// Creates a new bytes value. 
diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index ae3666e80a6b..7016262f2fec 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -1665,7 +1665,10 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { api.conn() .insert( Insert::single_into(&table) - .value("value", Value::enum_variant_with_name("A", &type_name)) + .value( + "value", + Value::enum_variant_with_name("A", &type_name, Option::<&str>::None), + ) .into(), ) .await?; @@ -1673,7 +1676,10 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { api.conn() .insert( Insert::single_into(&table) - .value("value", Value::enum_variant_with_name("B", &type_name)) + .value( + "value", + Value::enum_variant_with_name("B", &type_name, Option::<&str>::None), + ) .into(), ) .await?; diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 4ba6b99b5123..0e36abe68c24 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -61,7 +61,11 @@ impl<'a> Visitor<'a> for Postgres<'a> { s.parameter_substitution()?; s.write("::text")?; s.write(" AS ")?; - s.surround_with_backticks(enum_name.deref()) + if let Some(schema_name) = enum_name.schema_name { + s.surround_with_backticks(schema_name.deref())?; + s.write(".")? + } + s.surround_with_backticks(enum_name.name.deref()) })?; } else { self.parameter_substitution()?; @@ -96,7 +100,11 @@ impl<'a> Visitor<'a> for Postgres<'a> { })?; self.write("::")?; - self.surround_with_backticks(enum_name.deref())?; + if let Some(schema_name) = enum_name.schema_name { + self.surround_with_backticks(schema_name.deref())?; + self.write(".")? + } + self.surround_with_backticks(enum_name.name.deref())?; self.write("[]")?; } else { self.visit_parameterized(Value::Array(Some( @@ -210,9 +218,13 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) })?; - if let Some(name) = name { + if let Some(enum_name) = name { self.write("::")?; - self.surround_with_backticks(name.as_ref())?; + if let Some(schema_name) = enum_name.schema_name { + self.surround_with_backticks(schema_name.deref())?; + self.write(".")? 
+ } + self.surround_with_backticks(enum_name.name.deref())?; } Ok(()) @@ -1040,11 +1052,11 @@ mod tests { fn test_raw_enum_array() { let enum_array = Value::EnumArray( Some(vec![EnumVariant::new("A"), EnumVariant::new("B")]), - Some(EnumName::new("Alphabet")), + Some(EnumName::new("Alphabet", Some("foo"))), ); let (sql, params) = Postgres::build(Select::default().value(enum_array.raw())).unwrap(); - assert_eq!("SELECT ARRAY['A','B']::\"Alphabet\"", sql); + assert_eq!("SELECT ARRAY['A','B']::\"foo\".\"Alphabet\"", sql); assert!(params.is_empty()); } diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs index 72bbd8e273ec..79e61523dec2 100644 --- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs +++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs @@ -207,7 +207,7 @@ pub(crate) fn build( None => ConditionTree::NoCondition, Some(ref cursor) => { let cursor_fields: Vec<_> = cursor.as_scalar_fields().expect("Cursor fields contain non-scalars."); - let cursor_values: Vec<_> = cursor.db_values(); + let cursor_values: Vec<_> = cursor.db_values(ctx); let cursor_columns: Vec<_> = cursor_fields.as_slice().as_columns(ctx).collect(); let cursor_row = Row::from(cursor_columns); diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 6de72d5e744c..470628de1132 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -166,7 +166,7 @@ pub(crate) async fn get_related_m2m_record_ids( // [DTODO] To verify: We might need chunked fetch here (too many parameters in the query). 
let select = Select::from_table(table) - .so_that(query_builder::in_conditions(&from_columns, from_record_ids)) + .so_that(query_builder::in_conditions(&from_columns, from_record_ids, ctx)) .columns(from_columns.into_iter().chain(to_columns.into_iter())); let parent_model_id = from_field.model().primary_identifier(); diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 274301892f93..894a35305459 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -535,7 +535,7 @@ impl FilterVisitorExt for FilterVisitor { let condition = match cond { ScalarListCondition::Contains(ConditionValue::Value(val)) => { - comparable.compare_raw("@>", convert_list_pv(field, vec![val])) + comparable.compare_raw("@>", convert_list_pv(field, vec![val], ctx)) } ScalarListCondition::Contains(ConditionValue::FieldRef(field_ref)) => { let field_ref_expr: Expression = field_ref.aliased_col(alias, ctx).into(); @@ -544,13 +544,13 @@ impl FilterVisitorExt for FilterVisitor { field_ref_expr.equals(comparable.any()) } ScalarListCondition::ContainsEvery(ConditionListValue::List(vals)) => { - comparable.compare_raw("@>", convert_list_pv(field, vals)) + comparable.compare_raw("@>", convert_list_pv(field, vals, ctx)) } ScalarListCondition::ContainsEvery(ConditionListValue::FieldRef(field_ref)) => { comparable.compare_raw("@>", field_ref.aliased_col(alias, ctx)) } ScalarListCondition::ContainsSome(ConditionListValue::List(vals)) => { - comparable.compare_raw("&&", convert_list_pv(field, vals)) + comparable.compare_raw("&&", convert_list_pv(field, vals, ctx)) } ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { comparable.compare_raw("&&", field_ref.aliased_col(alias, ctx)) @@ -847,13 +847,13 @@ fn default_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } comparable.in_selection(sql_values) } - _ => comparable.in_selection(convert_pvs(fields, values)), + _ => comparable.in_selection(convert_pvs(fields, values, ctx)), }, ScalarCondition::In(ConditionListValue::FieldRef(field_ref)) => { // This code path is only reachable for connectors with `ScalarLists` capability @@ -864,13 +864,13 @@ fn default_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } comparable.not_in_selection(sql_values) } - _ => comparable.not_in_selection(convert_pvs(fields, values)), + _ => comparable.not_in_selection(convert_pvs(fields, values, ctx)), }, ScalarCondition::NotIn(ConditionListValue::FieldRef(field_ref)) => { // This code path is only reachable for connectors with `ScalarLists` capability @@ -999,7 +999,7 @@ fn insensitive_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value = convert_pvs(fields, pv.into_list().unwrap()); + let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx); sql_values.push(list_value); } @@ -1030,7 +1030,7 @@ fn insensitive_scalar_filter( let mut sql_values = Values::with_capacity(values.len()); for pv in values { - let list_value 
= convert_pvs(fields, pv.into_list().unwrap());
+                let list_value = convert_pvs(fields, pv.into_list().unwrap(), ctx);
                 sql_values.push(list_value);
             }
 
@@ -1096,7 +1096,7 @@ fn convert_value<'a>(
     ctx: &Context<'_>,
 ) -> Expression<'a> {
     match value.into() {
-        ConditionValue::Value(pv) => convert_pv(field, pv),
+        ConditionValue::Value(pv) => convert_pv(field, pv, ctx),
         ConditionValue::FieldRef(field_ref) => field_ref.aliased_col(alias, ctx).into(),
     }
 }
@@ -1108,29 +1108,29 @@ fn convert_first_value<'a>(
     ctx: &Context<'_>,
 ) -> Expression<'a> {
     match value.into() {
-        ConditionValue::Value(pv) => convert_pv(fields.first().unwrap(), pv),
+        ConditionValue::Value(pv) => convert_pv(fields.first().unwrap(), pv, ctx),
         ConditionValue::FieldRef(field_ref) => field_ref.aliased_col(alias, ctx).into(),
     }
 }
 
-fn convert_pv<'a>(field: &ScalarFieldRef, pv: PrismaValue) -> Expression<'a> {
-    field.value(pv).into()
+fn convert_pv<'a>(field: &ScalarFieldRef, pv: PrismaValue, ctx: &Context<'_>) -> Expression<'a> {
+    field.value(pv, ctx).into()
 }
 
-fn convert_list_pv<'a>(field: &ScalarFieldRef, values: Vec<PrismaValue>) -> Expression<'a> {
-    Value::Array(Some(values.into_iter().map(|val| field.value(val)).collect())).into()
+fn convert_list_pv<'a>(field: &ScalarFieldRef, values: Vec<PrismaValue>, ctx: &Context<'_>) -> Expression<'a> {
+    Value::Array(Some(values.into_iter().map(|val| field.value(val, ctx)).collect())).into()
 }
 
-fn convert_pvs<'a>(fields: &[ScalarFieldRef], values: Vec<PrismaValue>) -> Vec<Value<'a>> {
+fn convert_pvs<'a>(fields: &[ScalarFieldRef], values: Vec<PrismaValue>, ctx: &Context<'_>) -> Vec<Value<'a>> {
     if fields.len() == values.len() {
         fields
             .iter()
             .zip(values)
-            .map(|(field, value)| field.value(value))
+            .map(|(field, value)| field.value(value, ctx))
             .collect()
     } else {
         let field = fields.first().unwrap();
-        values.into_iter().map(|value| field.value(value)).collect()
+        values.into_iter().map(|value| field.value(value, ctx)).collect()
     }
 }
 
@@ -1191,7 +1191,7 @@ impl JsonFilterExt for (Expression<'static>, Expression<'static>) {
             }
             // array_contains (value)
             (ConditionValue::Value(value), JsonTargetType::Array) => {
-                let contains = expr_json.clone().json_array_contains(convert_pv(field, value));
+                let contains = expr_json.clone().json_array_contains(convert_pv(field, value, ctx));
 
                 if reverse {
                     contains.or(expr_json.json_type_not_equals(JsonType::Array)).into()
@@ -1249,7 +1249,7 @@
             }
             // array_starts_with (value)
             (ConditionValue::Value(value), JsonTargetType::Array) => {
-                let starts_with = expr_json.clone().json_array_begins_with(convert_pv(field, value));
+                let starts_with = expr_json.clone().json_array_begins_with(convert_pv(field, value, ctx));
 
                 if reverse {
                     starts_with.or(expr_json.json_type_not_equals(JsonType::Array)).into()
@@ -1309,7 +1309,7 @@
             }
             // array_ends_with (value)
             (ConditionValue::Value(value), JsonTargetType::Array) => {
-                let ends_with = expr_json.clone().json_array_ends_into(convert_pv(field, value));
+                let ends_with = expr_json.clone().json_array_ends_into(convert_pv(field, value, ctx));
 
                 if reverse {
                     ends_with.or(expr_json.json_type_not_equals(JsonType::Array)).into()
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs
index bddb0aacc333..1250fbf88f67 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs
@@ -1,3 +1,4 @@
+use crate::context::Context;
 use chrono::Utc;
 use prisma_models::{ScalarField, TypeIdentifier};
 use prisma_value::PrismaValue;
@@ -6,13 +7,13 @@ use quaint::{
     prelude::{EnumVariant, TypeDataLength, TypeFamily},
 };
 
-pub trait ScalarFieldExt {
-    fn value<'a>(&self, pv: PrismaValue) -> Value<'a>;
+pub(crate) trait ScalarFieldExt {
+    fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a>;
 
     fn type_family(&self) -> TypeFamily;
 }
 
 impl ScalarFieldExt for ScalarField {
-    fn value<'a>(&self, pv: PrismaValue) -> Value<'a> {
+    fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a> {
         match (pv, self.type_identifier()) {
             (PrismaValue::String(s), _) => s.into(),
             (PrismaValue::Float(f), _) => f.into(),
@@ -20,8 +21,13 @@ impl ScalarFieldExt for ScalarField {
             (PrismaValue::DateTime(d), _) => d.with_timezone(&Utc).into(),
             (PrismaValue::Enum(e), TypeIdentifier::Enum(enum_id)) => {
                 let enum_walker = self.dm.clone().zip(enum_id);
+                let enum_name = enum_walker.db_name().to_owned();
+                let schema_name = enum_walker
+                    .schema_name()
+                    .map(ToOwned::to_owned)
+                    .or(Some(ctx.schema_name().to_owned()));
 
-                Value::enum_variant_with_name(e, enum_walker.db_name().to_owned())
+                Value::enum_variant_with_name(e, enum_name, schema_name)
             }
             (PrismaValue::List(vals), TypeIdentifier::Enum(enum_id)) => {
                 let enum_walker = self.dm.clone().zip(enum_id);
@@ -31,13 +37,19 @@ impl ScalarFieldExt for ScalarField {
                     .map(EnumVariant::new)
                     .collect();
 
-                Value::EnumArray(Some(variants), Some(EnumName::new(enum_walker.db_name().to_owned())))
+                let enum_name = enum_walker.db_name().to_owned();
+                let schema_name = enum_walker
+                    .schema_name()
+                    .map(ToOwned::to_owned)
+                    .or(Some(ctx.schema_name().to_owned()));
+
+                Value::EnumArray(Some(variants), Some(EnumName::new(enum_name, schema_name)))
             }
             (PrismaValue::Enum(e), _) => e.into(),
             (PrismaValue::Int(i), _) => i.into(),
             (PrismaValue::BigInt(i), _) => i.into(),
             (PrismaValue::Uuid(u), _) => u.to_string().into(),
-            (PrismaValue::List(l), _) => Value::Array(Some(l.into_iter().map(|x| self.value(x)).collect())),
+            (PrismaValue::List(l), _) => Value::Array(Some(l.into_iter().map(|x| self.value(x, ctx)).collect())),
             (PrismaValue::Json(s), _) => Value::Json(Some(serde_json::from_str::<serde_json::Value>(&s).unwrap())),
             (PrismaValue::Bytes(b), _) => Value::Bytes(Some(b.into())),
             (PrismaValue::Object(_), _) => unimplemented!(),
@@ -48,8 +60,13 @@ impl ScalarFieldExt for ScalarField {
                 TypeIdentifier::Boolean => Value::Boolean(None),
                 TypeIdentifier::Enum(enum_id) => {
                     let enum_walker = self.dm.clone().zip(enum_id);
+                    let enum_name = enum_walker.db_name().to_owned();
+                    let schema_name = enum_walker
+                        .schema_name()
+                        .map(ToOwned::to_owned)
+                        .or(Some(ctx.schema_name().to_owned()));
 
-                    Value::Enum(None, Some(EnumName::new(enum_walker.db_name().to_owned())))
+                    Value::Enum(None, Some(EnumName::new(enum_name, schema_name)))
                 }
                 TypeIdentifier::Json => Value::Json(None),
                 TypeIdentifier::DateTime => Value::DateTime(None),
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs
index 25e864ddd7c0..25d994b1d64d 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/selection_result.rs
@@ -1,10 +1,11 @@
 use super::ScalarFieldExt;
+use crate::context::Context;
 use prisma_models::{PrismaValue, SelectedField, SelectionResult};
 use quaint::Value;
 
 pub(crate) trait SelectionResultExt {
     fn misses_autogen_value(&self) -> bool;
-    fn db_values<'a>(&self) -> Vec<Value<'a>>;
+    fn db_values<'a>(&self, ctx: &Context<'_>) -> Vec<Value<'a>>;
 
     fn add_autogen_value<V>(&mut self, value: V) -> bool
     where
@@ -30,11 +31,11 @@ impl SelectionResultExt for SelectionResult {
         false
     }
 
-    fn db_values<'a>(&self) -> Vec<Value<'a>> {
+    fn db_values<'a>(&self, ctx: &Context<'_>) -> Vec<Value<'a>> {
         self.pairs
             .iter()
             .map(|(selection, v)| match selection {
-                SelectedField::Scalar(sf) => sf.value(v.clone()),
+                SelectedField::Scalar(sf) => sf.value(v.clone(), ctx),
                 SelectedField::Composite(_cf) => todo!(), // [Composites] todo
             })
             .collect()
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs
index b121eb308249..f9a3d43905e3 100644
--- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs
@@ -1,6 +1,7 @@
 pub(crate) mod read;
 pub(crate) mod write;
 
+use crate::context::Context;
 use crate::model_extensions::SelectionResultExt;
 use prisma_models::SelectionResult;
 use quaint::ast::{Column, Comparable, ConditionTree, Query, Row, Values};
@@ -10,6 +11,7 @@ const PARAMETER_LIMIT: usize = 2000;
 pub(super) fn chunked_conditions<F, Q>(
     columns: &[Column<'static>],
     records: &[&SelectionResult],
+    ctx: &Context<'_>,
     f: F,
 ) -> Vec<Query<'static>>
 where
@@ -19,7 +21,7 @@ where
     records
         .chunks(PARAMETER_LIMIT)
         .map(|chunk| {
-            let tree = in_conditions(columns, chunk.iter().copied());
+            let tree = in_conditions(columns, chunk.iter().copied(), ctx);
             f(tree).into()
         })
         .collect()
@@ -28,11 +30,12 @@
 pub(super) fn in_conditions<'a>(
     columns: &'a [Column<'static>],
     results: impl IntoIterator<Item = &'a SelectionResult>,
+    ctx: &Context<'_>,
 ) -> ConditionTree<'static> {
     let mut values = Values::empty();
 
     for result in results.into_iter() {
-        let vals: Vec<_> = result.db_values();
+        let vals: Vec<_> = result.db_values(ctx);
         values.push(vals)
     }
diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs
index f0a4fb08e237..b9356842b285 100644
--- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs
+++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs
@@ -28,7 +28,7 @@ pub(crate) fn create_record(
             .try_into()
             .expect("Create calls can only use PrismaValue write expressions (right now).");
 
-        insert.value(db_name.to_owned(), field.value(value))
+        insert.value(db_name.to_owned(), field.value(value, ctx))
     });
 
     Insert::from(insert)
@@ -64,7 +64,7 @@ pub(crate) fn create_records_nonempty(
                         .try_into()
                         .expect("Create calls can only use PrismaValue write expressions (right now).");
 
-                    row.push(field.value(value).into());
+                    row.push(field.value(value, ctx).into());
                 }
 
                 None => row.push(default_value()),
@@ -124,12 +124,12 @@ pub(crate) fn build_update_and_set_query(
         let value: Expression = match val.try_into_scalar().unwrap() {
             ScalarWriteOperation::Field(_) => unimplemented!(),
-            ScalarWriteOperation::Set(rhs) => field.value(rhs).into(),
+            ScalarWriteOperation::Set(rhs) => field.value(rhs, ctx).into(),
             ScalarWriteOperation::Add(rhs) if field.is_list() => {
                 let e: Expression = Column::from((table.clone(), name.clone())).into();
                 let vals: Vec<_> = match rhs {
-                        PrismaValue::List(vals) => vals.into_iter().map(|val| field.value(val)).collect(),
-                        _ => vec![field.value(rhs)],
+
PrismaValue::List(vals) => vals.into_iter().map(|val| field.value(val, ctx)).collect(), + _ => vec![field.value(rhs, ctx)], }; // Postgres only @@ -137,22 +137,22 @@ pub(crate) fn build_update_and_set_query( } ScalarWriteOperation::Add(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e + field.value(rhs).into() + e + field.value(rhs, ctx).into() } ScalarWriteOperation::Substract(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e - field.value(rhs).into() + e - field.value(rhs, ctx).into() } ScalarWriteOperation::Multiply(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e * field.value(rhs).into() + e * field.value(rhs, ctx).into() } ScalarWriteOperation::Divide(rhs) => { let e: Expression<'_> = Column::from((table.clone(), name.clone())).into(); - e / field.value(rhs).into() + e / field.value(rhs, ctx).into() } ScalarWriteOperation::Unset(_) => unreachable!("Unset is not supported on SQL connectors"), @@ -183,7 +183,7 @@ pub(crate) fn chunk_update_with_ids( .as_columns(ctx) .collect(); - let query = super::chunked_conditions(&columns, ids, |conditions| { + let query = super::chunked_conditions(&columns, ids, ctx, |conditions| { update.clone().so_that(conditions.and(filter_condition.clone())) }); @@ -212,7 +212,7 @@ pub(crate) fn delete_many_from_ids_and_filter( .as_columns(ctx) .collect(); - super::chunked_conditions(&columns, ids, |conditions| { + super::chunked_conditions(&columns, ids, ctx, |conditions| { delete_many_from_filter(model, conditions.and(filter_condition.clone()), ctx) }) } @@ -232,9 +232,9 @@ pub(crate) fn create_relation_table_records( let insert = Insert::multi_into(relation.as_table(ctx), columns); let insert: MultiRowInsert = child_ids.iter().fold(insert, |insert, child_id| { - let mut values: Vec<_> = parent_id.db_values(); + let mut values: Vec<_> = parent_id.db_values(ctx); - values.extend(child_id.db_values()); + values.extend(child_id.db_values(ctx)); insert.values(values) }); @@ -253,14 +253,14 @@ pub(crate) fn delete_relation_table_records( let mut parent_columns: Vec<_> = parent_field.related_field().m2m_columns(ctx); let child_columns: Vec<_> = parent_field.m2m_columns(ctx); - let parent_id_values = parent_id.db_values(); + let parent_id_values = parent_id.db_values(ctx); let parent_id_criteria = if parent_columns.len() > 1 { Row::from(parent_columns).equals(parent_id_values) } else { parent_columns.pop().unwrap().equals(parent_id_values) }; - let child_id_criteria = super::in_conditions(&child_columns, child_ids); + let child_id_criteria = super::in_conditions(&child_columns, child_ids, ctx); Delete::from_table(relation.as_table(ctx)) .so_that(parent_id_criteria.and(child_id_criteria)) diff --git a/query-engine/prisma-models/src/internal_enum.rs b/query-engine/prisma-models/src/internal_enum.rs index e393f82509cd..6467adcebf6d 100644 --- a/query-engine/prisma-models/src/internal_enum.rs +++ b/query-engine/prisma-models/src/internal_enum.rs @@ -13,6 +13,10 @@ impl InternalEnum { pub fn db_name(&self) -> &str { self.dm.walk(self.id).database_name() } + + pub fn schema_name(&self) -> Option<&str> { + self.dm.walk(self.id).schema().map(|tuple| tuple.0) + } } impl std::fmt::Debug for InternalEnum { From 6b4d2fc685ddd87d33b4efa5b8ff961f4cf2fec5 Mon Sep 17 00:00:00 2001 From: XiaoZhang Date: Sat, 30 Sep 2023 02:03:22 +0800 Subject: [PATCH 060/128] fix: refine error message when retrieving type identifier (#4253) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miguel Fernández --- .../validation_pipeline/validations.rs | 1 + .../validations/indexes.rs | 17 ++++++++++++ .../prisma-models/src/field/scalar.rs | 4 ++- .../tests/datamodel_converter_tests.rs | 26 +++++++++++++++++++ 4 files changed, 47 insertions(+), 1 deletion(-) diff --git a/psl/psl-core/src/validate/validation_pipeline/validations.rs b/psl/psl-core/src/validate/validation_pipeline/validations.rs index 5814317ee48a..4040844bb767 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations.rs @@ -123,6 +123,7 @@ pub(super) fn validate(ctx: &mut Context<'_>) { indexes::supports_clustering_setting(index, ctx); indexes::clustering_can_be_defined_only_once(index, ctx); indexes::opclasses_are_not_allowed_with_other_than_normal_indices(index, ctx); + indexes::composite_types_are_not_allowed_in_index(index, ctx); for field_attribute in index.scalar_field_attributes() { let span = index.ast_attribute().span; diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs index 7d16ce78414d..5f3288264016 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs @@ -386,6 +386,23 @@ pub(crate) fn opclasses_are_not_allowed_with_other_than_normal_indices(index: In } } +pub(crate) fn composite_types_are_not_allowed_in_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) { + for field in index.fields() { + if field.scalar_field_type().as_composite_type().is_some() { + let message = format!( + "Indexes can only contain scalar attributes. Please remove {:?} from the argument list of the indexes.", + field.name() + ); + ctx.push_error(DatamodelError::new_attribute_validation_error( + &message, + index.attribute_name(), + index.ast_attribute().span, + )); + return; + } + } +} + pub(super) fn unique_client_name_does_not_clash_with_field(index: IndexWalker<'_>, ctx: &mut Context<'_>) { if !index.is_unique() { return; diff --git a/query-engine/prisma-models/src/field/scalar.rs b/query-engine/prisma-models/src/field/scalar.rs index cc3b3533322b..92039da53663 100644 --- a/query-engine/prisma-models/src/field/scalar.rs +++ b/query-engine/prisma-models/src/field/scalar.rs @@ -90,7 +90,9 @@ impl ScalarField { }; match scalar_field_type { - ScalarFieldType::CompositeType(_) => unreachable!(), + ScalarFieldType::CompositeType(_) => { + unreachable!("Cannot convert a composite type to a type identifier. 
This error is typically caused by mistakenly using a composite type within a composite index.",) + } ScalarFieldType::Enum(x) => TypeIdentifier::Enum(x), ScalarFieldType::BuiltInScalar(scalar) => scalar.into(), ScalarFieldType::Unsupported(_) => TypeIdentifier::Unsupported, diff --git a/query-engine/prisma-models/tests/datamodel_converter_tests.rs b/query-engine/prisma-models/tests/datamodel_converter_tests.rs index 2f5bf75b103f..0a45c80ed163 100644 --- a/query-engine/prisma-models/tests/datamodel_converter_tests.rs +++ b/query-engine/prisma-models/tests/datamodel_converter_tests.rs @@ -38,6 +38,32 @@ fn converting_enums() { } } +#[test] +fn converting_composite_types() { + let res = psl::parse_schema( + r#" + datasource db { + provider = "mongodb" + url = "mongodb://localhost:27017/hello" + } + + model MyModel { + id String @id @default(auto()) @map("_id") @db.ObjectId + attribute Attribute + + @@unique([attribute], name: "composite_index") + } + + type Attribute { + name String + value String + group String + } + "#, + ); + assert!(res.unwrap_err().contains("Indexes can only contain scalar attributes. Please remove \"attribute\" from the argument list of the indexes.")); +} + #[test] fn models_with_only_scalar_fields() { let datamodel = convert( From a39bb2dcb9f2ca75c528537d32de238eb9096ed2 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Mon, 2 Oct 2023 14:00:00 +0200 Subject: [PATCH 061/128] psl: un-hide `driverAdapters` preview feature (#4304) Closes: https://github.com/prisma/team-orm/issues/406 --- psl/psl-core/src/common/preview_features.rs | 7 +++---- psl/psl/tests/config/generators.rs | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/psl/psl-core/src/common/preview_features.rs b/psl/psl-core/src/common/preview_features.rs index 93ddde63291c..544bf5b99164 100644 --- a/psl/psl-core/src/common/preview_features.rs +++ b/psl/psl-core/src/common/preview_features.rs @@ -45,6 +45,7 @@ features!( DataProxy, Deno, Distinct, + DriverAdapters, ExtendedIndexes, ExtendedWhereUnique, FieldReference, @@ -64,7 +65,6 @@ features!( NamedConstraints, NApi, NativeTypes, - DriverAdapters, OrderByAggregateGroup, OrderByNulls, OrderByRelation, @@ -82,6 +82,7 @@ features!( pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { active: enumflags2::make_bitflags!(PreviewFeature::{ Deno + | DriverAdapters | FullTextIndex | FullTextSearch | Metrics @@ -123,9 +124,7 @@ pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { | TransactionApi | UncheckedScalarInputs }), - hidden: enumflags2::make_bitflags!(PreviewFeature::{ - DriverAdapters - }), + hidden: enumflags2::BitFlags::EMPTY, }; #[derive(Debug)] diff --git a/psl/psl/tests/config/generators.rs b/psl/psl/tests/config/generators.rs index 193ad28889ed..f10a9bda3eae 100644 --- a/psl/psl/tests/config/generators.rs +++ b/psl/psl/tests/config/generators.rs @@ -258,7 +258,7 @@ fn nice_error_for_unknown_generator_preview_feature() { .unwrap_err(); let expectation = expect![[r#" - error: The preview feature "foo" is not known. Expected one of: deno, fullTextIndex, fullTextSearch, metrics, multiSchema, postgresqlExtensions, tracing, views + error: The preview feature "foo" is not known. 
Expected one of: deno, driverAdapters, fullTextIndex, fullTextSearch, metrics, multiSchema, postgresqlExtensions, tracing, views --> schema.prisma:3  |   2 |  provider = "prisma-client-js" From 944a9c2e40ae2d4c9f6ceee792eab0fb18d65cec Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 3 Oct 2023 10:09:40 +0200 Subject: [PATCH 062/128] driver-adapters: support bytes type in pg and neon (#4307) * driver-adapters: support bytes in neon and pg * driver-adapters: pass bytes from Rust to JS as Node.js Buffer --- .../js/adapter-neon/src/conversion.ts | 23 ++++++++++++++++++- .../js/adapter-pg/src/conversion.ts | 23 ++++++++++++++++++- .../driver-adapters/src/conversion.rs | 7 +++--- 3 files changed, 47 insertions(+), 6 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 99db8870de98..dce982b4b1b0 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -44,6 +44,8 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { case NeonColumnType['INET']: case NeonColumnType['CIDR']: return ColumnTypeEnum.Text + case NeonColumnType['BYTEA']: + return ColumnTypeEnum.Bytes default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -67,10 +69,29 @@ function convertJson(json: string): unknown { return (json === 'null') ? JsonNullMarker : JSON.parse(json) } +// Original BYTEA parser +const parsePgBytes = types.getTypeParser(NeonColumnType.BYTEA) as (_: string) => Buffer + +/** + * Convert bytes to a JSON-encodable representation since we can't + * currently send a parsed Buffer or ArrayBuffer across JS to Rust + * boundary. + * TODO: + * 1. Check if using base64 would be more efficient than this encoding. + * 2. Consider the possibility of eliminating re-encoding altogether + * and passing bytea hex format to the engine if that can be aligned + * with other adapter flavours. + */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return Array.from(new Uint8Array(buffer)) +} + // return string instead of JavaScript Date object types.setTypeParser(NeonColumnType.TIME, date => date) types.setTypeParser(NeonColumnType.DATE, date => date) types.setTypeParser(NeonColumnType.TIMESTAMP, date => date) types.setTypeParser(NeonColumnType.JSONB, convertJson) types.setTypeParser(NeonColumnType.JSON, convertJson) -types.setTypeParser(NeonColumnType.MONEY, (money: string) => money.slice(1)) \ No newline at end of file +types.setTypeParser(NeonColumnType.MONEY, money => money.slice(1)) +types.setTypeParser(NeonColumnType.BYTEA, convertBytes) diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index a8102f95ad0b..6f78e9bb3698 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -44,6 +44,8 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { case PgColumnType['INET']: case PgColumnType['CIDR']: return ColumnTypeEnum.Text + case PgColumnType['BYTEA']: + return ColumnTypeEnum.Bytes default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -67,10 +69,29 @@ function convertJson(json: string): unknown { return (json === 'null') ? 
JsonNullMarker : JSON.parse(json) } +// Original BYTEA parser +const parsePgBytes = types.getTypeParser(PgColumnType.BYTEA) as (_: string) => Buffer + +/** + * Convert bytes to a JSON-encodable representation since we can't + * currently send a parsed Buffer or ArrayBuffer across JS to Rust + * boundary. + * TODO: + * 1. Check if using base64 would be more efficient than this encoding. + * 2. Consider the possibility of eliminating re-encoding altogether + * and passing bytea hex format to the engine if that can be aligned + * with other adapter flavours. + */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return Array.from(new Uint8Array(buffer)) +} + // return string instead of JavaScript Date object types.setTypeParser(PgColumnType.TIME, date => date) types.setTypeParser(PgColumnType.DATE, date => date) types.setTypeParser(PgColumnType.TIMESTAMP, date => date) types.setTypeParser(PgColumnType.JSONB, convertJson) types.setTypeParser(PgColumnType.JSON, convertJson) -types.setTypeParser(PgColumnType.MONEY, (money: string) => money.slice(1)) \ No newline at end of file +types.setTypeParser(PgColumnType.MONEY, money => money.slice(1)) +types.setTypeParser(PgColumnType.BYTEA, convertBytes) diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index ac06a3cff586..52060419c661 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -32,10 +32,9 @@ impl ToNapiValue for JSArg { match value { JSArg::RawString(s) => ToNapiValue::to_napi_value(env, s), JSArg::Value(v) => ToNapiValue::to_napi_value(env, v), - JSArg::Buffer(bytes) => ToNapiValue::to_napi_value( - env, - napi::Env::from_raw(env).create_arraybuffer_with_data(bytes)?.into_raw(), - ), + JSArg::Buffer(bytes) => { + ToNapiValue::to_napi_value(env, napi::Env::from_raw(env).create_buffer_with_data(bytes)?.into_raw()) + } } } } From 7df36636cba36cb527b67d90a4b2a5cf723dcb8e Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 3 Oct 2023 10:10:01 +0200 Subject: [PATCH 063/128] driver-adapters: support XML columns in pg and neon (#4308) Fixes the "unsupported column type 142" error. 
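For reference, a minimal standalone sketch of the mapping this change introduces (the constant and helper below are illustrative, not part of the adapters' public API):

```ts
// Postgres reports `xml` columns with type OID 142. The adapters now map
// that OID to the engine's Text column type instead of rejecting it, so
// XML values reach the engine as plain strings.
const XML_OID = 142

// Simplified stand-in for the adapters' internal fieldToColumnType() switch.
function toEngineColumnType(fieldTypeId: number): 'Text' | 'Unknown' {
  switch (fieldTypeId) {
    case XML_OID:
      return 'Text'
    default:
      return 'Unknown'
  }
}

console.log(toEngineColumnType(142)) // prints 'Text'
```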
--- query-engine/driver-adapters/js/adapter-neon/src/conversion.ts | 1 + query-engine/driver-adapters/js/adapter-pg/src/conversion.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index dce982b4b1b0..bc9f062128b1 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -43,6 +43,7 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { case NeonColumnType['VARBIT']: case NeonColumnType['INET']: case NeonColumnType['CIDR']: + case NeonColumnType['XML']: return ColumnTypeEnum.Text case NeonColumnType['BYTEA']: return ColumnTypeEnum.Bytes diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index 6f78e9bb3698..f176932b08f4 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -43,6 +43,7 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { case PgColumnType['VARBIT']: case PgColumnType['INET']: case PgColumnType['CIDR']: + case PgColumnType['XML']: return ColumnTypeEnum.Text case PgColumnType['BYTEA']: return ColumnTypeEnum.Bytes From 06e0f081c833fd3b6af31e608cbe53650873c9c8 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 3 Oct 2023 10:22:18 +0200 Subject: [PATCH 064/128] driver-adapters: support JSON columns (#4309) Fixes the "unsupported column type 141" error. --- query-engine/driver-adapters/js/adapter-neon/src/conversion.ts | 1 + query-engine/driver-adapters/js/adapter-pg/src/conversion.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index bc9f062128b1..940c318cb439 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -30,6 +30,7 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { case NeonColumnType['NUMERIC']: case NeonColumnType['MONEY']: return ColumnTypeEnum.Numeric + case NeonColumnType['JSON']: case NeonColumnType['JSONB']: return ColumnTypeEnum.Json case NeonColumnType['UUID']: diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index f176932b08f4..79f9461ba7d6 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -30,6 +30,7 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { case PgColumnType['NUMERIC']: case PgColumnType['MONEY']: return ColumnTypeEnum.Numeric + case PgColumnType['JSON']: case PgColumnType['JSONB']: return ColumnTypeEnum.Json case PgColumnType['UUID']: From 3b37c318a976ae7ee215b71633a315315b23280f Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Tue, 3 Oct 2023 15:14:51 +0200 Subject: [PATCH 065/128] driver-adapters: support arrays in pg and neon (#4310) Fixes most scalar list tests and some other tests that use scalar lists. 
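As a rough sketch of the approach (the OID and column type code below are the ones added by this patch; the helper itself is illustrative, not an adapter export): each PostgreSQL array OID is mapped to a dedicated array column type code in the 64..78 range, and the Rust side then converts every element with the scalar rules of the matching base type.

```ts
// int4[] columns arrive from Postgres with array OID 1007. The adapter maps
// them to the Int32Array column type code (64); per-element conversion then
// happens in js_array_to_quaint on the Rust side.
const INT4_ARRAY_OID = 1007
const COLUMN_TYPE_INT32_ARRAY = 64

function arrayOidToColumnType(fieldTypeId: number): number | null {
  switch (fieldTypeId) {
    case INT4_ARRAY_OID:
      return COLUMN_TYPE_INT32_ARRAY
    default:
      return null
  }
}

console.log(arrayOidToColumnType(1007)) // prints 64
```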
Fixed tests: ``` < new::regressions::prisma_13097::prisma_13097::group_by_boolean_array < new::regressions::prisma_13097::prisma_13097::group_by_enum_array < queries::batch::select_one_compound::compound_batch::should_only_batch_if_possible_list < queries::filters::field_reference::bytes_filter::bytes_filter::inclusion_filter < queries::filters::field_reference::bytes_filter::bytes_filter::scalar_list_filters < queries::filters::list_filters::json_lists::equality < queries::filters::list_filters::json_lists::has < queries::filters::list_filters::json_lists::has_every < queries::filters::list_filters::json_lists::has_some < queries::filters::list_filters::json_lists::is_empty < writes::data_types::scalar_list::base::basic_types::behave_like_regular_val_for_create_and_update < writes::data_types::scalar_list::base::basic_types::create_mut_return_items_with_empty_lists < writes::data_types::scalar_list::base::basic_types::create_mut_work_with_list_vals < writes::data_types::scalar_list::base::basic_types::set_base < writes::data_types::scalar_list::base::basic_types::update_mut_push_empty_scalar_list < writes::data_types::scalar_list::decimal::decimal::behave_like_regular_val_for_create_and_update < writes::data_types::scalar_list::decimal::decimal::create_mut_return_items_with_empty_lists < writes::data_types::scalar_list::decimal::decimal::create_mut_work_with_list_vals < writes::data_types::scalar_list::decimal::decimal::update_mut_push_empty_scalar_list < writes::data_types::scalar_list::defaults::basic::basic_empty_write < writes::data_types::scalar_list::defaults::basic::basic_write < writes::data_types::scalar_list::defaults::decimal::basic_empty_write < writes::data_types::scalar_list::defaults::json::basic_empty_write < writes::data_types::scalar_list::defaults::json::basic_write < writes::data_types::scalar_list::json::json::behave_like_regular_val_for_create_and_update < writes::data_types::scalar_list::json::json::create_mut_return_items_with_empty_lists < writes::data_types::scalar_list::json::json::create_mut_work_with_list_vals < writes::data_types::scalar_list::json::json::update_mut_push_empty_scalar_list < writes::top_level_mutations::create_list::create_list::create_not_accept_null_in_set ``` Relevant tests that are not fixed by these changes yet and will need to be addressed in future PRs: ``` raw::sql::null_list::null_list::null_scalar_lists writes::data_types::scalar_list::defaults::decimal::basic_write ``` Fixes: https://github.com/prisma/team-orm/issues/374 --- .../writes/data_types/scalar_list/json.rs | 6 +- .../query-tests-setup/src/runner/mod.rs | 4 + .../js/adapter-neon/src/conversion.ts | 86 ++++++++++- .../js/adapter-pg/src/conversion.ts | 86 ++++++++++- .../js/driver-adapter-utils/src/const.ts | 55 +++++--- .../driver-adapters/src/conversion.rs | 20 +++ query-engine/driver-adapters/src/proxy.rs | 133 ++++++++++++++++++ 7 files changed, 366 insertions(+), 24 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs index ab6ddbdcd304..79c511edbc52 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs @@ -70,8 +70,9 @@ mod json { // TODO: This specific query currently cannot be sent from the JS client. 
// The client _always_ sends an array as plain json and never as an array of json. // We're temporarily ignoring it for the JSON protocol because we can't differentiate a list of json values from a json array. + // Similarly, this does not currently work with driver adapters. // https://github.com/prisma/prisma/issues/18019 - if runner.protocol().is_graphql() { + if runner.protocol().is_graphql() && !runner.is_external_executor() { match_connector_result!( &runner, r#"mutation { @@ -161,8 +162,9 @@ mod json { // TODO: This specific query currently cannot be sent from the JS client. // The client _always_ sends an array as plain json and never as an array of json. // We're temporarily ignoring it for the JSON protocol because we can't differentiate a list of json values from a json array. + // Similarly, this does not currently work with driver adapters. // https://github.com/prisma/prisma/issues/18019 - if runner.protocol().is_graphql() { + if runner.protocol().is_graphql() && !runner.is_external_executor() { match_connector_result!( &runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index d6505206356b..03e2dce5c5e0 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -480,4 +480,8 @@ impl Runner { pub fn protocol(&self) -> EngineProtocol { self.protocol } + + pub fn is_external_executor(&self) -> bool { + matches!(self.executor, RunnerExecutor::External(_)) + } } diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 940c318cb439..932461e3bc3b 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -3,6 +3,31 @@ import { types } from '@neondatabase/serverless' const NeonColumnType = types.builtins +/** + * PostgreSQL array column types (not defined in NeonColumnType). + */ +const ArrayColumnType = { + BOOL_ARRAY: 1000, + BYTEA_ARRAY: 1001, + BPCHAR_ARRAY: 1014, + CHAR_ARRAY: 1002, + DATE_ARRAY: 1182, + FLOAT4_ARRAY: 1021, + FLOAT8_ARRAY: 1022, + INT2_ARRAY: 1005, + INT4_ARRAY: 1007, + JSONB_ARRAY: 3807, + JSON_ARRAY: 199, + MONEY_ARRAY: 791, + NUMERIC_ARRAY: 1231, + TEXT_ARRAY: 1009, + TIMESTAMP_ARRAY: 1115, + TIME_ARRAY: 1183, + UUID_ARRAY: 2951, + VARCHAR_ARRAY: 1015, + XML_ARRAY: 143, +} + /** * This is a simplification of quaint's value inference logic. 
Take a look at quaint's conversion.rs * module to see how other attributes of the field packet such as the field length are used to infer @@ -48,6 +73,40 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.Text case NeonColumnType['BYTEA']: return ColumnTypeEnum.Bytes + + case ArrayColumnType.INT2_ARRAY: + case ArrayColumnType.INT4_ARRAY: + return ColumnTypeEnum.Int32Array + case ArrayColumnType.FLOAT4_ARRAY: + return ColumnTypeEnum.FloatArray + case ArrayColumnType.FLOAT8_ARRAY: + return ColumnTypeEnum.DoubleArray + case ArrayColumnType.NUMERIC_ARRAY: + case ArrayColumnType.MONEY_ARRAY: + return ColumnTypeEnum.NumericArray + case ArrayColumnType.BOOL_ARRAY: + return ColumnTypeEnum.BooleanArray + case ArrayColumnType.CHAR_ARRAY: + return ColumnTypeEnum.CharArray + case ArrayColumnType.TEXT_ARRAY: + case ArrayColumnType.VARCHAR_ARRAY: + case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.XML_ARRAY: + return ColumnTypeEnum.TextArray + case ArrayColumnType.DATE_ARRAY: + return ColumnTypeEnum.DateArray + case ArrayColumnType.TIME_ARRAY: + return ColumnTypeEnum.TimeArray + case ArrayColumnType.TIMESTAMP_ARRAY: + return ColumnTypeEnum.DateTimeArray + case ArrayColumnType.JSON_ARRAY: + case ArrayColumnType.JSONB_ARRAY: + return ColumnTypeEnum.JsonArray + case ArrayColumnType.BYTEA_ARRAY: + return ColumnTypeEnum.BytesArray + case ArrayColumnType.UUID_ARRAY: + return ColumnTypeEnum.UuidArray + default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -78,14 +137,20 @@ const parsePgBytes = types.getTypeParser(NeonColumnType.BYTEA) as (_: string) => * Convert bytes to a JSON-encodable representation since we can't * currently send a parsed Buffer or ArrayBuffer across JS to Rust * boundary. + */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return encodeBuffer(buffer) +} + +/** * TODO: * 1. Check if using base64 would be more efficient than this encoding. * 2. Consider the possibility of eliminating re-encoding altogether * and passing bytea hex format to the engine if that can be aligned * with other adapter flavours. 
*/ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) +function encodeBuffer(buffer: Buffer) { return Array.from(new Uint8Array(buffer)) } @@ -97,3 +162,20 @@ types.setTypeParser(NeonColumnType.JSONB, convertJson) types.setTypeParser(NeonColumnType.JSON, convertJson) types.setTypeParser(NeonColumnType.MONEY, money => money.slice(1)) types.setTypeParser(NeonColumnType.BYTEA, convertBytes) + +const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] + +types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { + const buffers = parseBytesArray(serializedBytesArray) + return buffers.map(encodeBuffer) +}) + +const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] + +types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) +types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) +types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) + +types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => + parseTextArray(moneyArray).map((money) => money.slice(1)), +) diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index 79f9461ba7d6..a1c8ce7c5e6a 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -3,6 +3,31 @@ import { types } from 'pg' const PgColumnType = types.builtins +/** + * PostgreSQL array column types (not defined in PgColumnType). + */ +const ArrayColumnType = { + BOOL_ARRAY: 1000, + BYTEA_ARRAY: 1001, + BPCHAR_ARRAY: 1014, + CHAR_ARRAY: 1002, + DATE_ARRAY: 1182, + FLOAT4_ARRAY: 1021, + FLOAT8_ARRAY: 1022, + INT2_ARRAY: 1005, + INT4_ARRAY: 1007, + JSONB_ARRAY: 3807, + JSON_ARRAY: 199, + MONEY_ARRAY: 791, + NUMERIC_ARRAY: 1231, + TEXT_ARRAY: 1009, + TIMESTAMP_ARRAY: 1115, + TIME_ARRAY: 1183, + UUID_ARRAY: 2951, + VARCHAR_ARRAY: 1015, + XML_ARRAY: 143, +} + /** * This is a simplification of quaint's value inference logic. 
Take a look at quaint's conversion.rs * module to see how other attributes of the field packet such as the field length are used to infer @@ -48,6 +73,40 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.Text case PgColumnType['BYTEA']: return ColumnTypeEnum.Bytes + + case ArrayColumnType.INT2_ARRAY: + case ArrayColumnType.INT4_ARRAY: + return ColumnTypeEnum.Int32Array + case ArrayColumnType.FLOAT4_ARRAY: + return ColumnTypeEnum.FloatArray + case ArrayColumnType.FLOAT8_ARRAY: + return ColumnTypeEnum.DoubleArray + case ArrayColumnType.NUMERIC_ARRAY: + case ArrayColumnType.MONEY_ARRAY: + return ColumnTypeEnum.NumericArray + case ArrayColumnType.BOOL_ARRAY: + return ColumnTypeEnum.BooleanArray + case ArrayColumnType.CHAR_ARRAY: + return ColumnTypeEnum.CharArray + case ArrayColumnType.TEXT_ARRAY: + case ArrayColumnType.VARCHAR_ARRAY: + case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.XML_ARRAY: + return ColumnTypeEnum.TextArray + case ArrayColumnType.DATE_ARRAY: + return ColumnTypeEnum.DateArray + case ArrayColumnType.TIME_ARRAY: + return ColumnTypeEnum.TimeArray + case ArrayColumnType.TIMESTAMP_ARRAY: + return ColumnTypeEnum.DateTimeArray + case ArrayColumnType.JSON_ARRAY: + case ArrayColumnType.JSONB_ARRAY: + return ColumnTypeEnum.JsonArray + case ArrayColumnType.BYTEA_ARRAY: + return ColumnTypeEnum.BytesArray + case ArrayColumnType.UUID_ARRAY: + return ColumnTypeEnum.UuidArray + default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -78,14 +137,20 @@ const parsePgBytes = types.getTypeParser(PgColumnType.BYTEA) as (_: string) => B * Convert bytes to a JSON-encodable representation since we can't * currently send a parsed Buffer or ArrayBuffer across JS to Rust * boundary. + */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return encodeBuffer(buffer) +} + +/** * TODO: * 1. Check if using base64 would be more efficient than this encoding. * 2. Consider the possibility of eliminating re-encoding altogether * and passing bytea hex format to the engine if that can be aligned * with other adapter flavours. 
 */
-function convertBytes(serializedBytes: string): number[] {
-  const buffer = parsePgBytes(serializedBytes)
+function encodeBuffer(buffer: Buffer) {
   return Array.from(new Uint8Array(buffer))
 }
 
@@ -97,3 +162,20 @@ types.setTypeParser(PgColumnType.JSONB, convertJson)
 types.setTypeParser(PgColumnType.JSON, convertJson)
 types.setTypeParser(PgColumnType.MONEY, money => money.slice(1))
 types.setTypeParser(PgColumnType.BYTEA, convertBytes)
+
+const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[]
+
+types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => {
+  const buffers = parseBytesArray(serializedBytesArray)
+  return buffers.map(encodeBuffer)
+})
+
+const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[]
+
+types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray)
+types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray)
+types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray)
+
+types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) =>
+  parseTextArray(moneyArray).map((money) => money.slice(1)),
+)
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
index 09fa4b3ad6e1..5ddc7f20b390 100644
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
+++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
@@ -3,24 +3,43 @@
 // them via regular dictionaries.
 // See: https://hackmd.io/@dzearing/Sk3xV0cLs
 export const ColumnTypeEnum = {
-  'Int32': 0,
-  'Int64': 1,
-  'Float': 2,
-  'Double': 3,
-  'Numeric': 4,
-  'Boolean': 5,
-  'Char': 6,
-  'Text': 7,
-  'Date': 8,
-  'Time': 9,
-  'DateTime': 10,
-  'Json': 11,
-  'Enum': 12,
-  'Bytes': 13,
-  'Set': 14,
-  'Uuid': 15,
-  // ...
-  'UnknownNumber': 128
+  // Scalars
+  Int32: 0,
+  Int64: 1,
+  Float: 2,
+  Double: 3,
+  Numeric: 4,
+  Boolean: 5,
+  Char: 6,
+  Text: 7,
+  Date: 8,
+  Time: 9,
+  DateTime: 10,
+  Json: 11,
+  Enum: 12,
+  Bytes: 13,
+  Set: 14,
+  Uuid: 15,
+
+  // Arrays
+  Int32Array: 64,
+  Int64Array: 65,
+  FloatArray: 66,
+  DoubleArray: 67,
+  NumericArray: 68,
+  BooleanArray: 69,
+  CharArray: 70,
+  TextArray: 71,
+  DateArray: 72,
+  TimeArray: 73,
+  DateTimeArray: 74,
+  JsonArray: 75,
+  EnumArray: 76,
+  BytesArray: 77,
+  UuidArray: 78,
+
+  // Custom
+  UnknownNumber: 128,
 } as const
 
 // This string value paired with `ColumnType.Json` will be treated as JSON `null`
diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs
index 52060419c661..1e32dc3b8306 100644
--- a/query-engine/driver-adapters/src/conversion.rs
+++ b/query-engine/driver-adapters/src/conversion.rs
@@ -1,4 +1,5 @@
 use napi::bindgen_prelude::{FromNapiValue, ToNapiValue};
+use napi::NapiValue;
 use quaint::ast::Value as QuaintValue;
 use serde::Serialize;
 use serde_json::value::Value as JsonValue;
@@ -9,6 +10,7 @@ pub enum JSArg {
     RawString(String),
     Value(serde_json::Value),
     Buffer(Vec<u8>),
+    Array(Vec<JSArg>),
 }
 
 impl From<JsonValue> for JSArg {
@@ -35,6 +37,23 @@ impl ToNapiValue for JSArg {
             JSArg::Buffer(bytes) => {
                 ToNapiValue::to_napi_value(env, napi::Env::from_raw(env).create_buffer_with_data(bytes)?.into_raw())
             }
+            // While arrays are encodable as JSON generally, their elements might not be, or may be
+            // represented in a different way than we need. We use this custom logic for all arrays
+            // to avoid having separate `JsonArray` and `BytesArray` variants in `JSArg` and
+            // avoid complicating the logic in `conv_params`.
+            JSArg::Array(items) => {
+                let env = napi::Env::from_raw(env);
+                let mut array = env.create_array(items.len().try_into().expect("JS array length must fit into u32"))?;
+
+                for (index, item) in items.into_iter().enumerate() {
+                    let js_value = ToNapiValue::to_napi_value(env.raw(), item)?;
+                    // TODO: NapiRaw could be implemented for sys::napi_value directly, there should
+                    // be no need for re-wrapping; submit a patch to napi-rs and simplify here.
+                    array.set(index as u32, napi::JsUnknown::from_raw_unchecked(env.raw(), js_value))?;
+                }
+
+                ToNapiValue::to_napi_value(env.raw(), array)
+            }
         }
     }
 }
@@ -62,6 +81,7 @@ pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result<Vec<JSArg>> {
             },
             None => JsonValue::Null.into(),
         },
+        QuaintValue::Array(Some(items)) => JSArg::Array(conv_params(items)?),
         quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())),
     };
 
diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs
index a71742d3f282..aeaef30664d0 100644
--- a/query-engine/driver-adapters/src/proxy.rs
+++ b/query-engine/driver-adapters/src/proxy.rs
@@ -162,6 +162,54 @@ pub enum ColumnType {
     /// UUID from postgres-flavored driver adapters is mapped to this type.
     Uuid = 15,
 
+    /*
+     * Scalar arrays
+     */
+    /// Int32 array (INT2_ARRAY and INT4_ARRAY in PostgreSQL)
+    Int32Array = 64,
+
+    /// Int64 array (INT8_ARRAY in PostgreSQL)
+    Int64Array = 65,
+
+    /// Float array (FLOAT4_ARRAY in PostgreSQL)
+    FloatArray = 66,
+
+    /// Double array (FLOAT8_ARRAY in PostgreSQL)
+    DoubleArray = 67,
+
+    /// Numeric array (NUMERIC_ARRAY, MONEY_ARRAY etc in PostgreSQL)
+    NumericArray = 68,
+
+    /// Boolean array (BOOL_ARRAY in PostgreSQL)
+    BooleanArray = 69,
+
+    /// Char array (CHAR_ARRAY in PostgreSQL)
+    CharArray = 70,
+
+    /// Text array (TEXT_ARRAY in PostgreSQL)
+    TextArray = 71,
+
+    /// Date array (DATE_ARRAY in PostgreSQL)
+    DateArray = 72,
+
+    /// Time array (TIME_ARRAY in PostgreSQL)
+    TimeArray = 73,
+
+    /// DateTime array (TIMESTAMP_ARRAY in PostgreSQL)
+    DateTimeArray = 74,
+
+    /// Json array (JSON_ARRAY in PostgreSQL)
+    JsonArray = 75,
+
+    /// Enum array
+    EnumArray = 76,
+
+    /// Bytes array (BYTEA_ARRAY in PostgreSQL)
+    BytesArray = 77,
+
+    /// Uuid array (UUID_ARRAY in PostgreSQL)
+    UuidArray = 78,
+
     /*
      * Below there are custom types that don't have a 1:1 translation with a quaint::Value.
      * enum variant. 
@@ -387,12 +435,49 @@ fn js_value_to_quaint(
                 "expected a either an i64 or a f64 in column {column_name}, found {mismatch}",
             )),
         },
+
+        ColumnType::Int32Array => js_array_to_quaint(ColumnType::Int32, json_value, column_name),
+        ColumnType::Int64Array => js_array_to_quaint(ColumnType::Int64, json_value, column_name),
+        ColumnType::FloatArray => js_array_to_quaint(ColumnType::Float, json_value, column_name),
+        ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name),
+        ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name),
+        ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name),
+        ColumnType::CharArray => js_array_to_quaint(ColumnType::Char, json_value, column_name),
+        ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name),
+        ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name),
+        ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name),
+        ColumnType::DateTimeArray => js_array_to_quaint(ColumnType::DateTime, json_value, column_name),
+        ColumnType::JsonArray => js_array_to_quaint(ColumnType::Json, json_value, column_name),
+        ColumnType::EnumArray => js_array_to_quaint(ColumnType::Enum, json_value, column_name),
+        ColumnType::BytesArray => js_array_to_quaint(ColumnType::Bytes, json_value, column_name),
+        ColumnType::UuidArray => js_array_to_quaint(ColumnType::Uuid, json_value, column_name),
+
         unimplemented => {
             todo!("support column type {:?} in column {}", unimplemented, column_name)
         }
     }
 }
 
+fn js_array_to_quaint(
+    base_type: ColumnType,
+    json_value: serde_json::Value,
+    column_name: &str,
+) -> quaint::Result<QuaintValue<'static>> {
+    match json_value {
+        serde_json::Value::Array(array) => Ok(QuaintValue::Array(Some(
+            array
+                .into_iter()
+                .enumerate()
+                .map(|(index, elem)| js_value_to_quaint(elem, base_type, &format!("{column_name}[{index}]")))
+                .collect::<quaint::Result<Vec<QuaintValue<'static>>>>()?,
+        ))),
+        serde_json::Value::Null => Ok(QuaintValue::Array(None)),
+        mismatch => Err(conversion_error!(
+            "expected an array in column {column_name}, found {mismatch}",
+        )),
+    }
+}
+
 impl TryFrom<JSResultSet> for QuaintResultSet {
     type Error = quaint::error::Error;
 
@@ -825,4 +910,52 @@ mod proxy_test {
         let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap();
         assert_eq!(quaint_value, QuaintValue::Enum(Some(s.into()), None));
     }
+
+    #[test]
+    fn js_int32_array_to_quaint() {
+        let column_type = ColumnType::Int32Array;
+        test_null(QuaintValue::Array(None), column_type);
+
+        let json_value = json!([1, 2, 3]);
+        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap();
+
+        assert_eq!(
+            quaint_value,
+            QuaintValue::Array(Some(vec![
+                QuaintValue::int32(1),
+                QuaintValue::int32(2),
+                QuaintValue::int32(3)
+            ]))
+        );
+
+        let json_value = json!([1, 2, {}]);
+        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name");
+
+        assert_eq!(
+            quaint_value.err().unwrap().to_string(),
+            "Conversion failed: expected an i32 number in column column_name[2], found {}"
+        );
+    }
+
+    #[test]
+    fn js_text_array_to_quaint() {
+        let column_type = ColumnType::TextArray;
+        test_null(QuaintValue::Array(None), column_type);
+
+        let json_value = json!(["hi", "there"]);
+        let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap();
+
+        assert_eq!(
+            quaint_value,
+            QuaintValue::Array(Some(vec![QuaintValue::text("hi"), QuaintValue::text("there"),]))
+        );
+
+        let json_value = json!([10]);
+ let quaint_value = js_value_to_quaint(json_value, column_type, "column_name"); + + assert_eq!( + quaint_value.err().unwrap().to_string(), + "Conversion failed: expected a string in column column_name[0], found 10" + ); + } } From a5596b96668f0f4b397761ce0956db54e17e48c4 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 4 Oct 2023 00:17:59 +0200 Subject: [PATCH 066/128] driver-adapters: bump package versions for publishing (#4312) --- query-engine/driver-adapters/js/adapter-libsql/package.json | 2 +- query-engine/driver-adapters/js/adapter-neon/package.json | 2 +- query-engine/driver-adapters/js/adapter-pg/package.json | 2 +- .../driver-adapters/js/adapter-planetscale/package.json | 2 +- .../driver-adapters/js/driver-adapter-utils/package.json | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json index dc4466d826fa..9e386404ec13 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ b/query-engine/driver-adapters/js/adapter-libsql/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-libsql", - "version": "0.4.2", + "version": "0.5.0", "description": "Prisma's driver adapter for libSQL and Turso", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index a891c0147969..d2d891eb521b 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.5.0", + "version": "0.6.0", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 6e17419ae85f..9fb31126acf8 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.5.0", + "version": "0.6.0", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index 2d1e59bdd612..bbad56ed1197 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-planetscale", - "version": "0.4.0", + "version": "0.5.0", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index ffa1d8209750..057d4a295d06 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.8.0", + "version": "0.9.0", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", From e7be1fb123810efabd75ba49981b988f27108e32 Mon Sep 17 00:00:00 2001 From: Alberto 
Schiabel Date: Wed, 4 Oct 2023 18:30:34 +0200 Subject: [PATCH 067/128] test(driver-adapters): add READMEs for Neon and PlanetScale (#4314) * test(driver-adapters): add READMEs for Neon and PlanetScale * feat(driver-adapters): add version.sh to bump monorepo version * chore(driver-adapters): bump version to 5.4.0 * chore: add "Serverless Driver" to Neon Co-authored-by: Jan Piotrowski * chore: add "Serverless Driver" to PlanetScale Co-authored-by: Jan Piotrowski --------- Co-authored-by: Jan Piotrowski --- .../js/adapter-libsql/package.json | 2 +- .../driver-adapters/js/adapter-neon/README.md | 4 +++- .../driver-adapters/js/adapter-neon/package.json | 2 +- .../driver-adapters/js/adapter-pg/package.json | 2 +- .../js/adapter-planetscale/README.md | 5 ++++- .../js/adapter-planetscale/package.json | 2 +- .../js/connector-test-kit-executor/package.json | 2 +- .../js/driver-adapter-utils/package.json | 2 +- .../driver-adapters/js/smoke-test-js/package.json | 2 +- query-engine/driver-adapters/js/version.sh | 15 +++++++++++++++ 10 files changed, 29 insertions(+), 9 deletions(-) create mode 100755 query-engine/driver-adapters/js/version.sh diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json index 9e386404ec13..a455b6fc72b3 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ b/query-engine/driver-adapters/js/adapter-libsql/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-libsql", - "version": "0.5.0", + "version": "5.4.0", "description": "Prisma's driver adapter for libSQL and Turso", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md index 74e6fa91dc31..8af259ab74c1 100644 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ b/query-engine/driver-adapters/js/adapter-neon/README.md @@ -1,3 +1,5 @@ # @prisma/adapter-neon -**INTERNAL PACKAGE, DO NOT USE** +Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). + +See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. 
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index d2d891eb521b..a2a67d8d128f 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-neon", - "version": "0.6.0", + "version": "5.4.0", "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 9fb31126acf8..58262e63d0b0 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-pg", - "version": "0.6.0", + "version": "5.4.0", "description": "Prisma's driver adapter for \"pg\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md index 61b8b1717c1a..8e145c07c098 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ b/query-engine/driver-adapters/js/adapter-planetscale/README.md @@ -1,3 +1,6 @@ # @prisma/adapter-planetscale -**INTERNAL PACKAGE, DO NOT USE** +Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js). + +See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. + diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json index bbad56ed1197..ac2aac284565 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/adapter-planetscale", - "version": "0.5.0", + "version": "5.4.0", "description": "Prisma's driver adapter for \"@planetscale/database\"", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index 6d4ba374461c..be6a54a315fb 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -1,6 +1,6 @@ { "name": "connector-test-kit-executor", - "version": "1.0.0", + "version": "5.4.0", "description": "", "main": "dist/index.js", "private": true, diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json index 057d4a295d06..14385c963f16 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json @@ -1,6 +1,6 @@ { "name": "@prisma/driver-adapter-utils", - "version": "0.9.0", + "version": "5.4.0", "description": "Internal set of utilities and types for Prisma's driver adapters.", "main": "dist/index.js", "module": "dist/index.mjs", diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 3dbad884541a..23c6ed2db129 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -2,7 +2,7 @@ "name": 
"@prisma/driver-adapters-smoke-tests-js", "private": true, "type": "module", - "version": "0.0.0", + "version": "5.4.0", "description": "", "scripts": { "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", diff --git a/query-engine/driver-adapters/js/version.sh b/query-engine/driver-adapters/js/version.sh new file mode 100755 index 000000000000..8f592c0e197c --- /dev/null +++ b/query-engine/driver-adapters/js/version.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# Usage: `./version.sh x.y.z` will set the `x.y.z` to every package in the monorepo. + +target_version=$1 +package_dirs=$(pnpm -r list -r --depth -1 --json | jq -r '.[] | .path' | tail -n +2) + +# Iterate through each package directory +for package_dir in $package_dirs; do + # Check if the directory exists + if [ -d "$package_dir" ]; then + # Set the target version using pnpm + (cd "$package_dir" && pnpm version "$target_version" --no-git-tag-version --allow-same-version) + fi +done From c815422ede0692d58c70890066702bae4b466e8c Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Wed, 4 Oct 2023 21:52:40 +0200 Subject: [PATCH 068/128] fix: to-one filter should reference the related model (#4316) hotfix cherry-picked from https://github.com/prisma/prisma-engines/pull/4315 --- .../tests/queries/filters/one_relation.rs | 10 +++---- .../sql-query-connector/src/filter/visitor.rs | 26 ++++++++++--------- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs index ff4cddfc9b57..3a56dd18abab 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs @@ -9,26 +9,26 @@ mod one_relation { let schema = indoc! { r#" model Blog { - #id(id, String, @id, @default(cuid())) + #id(blogId, String, @id, @default(cuid())) name String post Post? } model Post { - #id(id, String, @id, @default(cuid())) + #id(postId, String, @id, @default(cuid())) title String popularity Int blogId String? @unique - blog Blog? @relation(fields: [blogId], references: [id]) + blog Blog? @relation(fields: [blogId], references: [blogId]) comment Comment? } model Comment { - #id(id, String, @id, @default(cuid())) + #id(commentId, String, @id, @default(cuid())) text String likes Int postId String? @unique - post Post? @relation(fields: [postId], references: [id]) + post Post? 
@relation(fields: [postId], references: [postId]) } "# }; diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 894a35305459..26796bf79121 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -330,12 +330,13 @@ impl FilterVisitorExt for FilterVisitor { RelationCondition::NoRelatedRecord if self.can_render_join() && !filter.field.is_list() => { let alias = self.next_alias(AliasMode::Join); - let linking_fields_null: Vec<_> = ModelProjection::from(filter.field.model().primary_identifier()) - .as_columns(ctx) - .map(|c| c.aliased_col(Some(alias), ctx)) - .map(|c| c.is_null()) - .map(Expression::from) - .collect(); + let linking_fields_null: Vec<_> = + ModelProjection::from(filter.field.related_model().primary_identifier()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_null()) + .map(Expression::from) + .collect(); let null_filter = ConditionTree::And(linking_fields_null); let join = compute_one2m_join( @@ -362,12 +363,13 @@ impl FilterVisitorExt for FilterVisitor { RelationCondition::ToOneRelatedRecord if self.can_render_join() && !filter.field.is_list() => { let alias = self.next_alias(AliasMode::Join); - let linking_fields_not_null: Vec<_> = ModelProjection::from(filter.field.model().primary_identifier()) - .as_columns(ctx) - .map(|c| c.aliased_col(Some(alias), ctx)) - .map(|c| c.is_not_null()) - .map(Expression::from) - .collect(); + let linking_fields_not_null: Vec<_> = + ModelProjection::from(filter.field.related_model().primary_identifier()) + .as_columns(ctx) + .map(|c| c.aliased_col(Some(alias), ctx)) + .map(|c| c.is_not_null()) + .map(Expression::from) + .collect(); let not_null_filter = ConditionTree::And(linking_fields_not_null); let join = compute_one2m_join( From 6e26301fe272ba4ba0598fe43eb5d8df030be4db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 5 Oct 2023 16:54:17 +0200 Subject: [PATCH 069/128] feat(qe): Allow quaint::Value to include metainformation about the corresponding native database type (#4311) --- .../query-engine-driver-adapters.yml | 8 - .../src/test_each_connector.rs | 140 +++ quaint/src/ast.rs | 2 +- quaint/src/ast/column.rs | 2 +- quaint/src/ast/enums.rs | 7 +- quaint/src/ast/expression.rs | 20 +- quaint/src/ast/function/row_to_json.rs | 4 +- quaint/src/ast/values.rs | 881 ++++++++++++++---- quaint/src/connector/metrics.rs | 2 +- quaint/src/connector/mssql/conversion.rs | 76 +- quaint/src/connector/mysql.rs | 2 +- quaint/src/connector/mysql/conversion.rs | 66 +- quaint/src/connector/postgres.rs | 14 +- quaint/src/connector/postgres/conversion.rs | 398 ++++---- quaint/src/connector/postgres/error.rs | 6 +- quaint/src/connector/sqlite/conversion.rs | 60 +- quaint/src/lib.rs | 2 +- quaint/src/macros.rs | 22 +- quaint/src/serde.rs | 86 +- quaint/src/tests/query.rs | 189 ++-- quaint/src/tests/query/error.rs | 5 +- quaint/src/tests/types/mssql.rs | 50 +- quaint/src/tests/types/mssql/bigdecimal.rs | 12 +- quaint/src/tests/types/mysql.rs | 73 +- quaint/src/tests/types/postgres.rs | 166 ++-- quaint/src/tests/types/postgres/bigdecimal.rs | 12 +- quaint/src/tests/types/sqlite.rs | 24 +- quaint/src/visitor.rs | 13 +- quaint/src/visitor/mssql.rs | 46 +- quaint/src/visitor/mysql.rs | 76 +- quaint/src/visitor/postgres.rs | 63 +- quaint/src/visitor/sqlite.rs | 42 +- .../query-tests-setup/src/config.rs | 6 +- 
 .../src/query_builder/group_by_builder.rs     |   2 +-
 .../src/cursor_condition.rs                   |   4 +-
 .../sql-query-connector/src/filter/visitor.rs |   6 +-
 .../src/model_extensions/scalar_field.rs      |  42 +-
 .../sql-query-connector/src/ordering.rs       |   2 +-
 .../connectors/sql-query-connector/src/row.rs | 110 +--
 .../sql-query-connector/src/value.rs          |  52 +-
 .../sql-query-connector/src/value_ext.rs      |  44 +-
 query-engine/dmmf/src/tests/tests.rs          |   5 +-
 .../driver-adapters/src/conversion.rs         |  13 +-
 query-engine/driver-adapters/src/proxy.rs     | 138 +--
 query-engine/driver-adapters/src/queryable.rs |   2 +-
 .../query-engine-node-api/src/logger.rs       |   2 +-
 .../src/assertions/quaint_result_set_ext.rs   |  12 +-
 .../tests/existing_data/mod.rs                |   9 +-
 .../made_optional_field_required.rs           |   6 +-
 .../sqlite_existing_data_tests.rs             |   4 +-
 .../existing_data/type_migration_tests.rs     |   4 +-
 .../tests/migrations/sql.rs                   |  12 +-
 .../tests/native_types/mssql.rs               |  20 +-
 .../tests/native_types/mysql.rs               |  96 +-
 .../tests/native_types/postgres.rs            |  26 +-
 .../sql-schema-describer/src/postgres.rs      |  79 +-
 .../sql-schema-describer/src/sqlite.rs        |   7 +-
 57 files changed, 1943 insertions(+), 1329 deletions(-)
 create mode 100644 quaint/quaint-test-macros/src/test_each_connector.rs

diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml
index 8ab2d932e07f..f4207e2a6d51 100644
--- a/.github/workflows/query-engine-driver-adapters.yml
+++ b/.github/workflows/query-engine-driver-adapters.yml
@@ -61,14 +61,6 @@ jobs:
         run: |
           echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
 
-      - uses: actions/cache@v3
-        name: "Setup pnpm cache"
-        with:
-          path: ${{ env.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
-
       - name: "Login to Docker Hub"
         uses: docker/login-action@v2
         continue-on-error: true
diff --git a/quaint/quaint-test-macros/src/test_each_connector.rs b/quaint/quaint-test-macros/src/test_each_connector.rs
new file mode 100644
index 000000000000..c01aa695d1ad
--- /dev/null
+++ b/quaint/quaint-test-macros/src/test_each_connector.rs
@@ -0,0 +1,140 @@
+use darling::FromMeta;
+use once_cell::sync::Lazy;
+use proc_macro::TokenStream;
+use proc_macro2::Span;
+use quaint_test_setup::{ConnectorDefinition, Tags, CONNECTORS};
+use quote::quote;
+use std::str::FromStr;
+use syn::{parse_macro_input, spanned::Spanned, AttributeArgs, Ident, ItemFn};
+
+static TAGS_FILTER: Lazy<Tags> = Lazy::new(|| {
+    let tags_str = std::env::var("TEST_EACH_CONNECTOR_TAGS").ok();
+    let mut tags = Tags::empty();
+
+    if let Some(tags_str) = tags_str {
+        for tag_str in tags_str.split(',') {
+            let tag = Tags::from_str(tag_str).unwrap();
+            tags |= tag;
+        }
+    }
+
+    tags
+});
+
+#[derive(Debug, FromMeta)]
+struct TestEachConnectorArgs {
+    /// If present, run only the tests for the connectors with any of the passed
+    /// in tags.
+    #[darling(default)]
+    tags: TagsWrapper,
+
+    /// Optional list of tags to ignore.
+    #[darling(default)]
+    ignore: TagsWrapper,
+}
+
+impl TestEachConnectorArgs {
+    fn connectors_to_test(&self) -> impl Iterator<Item = &ConnectorDefinition> {
+        CONNECTORS
+            .all()
+            .filter(move |connector| TAGS_FILTER.is_empty() || connector.tags.contains(*TAGS_FILTER))
+            .filter(move |connector| self.tags.0.is_empty() || connector.tags.intersects(self.tags.0))
+            .filter(move |connector| !connector.tags.intersects(self.ignore.0))
+    }
+}
+
+#[derive(Debug)]
+struct TagsWrapper(Tags);
+
+impl Default for TagsWrapper {
+    fn default() -> Self {
+        TagsWrapper(Tags::empty())
+    }
+}
+
+impl darling::FromMeta for TagsWrapper {
+    fn from_list(items: &[syn::NestedMeta]) -> Result<Self, darling::Error> {
+        let mut tags = Tags::empty();
+
+        for item in items {
+            match item {
+                syn::NestedMeta::Lit(syn::Lit::Str(s)) => {
+                    let s = s.value();
+                    let tag = Tags::from_str(&s)
+                        .map_err(|err| darling::Error::unknown_value(&err.to_string()).with_span(&item.span()))?;
+                    tags.insert(tag);
+                }
+                syn::NestedMeta::Lit(other) => {
+                    return Err(darling::Error::unexpected_lit_type(other).with_span(&other.span()))
+                }
+                syn::NestedMeta::Meta(meta) => {
+                    return Err(darling::Error::unsupported_shape("Expected string literal").with_span(&meta.span()))
+                }
+            }
+        }
+
+        Ok(TagsWrapper(tags))
+    }
+}
+
+#[allow(clippy::needless_borrow)]
+pub fn test_each_connector_impl(attr: TokenStream, input: TokenStream) -> TokenStream {
+    let attributes_meta: syn::AttributeArgs = parse_macro_input!(attr as AttributeArgs);
+    let args = TestEachConnectorArgs::from_list(&attributes_meta);
+
+    let mut test_function = parse_macro_input!(input as ItemFn);
+    super::strip_test_attribute(&mut test_function);
+
+    let tests = match args {
+        Ok(args) => test_each_connector_async_wrapper_functions(&args, &test_function),
+        Err(err) => return err.write_errors().into(),
+    };
+
+    let output = quote! {
+        #(#tests)*
+
+        #test_function
+    };
+
+    output.into()
+}
+
+#[allow(clippy::needless_borrow)]
+fn test_each_connector_async_wrapper_functions(
+    args: &TestEachConnectorArgs,
+    test_function: &ItemFn,
+) -> Vec<proc_macro2::TokenStream> {
+    let test_fn_name = &test_function.sig.ident;
+    let mut tests = Vec::with_capacity(CONNECTORS.len());
+
+    let optional_unwrap = if super::function_returns_result(&test_function) {
+        Some(quote!(.unwrap()))
+    } else {
+        None
+    };
+
+    for connector in args.connectors_to_test() {
+        let connector_name = connector.name();
+        let feature_name = connector.feature_name();
+        let connector_test_fn_name = Ident::new(&format!("{}_on_{}", test_fn_name, connector_name), Span::call_site());
+
+        let conn_api_factory = Ident::new(connector.test_api(), Span::call_site());
+
+        let test = quote! {
+            #[test]
+            #[cfg(feature = #feature_name)]
+            fn #connector_test_fn_name() {
+                let fut = async {
+                    let mut api = #conn_api_factory().await#optional_unwrap;
+                    #test_fn_name(&mut api).await#optional_unwrap
+                };
+
+                quaint_test_setup::run_with_tokio(fut)
+            }
+        };
+
+        tests.push(test);
+    }
+
+    tests
+}
diff --git a/quaint/src/ast.rs b/quaint/src/ast.rs
index ae2a19960b2d..dc634423014a 100644
--- a/quaint/src/ast.rs
+++ b/quaint/src/ast.rs
@@ -54,4 +54,4 @@ pub use table::*;
 pub use union::Union;
 pub use update::*;
 pub(crate) use values::Params;
-pub use values::{IntoRaw, Raw, Value, Values};
+pub use values::{IntoRaw, Raw, Value, ValueType, Values};
diff --git a/quaint/src/ast/column.rs b/quaint/src/ast/column.rs
index 7e6fbb71c9b7..836b4ce96527 100644
--- a/quaint/src/ast/column.rs
+++ b/quaint/src/ast/column.rs
@@ -110,7 +110,7 @@ impl<'a> Column<'a> {
     /// Sets whether the column is selected.
     ///
     /// On Postgres, this defines whether an enum column should be cast to `TEXT` when rendered.
-    ///
+    ///
     /// Since enums are user-defined custom types, `tokio-postgres` fires an additional query
     /// when selecting columns of type enum to know which custom type the column refers to.
     /// Casting the enum column to `TEXT` avoids this roundtrip since `TEXT` is a builtin type.
diff --git a/quaint/src/ast/enums.rs b/quaint/src/ast/enums.rs
index d301df25cc13..a4e93836d24b 100644
--- a/quaint/src/ast/enums.rs
+++ b/quaint/src/ast/enums.rs
@@ -14,11 +14,14 @@ impl<'a> EnumVariant<'a> {
     }
 
     pub fn into_text(self) -> Value<'a> {
-        Value::Text(Some(self.0))
+        Value::text(self.0)
     }
 
     pub fn into_enum(self, name: Option>) -> Value<'a> {
-        Value::Enum(Some(self), name)
+        match name {
+            Some(name) => Value::enum_variant_with_name(self.0, name),
+            None => Value::enum_variant(self.0),
+        }
     }
 }
diff --git a/quaint/src/ast/expression.rs b/quaint/src/ast/expression.rs
index 7e5912d1e027..ea4c32a4fb61 100644
--- a/quaint/src/ast/expression.rs
+++ b/quaint/src/ast/expression.rs
@@ -45,7 +45,10 @@ impl<'a> Expression<'a> {
 
     pub(crate) fn is_json_expr(&self) -> bool {
         match &self.kind {
-            ExpressionKind::Parameterized(Value::Json(_)) => true,
+            ExpressionKind::Parameterized(Value {
+                typed: ValueType::Json(_),
+                ..
+            }) => true,
 
             ExpressionKind::Value(expr) => expr.is_json_value(),
 
@@ -58,7 +61,10 @@ impl<'a> Expression<'a> {
 
     pub(crate) fn is_json_value(&self) -> bool {
         match &self.kind {
-            ExpressionKind::Parameterized(Value::Json(_)) => true,
+            ExpressionKind::Parameterized(Value {
+                typed: ValueType::Json(_),
+                ..
+            }) => true,
 
             ExpressionKind::Value(expr) => expr.is_json_value(),
             _ => false,
@@ -69,7 +75,10 @@ impl<'a> Expression<'a> {
 
     pub(crate) fn into_json_value(self) -> Option {
         match self.kind {
-            ExpressionKind::Parameterized(Value::Json(json_val)) => json_val,
+            ExpressionKind::Parameterized(Value {
+                typed: ValueType::Json(json_val),
+                ..
+            }) => json_val,
 
             ExpressionKind::Value(expr) => expr.into_json_value(),
             _ => None,
@@ -217,7 +226,10 @@ pub enum ExpressionKind<'a> {
 impl<'a> ExpressionKind<'a> {
     pub(crate) fn is_xml_value(&self) -> bool {
         match self {
-            Self::Parameterized(Value::Xml(_)) => true,
+            Self::Parameterized(Value {
+                typed: ValueType::Xml(_),
+                ..
+            }) => true,
             Self::Value(expr) => expr.is_xml_value(),
             _ => false,
         }
diff --git a/quaint/src/ast/function/row_to_json.rs b/quaint/src/ast/function/row_to_json.rs
index 1093431e7412..40e2b0dec7fc 100644
--- a/quaint/src/ast/function/row_to_json.rs
+++ b/quaint/src/ast/function/row_to_json.rs
@@ -31,9 +31,9 @@ pub struct RowToJson<'a> {
 /// let result = conn.select(select).await?;
 ///
 /// assert_eq!(
-///     Value::Json(Some(serde_json::json!({
+///     Value::json(serde_json::json!({
 ///         "toto": "hello_world"
-///     }))),
+///     })),
 ///     result.into_single().unwrap()[0]
 /// );
 /// # Ok(())
diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs
index 05acccd77fff..92719b982eb4 100644
--- a/quaint/src/ast/values.rs
+++ b/quaint/src/ast/values.rs
@@ -5,6 +5,7 @@ use crate::error::{Error, ErrorKind};
 use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive};
 use chrono::{DateTime, NaiveDate, NaiveTime, Utc};
 use serde_json::{Number, Value as JsonValue};
+use std::fmt::Display;
 use std::{
     borrow::{Borrow, Cow},
     convert::TryFrom,
@@ -34,11 +35,456 @@ where
     }
 }
 
+#[derive(Debug, Clone, PartialEq)]
+pub struct Value<'a> {
+    pub typed: ValueType<'a>,
+    pub native_column_type: Option>,
+}
+
+impl<'a> Value<'a> {
+    /// Creates a new 32-bit signed integer.
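+    ///
+    /// A minimal sketch using only constructors and predicates introduced in
+    /// this patch:
+    ///
+    /// ```ignore
+    /// let v = Value::int32(42);
+    /// assert!(v.is_i32() && !v.is_null());
+    /// ```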
+ pub fn int32(value: I) -> Self + where + I: Into, + { + ValueType::int32(value).into_value() + } + + /// Creates a new 64-bit signed integer. + pub fn int64(value: I) -> Self + where + I: Into, + { + ValueType::int64(value).into_value() + } + + /// Creates a new decimal value. + #[cfg(feature = "bigdecimal")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] + pub fn numeric(value: BigDecimal) -> Self { + ValueType::numeric(value).into_value() + } + + /// Creates a new float value. + pub fn float(value: f32) -> Self { + ValueType::float(value).into_value() + } + + /// Creates a new double value. + pub fn double(value: f64) -> Self { + ValueType::double(value).into_value() + } + + /// Creates a new string value. + pub fn text(value: T) -> Self + where + T: Into>, + { + ValueType::text(value).into_value() + } + + /// Creates a new enum value. + pub fn enum_variant(value: T) -> Self + where + T: Into>, + { + ValueType::enum_variant(value).into_value() + } + + /// Creates a new enum value with the name of the enum attached. + pub fn enum_variant_with_name(value: T, name: U) -> Self + where + T: Into>, + U: Into>, + { + ValueType::enum_variant_with_name(value, name).into_value() + } + + /// Creates a new enum array value + pub fn enum_array(value: T) -> Self + where + T: IntoIterator>, + { + ValueType::enum_array(value).into_value() + } + + /// Creates a new enum array value with the name of the enum attached. + pub fn enum_array_with_name(value: T, name: U) -> Self + where + T: IntoIterator>, + U: Into>, + { + ValueType::enum_array_with_name(value, name).into_value() + } + + /// Creates a new bytes value. + pub fn bytes(value: B) -> Self + where + B: Into>, + { + ValueType::bytes(value).into_value() + } + + /// Creates a new boolean value. + pub fn boolean(value: B) -> Self + where + B: Into, + { + ValueType::boolean(value).into_value() + } + + /// Creates a new character value. + pub fn character(value: C) -> Self + where + C: Into, + { + ValueType::character(value).into_value() + } + + /// Creates a new array value. + pub fn array(value: I) -> Self + where + I: IntoIterator, + V: Into>, + { + ValueType::array(value).into_value() + } + + /// Creates a new uuid value. + #[cfg(feature = "uuid")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] + pub fn uuid(value: Uuid) -> Self { + ValueType::uuid(value).into_value() + } + + /// Creates a new datetime value. + pub fn datetime(value: DateTime) -> Self { + ValueType::datetime(value).into_value() + } + + /// Creates a new date value. + pub fn date(value: NaiveDate) -> Self { + ValueType::date(value).into_value() + } + + /// Creates a new time value. + pub fn time(value: NaiveTime) -> Self { + ValueType::time(value).into_value() + } + + /// Creates a new JSON value. + pub fn json(value: serde_json::Value) -> Self { + ValueType::json(value).into_value() + } + + /// Creates a new XML value. + pub fn xml(value: T) -> Self + where + T: Into>, + { + ValueType::xml(value).into_value() + } + + /// `true` if the `Value` is null. + pub fn is_null(&self) -> bool { + self.typed.is_null() + } + + /// Returns a &str if the value is text, otherwise `None`. + pub fn as_str(&self) -> Option<&str> { + self.typed.as_str() + } + + /// `true` if the `Value` is text. + pub fn is_text(&self) -> bool { + self.typed.is_text() + } + + /// Returns a char if the value is a char, otherwise `None`. + pub fn as_char(&self) -> Option { + self.typed.as_char() + } + + /// Returns a cloned String if the value is text, otherwise `None`. 
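+    /// For example, `Value::text("abc").to_string()` yields `Some("abc".to_string())`,
+    /// while e.g. `Value::boolean(true).to_string()` yields `None`.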
+ pub fn to_string(&self) -> Option { + self.typed.to_string() + } + + /// Transforms the `Value` to a `String` if it's text, + /// otherwise `None`. + pub fn into_string(self) -> Option { + self.typed.into_string() + } + + /// Returns whether this value is the `Bytes` variant. + pub fn is_bytes(&self) -> bool { + self.typed.is_bytes() + } + + /// Returns a bytes slice if the value is text or a byte slice, otherwise `None`. + pub fn as_bytes(&self) -> Option<&[u8]> { + self.typed.as_bytes() + } + + /// Returns a cloned `Vec` if the value is text or a byte slice, otherwise `None`. + pub fn to_bytes(&self) -> Option> { + self.typed.to_bytes() + } + + /// `true` if the `Value` is a 32-bit signed integer. + pub fn is_i32(&self) -> bool { + self.typed.is_i32() + } + + /// `true` if the `Value` is a 64-bit signed integer. + pub fn is_i64(&self) -> bool { + self.typed.is_i64() + } + + /// `true` if the `Value` is a signed integer. + pub fn is_integer(&self) -> bool { + self.typed.is_integer() + } + + /// Returns an `i64` if the value is a 64-bit signed integer, otherwise `None`. + pub fn as_i64(&self) -> Option { + self.typed.as_i64() + } + + /// Returns an `i32` if the value is a 32-bit signed integer, otherwise `None`. + pub fn as_i32(&self) -> Option { + self.typed.as_i32() + } + + /// Returns an `i64` if the value is a signed integer, otherwise `None`. + pub fn as_integer(&self) -> Option { + self.typed.as_integer() + } + + /// Returns a `f64` if the value is a double, otherwise `None`. + pub fn as_f64(&self) -> Option { + self.typed.as_f64() + } + + /// Returns a `f32` if the value is a double, otherwise `None`. + pub fn as_f32(&self) -> Option { + self.typed.as_f32() + } + + /// `true` if the `Value` is a numeric value or can be converted to one. + #[cfg(feature = "bigdecimal")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] + pub fn is_numeric(&self) -> bool { + self.typed.is_numeric() + } + + /// Returns a bigdecimal, if the value is a numeric, float or double value, + /// otherwise `None`. + #[cfg(feature = "bigdecimal")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] + pub fn into_numeric(self) -> Option { + self.typed.into_numeric() + } + + /// Returns a reference to a bigdecimal, if the value is a numeric. + /// Otherwise `None`. + #[cfg(feature = "bigdecimal")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] + pub fn as_numeric(&self) -> Option<&BigDecimal> { + self.typed.as_numeric() + } + + /// `true` if the `Value` is a boolean value. + pub fn is_bool(&self) -> bool { + self.typed.is_bool() + } + + /// Returns a bool if the value is a boolean, otherwise `None`. + pub fn as_bool(&self) -> Option { + self.typed.as_bool() + } + + /// `true` if the `Value` is an Array. + pub fn is_array(&self) -> bool { + self.typed.is_array() + } + + /// `true` if the `Value` is of UUID type. + #[cfg(feature = "uuid")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] + pub fn is_uuid(&self) -> bool { + self.typed.is_uuid() + } + + /// Returns an UUID if the value is of UUID type, otherwise `None`. + #[cfg(feature = "uuid")] + #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] + pub fn as_uuid(&self) -> Option { + self.typed.as_uuid() + } + + /// `true` if the `Value` is a DateTime. + pub fn is_datetime(&self) -> bool { + self.typed.is_datetime() + } + + /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`. 
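+    ///
+    /// A round-trip sketch using the chrono types already imported by this module:
+    ///
+    /// ```ignore
+    /// let now = chrono::Utc::now();
+    /// assert_eq!(Value::datetime(now).as_datetime(), Some(now));
+    /// ```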
+ pub fn as_datetime(&self) -> Option> { + self.typed.as_datetime() + } + + /// `true` if the `Value` is a Date. + pub fn is_date(&self) -> bool { + self.typed.is_date() + } + + /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`. + pub fn as_date(&self) -> Option { + self.typed.as_date() + } + + /// `true` if the `Value` is a `Time`. + pub fn is_time(&self) -> bool { + self.typed.is_time() + } + + /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`. + pub fn as_time(&self) -> Option { + self.typed.as_time() + } + + /// `true` if the `Value` is a JSON value. + pub fn is_json(&self) -> bool { + self.typed.is_json() + } + + /// Returns a reference to a JSON Value if of Json type, otherwise `None`. + pub fn as_json(&self) -> Option<&serde_json::Value> { + self.typed.as_json() + } + + /// Transforms to a JSON Value if of Json type, otherwise `None`. + pub fn into_json(self) -> Option { + self.typed.into_json() + } + + /// Returns a `Vec` if the value is an array of `T`, otherwise `None`. + pub fn into_vec(self) -> Option> + where + T: TryFrom>, + { + self.typed.into_vec() + } + + /// Returns a cloned Vec if the value is an array of T, otherwise `None`. + pub fn to_vec(&self) -> Option> + where + T: TryFrom>, + { + self.typed.to_vec() + } + + pub fn null_int32() -> Self { + ValueType::Int32(None).into() + } + + pub fn null_int64() -> Self { + ValueType::Int64(None).into() + } + + pub fn null_float() -> Self { + ValueType::Float(None).into() + } + + pub fn null_double() -> Self { + ValueType::Double(None).into() + } + + pub fn null_text() -> Self { + ValueType::Text(None).into() + } + + pub fn null_enum() -> Self { + ValueType::Enum(None, None).into() + } + + pub fn null_enum_array() -> Self { + ValueType::EnumArray(None, None).into() + } + + pub fn null_bytes() -> Self { + ValueType::Bytes(None).into() + } + + pub fn null_boolean() -> Self { + ValueType::Boolean(None).into() + } + + pub fn null_character() -> Self { + ValueType::Char(None).into() + } + + pub fn null_array() -> Self { + ValueType::Array(None).into() + } + + #[cfg(feature = "bigdecimal")] + pub fn null_numeric() -> Self { + ValueType::Numeric(None).into() + } + + pub fn null_json() -> Self { + ValueType::Json(None).into() + } + + pub fn null_xml() -> Self { + ValueType::Xml(None).into() + } + + #[cfg(feature = "uuid")] + pub fn null_uuid() -> Self { + ValueType::Uuid(None).into() + } + + pub fn null_datetime() -> Self { + ValueType::DateTime(None).into() + } + + pub fn null_date() -> Self { + ValueType::Date(None).into() + } + + pub fn null_time() -> Self { + ValueType::Time(None).into() + } +} + +impl<'a> Display for Value<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.typed.fmt(f) + } +} + +impl<'a> From> for Value<'a> { + fn from(inner: ValueType<'a>) -> Self { + Self { + typed: inner, + native_column_type: Default::default(), + } + } +} + +impl<'a> From> for ValueType<'a> { + fn from(val: Value<'a>) -> Self { + val.typed + } +} + /// A value we must parameterize for the prepared statement. Null values should be /// defined by their corresponding type variants with a `None` value for best /// compatibility. #[derive(Debug, Clone, PartialEq)] -pub enum Value<'a> { +pub enum ValueType<'a> { /// 32-bit signed integer. Int32(Option), /// 64-bit signed integer. @@ -54,7 +500,7 @@ pub enum Value<'a> { /// Read more about it here: https://github.com/prisma/prisma-engines/pull/4280 Enum(Option>, Option>), /// Database enum array (PostgreSQL specific). 
- /// We use a different variant than `Value::Array` to uplift the `EnumName` + /// We use a different variant than `ValueType::Array` to uplift the `EnumName` /// and have it available even for empty enum arrays. EnumArray(Option>>, Option>), /// Bytes value. @@ -87,7 +533,7 @@ pub enum Value<'a> { pub(crate) struct Params<'a>(pub(crate) &'a [Value<'a>]); -impl<'a> fmt::Display for Params<'a> { +impl<'a> Display for Params<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let len = self.0.len(); @@ -103,17 +549,17 @@ impl<'a> fmt::Display for Params<'a> { } } -impl<'a> fmt::Display for Value<'a> { +impl<'a> fmt::Display for ValueType<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let res = match self { - Value::Int32(val) => val.map(|v| write!(f, "{v}")), - Value::Int64(val) => val.map(|v| write!(f, "{v}")), - Value::Float(val) => val.map(|v| write!(f, "{v}")), - Value::Double(val) => val.map(|v| write!(f, "{v}")), - Value::Text(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")), - Value::Bytes(val) => val.as_ref().map(|v| write!(f, "<{} bytes blob>", v.len())), - Value::Enum(val, _) => val.as_ref().map(|v| write!(f, "\"{v}\"")), - Value::EnumArray(vals, _) => vals.as_ref().map(|vals| { + ValueType::Int32(val) => val.map(|v| write!(f, "{v}")), + ValueType::Int64(val) => val.map(|v| write!(f, "{v}")), + ValueType::Float(val) => val.map(|v| write!(f, "{v}")), + ValueType::Double(val) => val.map(|v| write!(f, "{v}")), + ValueType::Text(val) => val.as_ref().map(|v| write!(f, "\"{v}\"")), + ValueType::Bytes(val) => val.as_ref().map(|v| write!(f, "<{} bytes blob>", v.len())), + ValueType::Enum(val, _) => val.as_ref().map(|v| write!(f, "\"{v}\"")), + ValueType::EnumArray(vals, _) => vals.as_ref().map(|vals| { let len = vals.len(); write!(f, "[")?; @@ -126,9 +572,9 @@ impl<'a> fmt::Display for Value<'a> { } write!(f, "]") }), - Value::Boolean(val) => val.map(|v| write!(f, "{v}")), - Value::Char(val) => val.map(|v| write!(f, "'{v}'")), - Value::Array(vals) => vals.as_ref().map(|vals| { + ValueType::Boolean(val) => val.map(|v| write!(f, "{v}")), + ValueType::Char(val) => val.map(|v| write!(f, "'{v}'")), + ValueType::Array(vals) => vals.as_ref().map(|vals| { let len = vals.len(); write!(f, "[")?; @@ -141,15 +587,15 @@ impl<'a> fmt::Display for Value<'a> { } write!(f, "]") }), - Value::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")), + ValueType::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")), #[cfg(feature = "bigdecimal")] - Value::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")), - Value::Json(val) => val.as_ref().map(|v| write!(f, "{v}")), + ValueType::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")), + ValueType::Json(val) => val.as_ref().map(|v| write!(f, "{v}")), #[cfg(feature = "uuid")] - Value::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")), - Value::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), - Value::Date(val) => val.map(|v| write!(f, "\"{v}\"")), - Value::Time(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::Date(val) => val.map(|v| write!(f, "\"{v}\"")), + ValueType::Time(val) => val.map(|v| write!(f, "\"{v}\"")), }; match res { @@ -161,21 +607,27 @@ impl<'a> fmt::Display for Value<'a> { impl<'a> From> for serde_json::Value { fn from(pv: Value<'a>) -> Self { + pv.typed.into() + } +} + +impl<'a> From> for serde_json::Value { + fn from(pv: ValueType<'a>) -> Self { let res = match pv { - 
Value::Int32(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), - Value::Int64(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), - Value::Float(f) => f.map(|f| match Number::from_f64(f as f64) { + ValueType::Int32(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), + ValueType::Int64(i) => i.map(|i| serde_json::Value::Number(Number::from(i))), + ValueType::Float(f) => f.map(|f| match Number::from_f64(f as f64) { Some(number) => serde_json::Value::Number(number), None => serde_json::Value::Null, }), - Value::Double(f) => f.map(|f| match Number::from_f64(f) { + ValueType::Double(f) => f.map(|f| match Number::from_f64(f) { Some(number) => serde_json::Value::Number(number), None => serde_json::Value::Null, }), - Value::Text(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), - Value::Bytes(bytes) => bytes.map(|bytes| serde_json::Value::String(base64::encode(bytes))), - Value::Enum(cow, _) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), - Value::EnumArray(values, _) => values.map(|values| { + ValueType::Text(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), + ValueType::Bytes(bytes) => bytes.map(|bytes| serde_json::Value::String(base64::encode(bytes))), + ValueType::Enum(cow, _) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), + ValueType::EnumArray(values, _) => values.map(|values| { serde_json::Value::Array( values .into_iter() @@ -183,26 +635,26 @@ impl<'a> From> for serde_json::Value { .collect(), ) }), - Value::Boolean(b) => b.map(serde_json::Value::Bool), - Value::Char(c) => c.map(|c| { + ValueType::Boolean(b) => b.map(serde_json::Value::Bool), + ValueType::Char(c) => c.map(|c| { let bytes = [c as u8]; let s = std::str::from_utf8(&bytes) .expect("interpret byte as UTF-8") .to_string(); serde_json::Value::String(s) }), - Value::Xml(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), - Value::Array(v) => { + ValueType::Xml(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())), + ValueType::Array(v) => { v.map(|v| serde_json::Value::Array(v.into_iter().map(serde_json::Value::from).collect())) } #[cfg(feature = "bigdecimal")] - Value::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()), - Value::Json(v) => v, + ValueType::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()), + ValueType::Json(v) => v, #[cfg(feature = "uuid")] - Value::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())), - Value::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), - Value::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), - Value::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), + ValueType::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())), + ValueType::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())), + ValueType::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))), + ValueType::Time(time) => time.map(|time| serde_json::Value::String(format!("{time}"))), }; match res { @@ -212,259 +664,271 @@ impl<'a> From> for serde_json::Value { } } -impl<'a> Value<'a> { +impl<'a> ValueType<'a> { + pub fn into_value(self) -> Value<'a> { + self.into() + } + /// Creates a new 32-bit signed integer. 
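+    /// (Crate-internal from this change on: callers outside the crate go through
+    /// the matching `Value::int32`-style constructors, which wrap the result via
+    /// `into_value()`.)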
- pub fn int32(value: I) -> Self + pub(crate) fn int32(value: I) -> Self where I: Into, { - Value::Int32(Some(value.into())) + Self::Int32(Some(value.into())) } /// Creates a new 64-bit signed integer. - pub fn int64(value: I) -> Self + pub(crate) fn int64(value: I) -> Self where I: Into, { - Value::Int64(Some(value.into())) - } - - /// Creates a new 32-bit signed integer. - pub fn integer(value: I) -> Self - where - I: Into, - { - Value::Int32(Some(value.into())) + Self::Int64(Some(value.into())) } /// Creates a new decimal value. #[cfg(feature = "bigdecimal")] #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub const fn numeric(value: BigDecimal) -> Self { - Value::Numeric(Some(value)) + pub(crate) fn numeric(value: BigDecimal) -> Self { + Self::Numeric(Some(value)) } /// Creates a new float value. - pub const fn float(value: f32) -> Self { + pub(crate) fn float(value: f32) -> Self { Self::Float(Some(value)) } /// Creates a new double value. - pub const fn double(value: f64) -> Self { + pub(crate) fn double(value: f64) -> Self { Self::Double(Some(value)) } /// Creates a new string value. - pub fn text(value: T) -> Self + pub(crate) fn text(value: T) -> Self where T: Into>, { - Value::Text(Some(value.into())) + Self::Text(Some(value.into())) } /// Creates a new enum value. - pub fn enum_variant(value: T) -> Self + pub(crate) fn enum_variant(value: T) -> Self where - T: Into>, + T: Into>, { - Value::Enum(Some(EnumVariant::new(value)), None) + Self::Enum(Some(value.into()), None) } /// Creates a new enum value with the name of the enum attached. - pub fn enum_variant_with_name(value: T, name: U, schema_name: Option) -> Self + pub(crate) fn enum_variant_with_name(value: T, enum_name: U) -> Self where - T: Into>, - U: Into>, - V: Into>, + T: Into>, + U: Into>, + { + Self::Enum(Some(value.into()), Some(enum_name.into())) + } + + /// Creates a new enum array value + pub(crate) fn enum_array(value: T) -> Self + where + T: IntoIterator>, { - Value::Enum(Some(EnumVariant::new(value)), Some(EnumName::new(name, schema_name))) + Self::EnumArray(Some(value.into_iter().collect()), None) + } + + /// Creates a new enum array value with the name of the enum attached. + pub(crate) fn enum_array_with_name(value: T, name: U) -> Self + where + T: IntoIterator>, + U: Into>, + { + Self::EnumArray(Some(value.into_iter().collect()), Some(name.into())) } /// Creates a new bytes value. - pub fn bytes(value: B) -> Self + pub(crate) fn bytes(value: B) -> Self where B: Into>, { - Value::Bytes(Some(value.into())) + Self::Bytes(Some(value.into())) } /// Creates a new boolean value. - pub fn boolean(value: B) -> Self + pub(crate) fn boolean(value: B) -> Self where B: Into, { - Value::Boolean(Some(value.into())) + Self::Boolean(Some(value.into())) } /// Creates a new character value. - pub fn character(value: C) -> Self + pub(crate) fn character(value: C) -> Self where C: Into, { - Value::Char(Some(value.into())) + Self::Char(Some(value.into())) } /// Creates a new array value. - pub fn array(value: I) -> Self + pub(crate) fn array(value: I) -> Self where I: IntoIterator, V: Into>, { - Value::Array(Some(value.into_iter().map(|v| v.into()).collect())) + Self::Array(Some(value.into_iter().map(|v| v.into()).collect())) } /// Creates a new uuid value. #[cfg(feature = "uuid")] #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] - pub const fn uuid(value: Uuid) -> Self { - Value::Uuid(Some(value)) + pub(crate) fn uuid(value: Uuid) -> Self { + Self::Uuid(Some(value)) } /// Creates a new datetime value. 
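+    /// Sketch: `ValueType::datetime(chrono::Utc::now()).into_value()` is
+    /// equivalent to calling the public `Value::datetime` constructor above.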
- pub const fn datetime(value: DateTime) -> Self { - Value::DateTime(Some(value)) + pub(crate) fn datetime(value: DateTime) -> Self { + Self::DateTime(Some(value)) } /// Creates a new date value. - pub const fn date(value: NaiveDate) -> Self { - Value::Date(Some(value)) + pub(crate) fn date(value: NaiveDate) -> Self { + Self::Date(Some(value)) } /// Creates a new time value. - pub const fn time(value: NaiveTime) -> Self { - Value::Time(Some(value)) + pub(crate) fn time(value: NaiveTime) -> Self { + Self::Time(Some(value)) } /// Creates a new JSON value. - pub const fn json(value: serde_json::Value) -> Self { - Value::Json(Some(value)) + pub(crate) fn json(value: serde_json::Value) -> Self { + Self::Json(Some(value)) } /// Creates a new XML value. - pub fn xml(value: T) -> Self + pub(crate) fn xml(value: T) -> Self where T: Into>, { - Value::Xml(Some(value.into())) + Self::Xml(Some(value.into())) } /// `true` if the `Value` is null. - pub const fn is_null(&self) -> bool { + pub fn is_null(&self) -> bool { match self { - Value::Int32(i) => i.is_none(), - Value::Int64(i) => i.is_none(), - Value::Float(i) => i.is_none(), - Value::Double(i) => i.is_none(), - Value::Text(t) => t.is_none(), - Value::Enum(e, _) => e.is_none(), - Value::EnumArray(e, _) => e.is_none(), - Value::Bytes(b) => b.is_none(), - Value::Boolean(b) => b.is_none(), - Value::Char(c) => c.is_none(), - Value::Array(v) => v.is_none(), - Value::Xml(s) => s.is_none(), + Self::Int32(i) => i.is_none(), + Self::Int64(i) => i.is_none(), + Self::Float(i) => i.is_none(), + Self::Double(i) => i.is_none(), + Self::Text(t) => t.is_none(), + Self::Enum(e, _) => e.is_none(), + Self::EnumArray(e, _) => e.is_none(), + Self::Bytes(b) => b.is_none(), + Self::Boolean(b) => b.is_none(), + Self::Char(c) => c.is_none(), + Self::Array(v) => v.is_none(), + Self::Xml(s) => s.is_none(), #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.is_none(), + Self::Numeric(r) => r.is_none(), #[cfg(feature = "uuid")] - Value::Uuid(u) => u.is_none(), - Value::DateTime(dt) => dt.is_none(), - Value::Date(d) => d.is_none(), - Value::Time(t) => t.is_none(), - Value::Json(json) => json.is_none(), + Self::Uuid(u) => u.is_none(), + Self::DateTime(dt) => dt.is_none(), + Self::Date(d) => d.is_none(), + Self::Time(t) => t.is_none(), + Self::Json(json) => json.is_none(), } } /// `true` if the `Value` is text. - pub const fn is_text(&self) -> bool { - matches!(self, Value::Text(_)) + pub(crate) fn is_text(&self) -> bool { + matches!(self, Self::Text(_)) } /// Returns a &str if the value is text, otherwise `None`. - pub fn as_str(&self) -> Option<&str> { + pub(crate) fn as_str(&self) -> Option<&str> { match self { - Value::Text(Some(cow)) => Some(cow.borrow()), - Value::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).ok(), + Self::Text(Some(cow)) => Some(cow.borrow()), + Self::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).ok(), _ => None, } } /// Returns a char if the value is a char, otherwise `None`. - pub const fn as_char(&self) -> Option { + pub(crate) fn as_char(&self) -> Option { match self { - Value::Char(c) => *c, + Self::Char(c) => *c, _ => None, } } /// Returns a cloned String if the value is text, otherwise `None`. 
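+    /// Valid UTF-8 byte values are converted as well, mirroring `as_str`.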
- pub fn to_string(&self) -> Option { + pub(crate) fn to_string(&self) -> Option { match self { - Value::Text(Some(cow)) => Some(cow.to_string()), - Value::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).map(|s| s.to_owned()).ok(), + Self::Text(Some(cow)) => Some(cow.to_string()), + Self::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).map(|s| s.to_owned()).ok(), _ => None, } } /// Transforms the `Value` to a `String` if it's text, /// otherwise `None`. - pub fn into_string(self) -> Option { + pub(crate) fn into_string(self) -> Option { match self { - Value::Text(Some(cow)) => Some(cow.into_owned()), - Value::Bytes(Some(cow)) => String::from_utf8(cow.into_owned()).ok(), + Self::Text(Some(cow)) => Some(cow.into_owned()), + Self::Bytes(Some(cow)) => String::from_utf8(cow.into_owned()).ok(), _ => None, } } /// Returns whether this value is the `Bytes` variant. - pub const fn is_bytes(&self) -> bool { - matches!(self, Value::Bytes(_)) + pub(crate) fn is_bytes(&self) -> bool { + matches!(self, Self::Bytes(_)) } /// Returns a bytes slice if the value is text or a byte slice, otherwise `None`. - pub fn as_bytes(&self) -> Option<&[u8]> { + pub(crate) fn as_bytes(&self) -> Option<&[u8]> { match self { - Value::Text(Some(cow)) => Some(cow.as_ref().as_bytes()), - Value::Bytes(Some(cow)) => Some(cow.as_ref()), + Self::Text(Some(cow)) => Some(cow.as_ref().as_bytes()), + Self::Bytes(Some(cow)) => Some(cow.as_ref()), _ => None, } } /// Returns a cloned `Vec` if the value is text or a byte slice, otherwise `None`. - pub fn to_bytes(&self) -> Option> { + pub(crate) fn to_bytes(&self) -> Option> { match self { - Value::Text(Some(cow)) => Some(cow.to_string().into_bytes()), - Value::Bytes(Some(cow)) => Some(cow.to_vec()), + Self::Text(Some(cow)) => Some(cow.to_string().into_bytes()), + Self::Bytes(Some(cow)) => Some(cow.to_vec()), _ => None, } } /// `true` if the `Value` is a 32-bit signed integer. - pub const fn is_i32(&self) -> bool { - matches!(self, Value::Int32(_)) + pub(crate) fn is_i32(&self) -> bool { + matches!(self, Self::Int32(_)) } /// `true` if the `Value` is a 64-bit signed integer. - pub const fn is_i64(&self) -> bool { - matches!(self, Value::Int64(_)) + pub(crate) fn is_i64(&self) -> bool { + matches!(self, Self::Int64(_)) } /// `true` if the `Value` is a signed integer. - pub const fn is_integer(&self) -> bool { - matches!(self, Value::Int32(_) | Value::Int64(_)) + pub fn is_integer(&self) -> bool { + matches!(self, Self::Int32(_) | Self::Int64(_)) } /// Returns an `i64` if the value is a 64-bit signed integer, otherwise `None`. - pub const fn as_i64(&self) -> Option { + pub(crate) fn as_i64(&self) -> Option { match self { - Value::Int64(i) => *i, + Self::Int64(i) => *i, _ => None, } } /// Returns an `i32` if the value is a 32-bit signed integer, otherwise `None`. - pub const fn as_i32(&self) -> Option { + pub(crate) fn as_i32(&self) -> Option { match self { - Value::Int32(i) => *i, + Self::Int32(i) => *i, _ => None, } } @@ -472,24 +936,24 @@ impl<'a> Value<'a> { /// Returns an `i64` if the value is a signed integer, otherwise `None`. pub fn as_integer(&self) -> Option { match self { - Value::Int32(i) => i.map(|i| i as i64), - Value::Int64(i) => *i, + Self::Int32(i) => i.map(|i| i as i64), + Self::Int64(i) => *i, _ => None, } } /// Returns a `f64` if the value is a double, otherwise `None`. 
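+    /// Sketch: `Value::double(1.5).as_f64()` yields `Some(1.5)`; a `Value::float`
+    /// is not coerced and needs `as_f32` below, since the variants are distinct.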
- pub const fn as_f64(&self) -> Option { + pub(crate) fn as_f64(&self) -> Option { match self { - Value::Double(Some(f)) => Some(*f), + Self::Double(Some(f)) => Some(*f), _ => None, } } /// Returns a `f32` if the value is a double, otherwise `None`. - pub const fn as_f32(&self) -> Option { + pub(crate) fn as_f32(&self) -> Option { match self { - Value::Float(Some(f)) => Some(*f), + Self::Float(Some(f)) => Some(*f), _ => None, } } @@ -497,19 +961,19 @@ impl<'a> Value<'a> { /// `true` if the `Value` is a numeric value or can be converted to one. #[cfg(feature = "bigdecimal")] #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub const fn is_numeric(&self) -> bool { - matches!(self, Value::Numeric(_) | Value::Float(_) | Value::Double(_)) + pub(crate) fn is_numeric(&self) -> bool { + matches!(self, Self::Numeric(_) | Self::Float(_) | Self::Double(_)) } /// Returns a bigdecimal, if the value is a numeric, float or double value, /// otherwise `None`. #[cfg(feature = "bigdecimal")] #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub fn into_numeric(self) -> Option { + pub(crate) fn into_numeric(self) -> Option { match self { - Value::Numeric(d) => d, - Value::Float(f) => f.and_then(BigDecimal::from_f32), - Value::Double(f) => f.and_then(BigDecimal::from_f64), + Self::Numeric(d) => d, + Self::Float(f) => f.and_then(BigDecimal::from_f32), + Self::Double(f) => f.and_then(BigDecimal::from_f64), _ => None, } } @@ -518,125 +982,125 @@ impl<'a> Value<'a> { /// Otherwise `None`. #[cfg(feature = "bigdecimal")] #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] - pub const fn as_numeric(&self) -> Option<&BigDecimal> { + pub(crate) fn as_numeric(&self) -> Option<&BigDecimal> { match self { - Value::Numeric(d) => d.as_ref(), + Self::Numeric(d) => d.as_ref(), _ => None, } } /// `true` if the `Value` is a boolean value. - pub const fn is_bool(&self) -> bool { + pub(crate) fn is_bool(&self) -> bool { match self { - Value::Boolean(_) => true, + Self::Boolean(_) => true, // For schemas which don't tag booleans - Value::Int32(Some(i)) if *i == 0 || *i == 1 => true, - Value::Int64(Some(i)) if *i == 0 || *i == 1 => true, + Self::Int32(Some(i)) if *i == 0 || *i == 1 => true, + Self::Int64(Some(i)) if *i == 0 || *i == 1 => true, _ => false, } } /// Returns a bool if the value is a boolean, otherwise `None`. - pub const fn as_bool(&self) -> Option { + pub(crate) fn as_bool(&self) -> Option { match self { - Value::Boolean(b) => *b, + Self::Boolean(b) => *b, // For schemas which don't tag booleans - Value::Int32(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), - Value::Int64(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), + Self::Int32(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), + Self::Int64(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1), _ => None, } } /// `true` if the `Value` is an Array. - pub const fn is_array(&self) -> bool { - matches!(self, Value::Array(_)) + pub(crate) fn is_array(&self) -> bool { + matches!(self, Self::Array(_)) } /// `true` if the `Value` is of UUID type. #[cfg(feature = "uuid")] #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] - pub const fn is_uuid(&self) -> bool { - matches!(self, Value::Uuid(_)) + pub(crate) fn is_uuid(&self) -> bool { + matches!(self, Self::Uuid(_)) } /// Returns an UUID if the value is of UUID type, otherwise `None`. 
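+    /// (Compiled only with the `uuid` feature enabled, like `Value::uuid`.)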
#[cfg(feature = "uuid")] #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] - pub const fn as_uuid(&self) -> Option { + pub(crate) fn as_uuid(&self) -> Option { match self { - Value::Uuid(u) => *u, + Self::Uuid(u) => *u, _ => None, } } /// `true` if the `Value` is a DateTime. - pub const fn is_datetime(&self) -> bool { - matches!(self, Value::DateTime(_)) + pub(crate) fn is_datetime(&self) -> bool { + matches!(self, Self::DateTime(_)) } /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`. - pub const fn as_datetime(&self) -> Option> { + pub(crate) fn as_datetime(&self) -> Option> { match self { - Value::DateTime(dt) => *dt, + Self::DateTime(dt) => *dt, _ => None, } } /// `true` if the `Value` is a Date. - pub const fn is_date(&self) -> bool { - matches!(self, Value::Date(_)) + pub(crate) fn is_date(&self) -> bool { + matches!(self, Self::Date(_)) } /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`. - pub const fn as_date(&self) -> Option { + pub(crate) fn as_date(&self) -> Option { match self { - Value::Date(dt) => *dt, + Self::Date(dt) => *dt, _ => None, } } /// `true` if the `Value` is a `Time`. - pub const fn is_time(&self) -> bool { - matches!(self, Value::Time(_)) + pub(crate) fn is_time(&self) -> bool { + matches!(self, Self::Time(_)) } /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`. - pub const fn as_time(&self) -> Option { + pub(crate) fn as_time(&self) -> Option { match self { - Value::Time(time) => *time, + Self::Time(time) => *time, _ => None, } } /// `true` if the `Value` is a JSON value. - pub const fn is_json(&self) -> bool { - matches!(self, Value::Json(_)) + pub(crate) fn is_json(&self) -> bool { + matches!(self, Self::Json(_)) } /// Returns a reference to a JSON Value if of Json type, otherwise `None`. - pub const fn as_json(&self) -> Option<&serde_json::Value> { + pub(crate) fn as_json(&self) -> Option<&serde_json::Value> { match self { - Value::Json(Some(j)) => Some(j), + Self::Json(Some(j)) => Some(j), _ => None, } } /// Transforms to a JSON Value if of Json type, otherwise `None`. - pub fn into_json(self) -> Option { + pub(crate) fn into_json(self) -> Option { match self { - Value::Json(Some(j)) => Some(j), + Self::Json(Some(j)) => Some(j), _ => None, } } /// Returns a `Vec` if the value is an array of `T`, otherwise `None`. - pub fn into_vec(self) -> Option> + pub(crate) fn into_vec(self) -> Option> where // Implement From T: TryFrom>, { match self { - Value::Array(Some(vec)) => { + Self::Array(Some(vec)) => { let rslt: Result, _> = vec.into_iter().map(T::try_from).collect(); match rslt { Err(_) => None, @@ -648,12 +1112,12 @@ impl<'a> Value<'a> { } /// Returns a cloned Vec if the value is an array of T, otherwise `None`. 
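+    ///
+    /// A sketch, relying on the `TryFrom<Value>` impl for `i64` defined below:
+    ///
+    /// ```ignore
+    /// let pv = Value::array(vec![1_i64]);
+    /// let ints: Vec<i64> = pv.typed.to_vec().expect("convert to Vec<i64>");
+    /// ```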
- pub fn to_vec(&self) -> Option> + pub(crate) fn to_vec(&self) -> Option> where T: TryFrom>, { match self { - Value::Array(Some(vec)) => { + Self::Array(Some(vec)) => { let rslt: Result, _> = vec.clone().into_iter().map(T::try_from).collect(); match rslt { Err(_) => None, @@ -669,6 +1133,8 @@ value!(val: i64, Int64, val); value!(val: i32, Int32, val); value!(val: bool, Boolean, val); value!(val: &'a str, Text, val.into()); +value!(val: &'a String, Text, val.into()); +value!(val: &'a &str, Text, (*val).into()); value!(val: String, Text, val.into()); value!(val: usize, Int64, i64::try_from(val).unwrap()); value!(val: &'a [u8], Bytes, val.into()); @@ -689,6 +1155,7 @@ impl<'a> TryFrom> for i64 { fn try_from(value: Value<'a>) -> Result { value + .typed .as_i64() .ok_or_else(|| Error::builder(ErrorKind::conversion("Not an i64")).build()) } @@ -759,24 +1226,24 @@ impl<'a> TryFrom<&Value<'a>> for Option { type Error = Error; fn try_from(value: &Value<'a>) -> Result, Self::Error> { - match value { - val @ Value::Text(Some(_)) => { - let text = val.as_str().unwrap(); + match &value.typed { + ValueType::Text(Some(_)) => { + let text = value.typed.as_str().unwrap(); match std::net::IpAddr::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - val @ Value::Bytes(Some(_)) => { - let text = val.as_str().unwrap(); + ValueType::Bytes(Some(_)) => { + let text = value.typed.as_str().unwrap(); match std::net::IpAddr::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - v if v.is_null() => Ok(None), + _ if value.typed.is_null() => Ok(None), v => { let kind = ErrorKind::conversion(format!("Couldn't convert value of type `{v:?}` to std::net::IpAddr.")); @@ -792,25 +1259,25 @@ impl<'a> TryFrom<&Value<'a>> for Option { type Error = Error; fn try_from(value: &Value<'a>) -> Result, Self::Error> { - match value { - Value::Uuid(uuid) => Ok(*uuid), - val @ Value::Text(Some(_)) => { - let text = val.as_str().unwrap(); + match &value.typed { + ValueType::Uuid(uuid) => Ok(*uuid), + ValueType::Text(Some(_)) => { + let text = value.typed.as_str().unwrap(); match uuid::Uuid::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - val @ Value::Bytes(Some(_)) => { - let text = val.as_str().unwrap(); + ValueType::Bytes(Some(_)) => { + let text = value.typed.as_str().unwrap(); match uuid::Uuid::from_str(text) { Ok(ip) => Ok(Some(ip)), Err(e) => Err(e.into()), } } - v if v.is_null() => Ok(None), + _ if value.typed.is_null() => Ok(None), v => { let kind = ErrorKind::conversion(format!("Couldn't convert value of type `{v:?}` to uuid::Uuid.")); @@ -913,35 +1380,35 @@ mod tests { #[test] fn a_parameterized_value_of_ints32_can_be_converted_into_a_vec() { let pv = Value::array(vec![1]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![1]); } #[test] fn a_parameterized_value_of_ints64_can_be_converted_into_a_vec() { let pv = Value::array(vec![1_i64]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![1]); } #[test] fn a_parameterized_value_of_reals_can_be_converted_into_a_vec() { let pv = Value::array(vec![1.0]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![1.0]); } #[test] fn a_parameterized_value_of_texts_can_be_converted_into_a_vec() { let pv = 
Value::array(vec!["test"]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec!["test"]); } #[test] fn a_parameterized_value_of_booleans_can_be_converted_into_a_vec() { let pv = Value::array(vec![true]); - let values: Vec = pv.into_vec().expect("convert into Vec"); + let values: Vec = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![true]); } @@ -949,14 +1416,14 @@ mod tests { fn a_parameterized_value_of_datetimes_can_be_converted_into_a_vec() { let datetime = DateTime::from_str("2019-07-27T05:30:30Z").expect("parsing date/time"); let pv = Value::array(vec![datetime]); - let values: Vec> = pv.into_vec().expect("convert into Vec"); + let values: Vec> = pv.typed.into_vec().expect("convert into Vec"); assert_eq!(values, vec![datetime]); } #[test] fn a_parameterized_value_of_an_array_cant_be_converted_into_a_vec_of_the_wrong_type() { let pv = Value::array(vec![1]); - let rslt: Option> = pv.into_vec(); + let rslt: Option> = pv.typed.into_vec(); assert!(rslt.is_none()); } diff --git a/quaint/src/connector/metrics.rs b/quaint/src/connector/metrics.rs index e806f98c1d8f..2705a40b32b2 100644 --- a/quaint/src/connector/metrics.rs +++ b/quaint/src/connector/metrics.rs @@ -36,7 +36,7 @@ where trace_query(&query_fmt, params, result, start); } else { - trace_query(&query, params, result, start); + trace_query(query, params, result, start); }; } diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs index 682e75b44760..246d1a30cdde 100644 --- a/quaint/src/connector/mssql/conversion.rs +++ b/quaint/src/connector/mssql/conversion.rs @@ -1,4 +1,4 @@ -use crate::ast::Value; +use crate::ast::{Value, ValueType}; #[cfg(not(feature = "bigdecimal"))] use crate::error::*; #[cfg(feature = "bigdecimal")] @@ -10,26 +10,26 @@ use tiberius::{ColumnData, FromSql, IntoSql}; impl<'a> IntoSql<'a> for &'a Value<'a> { fn into_sql(self) -> ColumnData<'a> { - match self { - Value::Int32(val) => val.into_sql(), - Value::Int64(val) => val.into_sql(), - Value::Float(val) => val.into_sql(), - Value::Double(val) => val.into_sql(), - Value::Text(val) => val.as_deref().into_sql(), - Value::Bytes(val) => val.as_deref().into_sql(), - Value::Enum(val, _) => val.as_deref().into_sql(), - Value::Boolean(val) => val.into_sql(), - Value::Char(val) => val.as_ref().map(|val| format!("{val}")).into_sql(), - Value::Xml(val) => val.as_deref().into_sql(), - Value::Array(_) | Value::EnumArray(_, _) => panic!("Arrays are not supported on SQL Server."), + match &self.typed { + ValueType::Int32(val) => val.into_sql(), + ValueType::Int64(val) => val.into_sql(), + ValueType::Float(val) => val.into_sql(), + ValueType::Double(val) => val.into_sql(), + ValueType::Text(val) => val.as_deref().into_sql(), + ValueType::Bytes(val) => val.as_deref().into_sql(), + ValueType::Enum(val, _) => val.as_deref().into_sql(), + ValueType::Boolean(val) => val.into_sql(), + ValueType::Char(val) => val.as_ref().map(|val| format!("{val}")).into_sql(), + ValueType::Xml(val) => val.as_deref().into_sql(), + ValueType::Array(_) | ValueType::EnumArray(_, _) => panic!("Arrays are not supported on SQL Server."), #[cfg(feature = "bigdecimal")] - Value::Numeric(val) => (*val).to_sql(), - Value::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(), + ValueType::Numeric(val) => (*val).to_sql(), + ValueType::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(), 
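+            // JSON has no native tiberius binding in this match, so it is
+            // serialized to a string above before being handed to the driver.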
#[cfg(feature = "uuid")] - Value::Uuid(val) => val.into_sql(), - Value::DateTime(val) => val.into_sql(), - Value::Date(val) => val.into_sql(), - Value::Time(val) => val.into_sql(), + ValueType::Uuid(val) => val.into_sql(), + ValueType::DateTime(val) => val.into_sql(), + ValueType::Date(val) => val.into_sql(), + ValueType::Time(val) => val.into_sql(), } } } @@ -39,18 +39,18 @@ impl TryFrom> for Value<'static> { fn try_from(cd: ColumnData<'static>) -> crate::Result { let res = match cd { - ColumnData::U8(num) => Value::Int32(num.map(i32::from)), - ColumnData::I16(num) => Value::Int32(num.map(i32::from)), - ColumnData::I32(num) => Value::Int32(num.map(i32::from)), - ColumnData::I64(num) => Value::Int64(num.map(i64::from)), - ColumnData::F32(num) => Value::Float(num), - ColumnData::F64(num) => Value::Double(num), - ColumnData::Bit(b) => Value::Boolean(b), - ColumnData::String(s) => Value::Text(s), - ColumnData::Guid(uuid) => Value::Uuid(uuid), - ColumnData::Binary(bytes) => Value::Bytes(bytes), + ColumnData::U8(num) => ValueType::Int32(num.map(i32::from)), + ColumnData::I16(num) => ValueType::Int32(num.map(i32::from)), + ColumnData::I32(num) => ValueType::Int32(num.map(i32::from)), + ColumnData::I64(num) => ValueType::Int64(num.map(i64::from)), + ColumnData::F32(num) => ValueType::Float(num), + ColumnData::F64(num) => ValueType::Double(num), + ColumnData::Bit(b) => ValueType::Boolean(b), + ColumnData::String(s) => ValueType::Text(s), + ColumnData::Guid(uuid) => ValueType::Uuid(uuid), + ColumnData::Binary(bytes) => ValueType::Bytes(bytes), #[cfg(feature = "bigdecimal")] - numeric @ ColumnData::Numeric(_) => Value::Numeric(BigDecimal::from_sql(&numeric)?), + numeric @ ColumnData::Numeric(_) => ValueType::Numeric(BigDecimal::from_sql(&numeric)?), #[cfg(not(feature = "bigdecimal"))] _numeric @ ColumnData::Numeric(_) => { let kind = ErrorKind::conversion("Please enable `bigdecimal` feature to read numeric values"); @@ -60,38 +60,38 @@ impl TryFrom> for Value<'static> { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); - Value::DateTime(dt) + ValueType::DateTime(dt) } dt @ ColumnData::SmallDateTime(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); - Value::DateTime(dt) + ValueType::DateTime(dt) } dt @ ColumnData::Time(_) => { use tiberius::time::chrono::NaiveTime; - Value::Time(NaiveTime::from_sql(&dt)?) + ValueType::Time(NaiveTime::from_sql(&dt)?) } dt @ ColumnData::Date(_) => { use tiberius::time::chrono::NaiveDate; - Value::Date(NaiveDate::from_sql(&dt)?) + ValueType::Date(NaiveDate::from_sql(&dt)?) } dt @ ColumnData::DateTime2(_) => { use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc}; let dt = NaiveDateTime::from_sql(&dt)?.map(|dt| DateTime::::from_utc(dt, Utc)); - Value::DateTime(dt) + ValueType::DateTime(dt) } dt @ ColumnData::DateTimeOffset(_) => { use tiberius::time::chrono::{DateTime, Utc}; - Value::DateTime(DateTime::::from_sql(&dt)?) + ValueType::DateTime(DateTime::::from_sql(&dt)?) 
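+                // All four SQL Server datetime flavors (DateTime, SmallDateTime,
+                // DateTime2, DateTimeOffset) funnel into the single
+                // `ValueType::DateTime` variant, normalized to UTC.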
} - ColumnData::Xml(cow) => Value::Xml(cow.map(|xml_data| Cow::Owned(xml_data.into_owned().into_string()))), + ColumnData::Xml(cow) => ValueType::Xml(cow.map(|xml_data| Cow::Owned(xml_data.into_owned().into_string()))), }; - Ok(res) + Ok(Value::from(res)) } } diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index e4be7b47c404..d0c28a9786fe 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -560,7 +560,7 @@ impl Queryable for Mysql { let version_string = rows .get(0) - .and_then(|row| row.get("version").and_then(|version| version.to_string())); + .and_then(|row| row.get("version").and_then(|version| version.typed.to_string())); Ok(version_string) } diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs index c9b1e812873d..9230199eaf40 100644 --- a/quaint/src/connector/mysql/conversion.rs +++ b/quaint/src/connector/mysql/conversion.rs @@ -1,5 +1,5 @@ use crate::{ - ast::Value, + ast::{Value, ValueType}, connector::{queryable::TakeRow, TypeIdentifier}, error::{Error, ErrorKind}, }; @@ -19,18 +19,18 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { let mut values = Vec::with_capacity(params.len()); for pv in params { - let res = match pv { - Value::Int32(i) => i.map(|i| my::Value::Int(i as i64)), - Value::Int64(i) => i.map(my::Value::Int), - Value::Float(f) => f.map(my::Value::Float), - Value::Double(f) => f.map(my::Value::Double), - Value::Text(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), - Value::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())), - Value::Enum(s, _) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), - Value::Boolean(b) => b.map(|b| my::Value::Int(b as i64)), - Value::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])), - Value::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())), - Value::Array(_) | Value::EnumArray(_, _) => { + let res = match &pv.typed { + ValueType::Int32(i) => i.map(|i| my::Value::Int(i as i64)), + ValueType::Int64(i) => i.map(my::Value::Int), + ValueType::Float(f) => f.map(my::Value::Float), + ValueType::Double(f) => f.map(my::Value::Double), + ValueType::Text(s) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), + ValueType::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())), + ValueType::Enum(s, _) => s.clone().map(|s| my::Value::Bytes((*s).as_bytes().to_vec())), + ValueType::Boolean(b) => b.map(|b| my::Value::Int(b as i64)), + ValueType::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])), + ValueType::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())), + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in MySQL."; let kind = ErrorKind::conversion(msg); @@ -40,8 +40,8 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { return Err(builder.build()); } #[cfg(feature = "bigdecimal")] - Value::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), - Value::Json(s) => match s { + ValueType::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), + ValueType::Json(s) => match s { Some(ref s) => { let json = serde_json::to_string(s)?; let bytes = json.into_bytes(); @@ -51,14 +51,14 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result { None => None, }, #[cfg(feature = "uuid")] - Value::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())), - 
Value::Date(d) => { + ValueType::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())), + ValueType::Date(d) => { d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0)) } - Value::Time(t) => { + ValueType::Time(t) => { t.map(|t| my::Value::Time(false, 0, t.hour() as u8, t.minute() as u8, t.second() as u8, 0)) } - Value::DateTime(dt) => dt.map(|dt| { + ValueType::DateTime(dt) => dt.map(|dt| { my::Value::Date( dt.year() as u16, dt.month() as u8, @@ -303,21 +303,21 @@ impl TakeRow for my::Row { Value::time(time) } my::Value::NULL => match column { - t if t.is_bool() => Value::Boolean(None), - t if t.is_enum() => Value::Enum(None, None), - t if t.is_null() => Value::Int32(None), - t if t.is_int64() => Value::Int64(None), - t if t.is_int32() => Value::Int32(None), - t if t.is_float() => Value::Float(None), - t if t.is_double() => Value::Double(None), - t if t.is_text() => Value::Text(None), - t if t.is_bytes() => Value::Bytes(None), + t if t.is_bool() => Value::null_boolean(), + t if t.is_enum() => Value::null_enum(), + t if t.is_null() => Value::null_int32(), + t if t.is_int64() => Value::null_int64(), + t if t.is_int32() => Value::null_int32(), + t if t.is_float() => Value::null_float(), + t if t.is_double() => Value::null_double(), + t if t.is_text() => Value::null_text(), + t if t.is_bytes() => Value::null_bytes(), #[cfg(feature = "bigdecimal")] - t if t.is_real() => Value::Numeric(None), - t if t.is_datetime() => Value::DateTime(None), - t if t.is_time() => Value::Time(None), - t if t.is_date() => Value::Date(None), - t if t.is_json() => Value::Json(None), + t if t.is_real() => Value::null_numeric(), + t if t.is_datetime() => Value::null_datetime(), + t if t.is_time() => Value::null_time(), + t if t.is_date() => Value::null_date(), + t if t.is_json() => Value::null_json(), typ => { let msg = format!("Value of type {typ:?} is not supported with the current configuration"); diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index c35208f84199..dadc39faea2a 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -1178,7 +1178,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1230,7 +1230,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1281,7 +1281,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1332,7 +1332,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1383,7 +1383,7 @@ mod tests { let result_set = client.query_raw("SHOW search_path", &[]).await.unwrap(); let row = result_set.first().unwrap(); - row[0].to_string() + row[0].typed.to_string() } // Safe @@ -1480,9 +1480,7 @@ mod tests { let url = Url::parse(&CONN_STR).unwrap(); let conn = Quaint::new(url.as_str()).await.unwrap(); - let res = conn - .query_raw("SELECT $1", &[Value::integer(1), Value::integer(2)]) - .await; + let res = conn.query_raw("SELECT $1", &[Value::int32(1), Value::int32(2)]).await; assert!(res.is_err()); diff --git 
a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs index f321e1829529..0ab4413f792c 100644 --- a/quaint/src/connector/postgres/conversion.rs +++ b/quaint/src/connector/postgres/conversion.rs @@ -2,7 +2,7 @@ mod decimal; use crate::{ - ast::Value, + ast::{Value, ValueType}, connector::queryable::{GetRow, ToColumnNames}, error::{Error, ErrorKind}, }; @@ -38,27 +38,27 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { return PostgresType::UNKNOWN; } - match p { - Value::Int32(_) => PostgresType::INT4, - Value::Int64(_) => PostgresType::INT8, - Value::Float(_) => PostgresType::FLOAT4, - Value::Double(_) => PostgresType::FLOAT8, - Value::Text(_) => PostgresType::TEXT, + match &p.typed { + ValueType::Int32(_) => PostgresType::INT4, + ValueType::Int64(_) => PostgresType::INT8, + ValueType::Float(_) => PostgresType::FLOAT4, + ValueType::Double(_) => PostgresType::FLOAT8, + ValueType::Text(_) => PostgresType::TEXT, // Enums are user-defined types, we can't statically infer them, so we let PG infer it - Value::Enum(_, _) | Value::EnumArray(_, _) => PostgresType::UNKNOWN, - Value::Bytes(_) => PostgresType::BYTEA, - Value::Boolean(_) => PostgresType::BOOL, - Value::Char(_) => PostgresType::CHAR, + ValueType::Enum(_, _) | ValueType::EnumArray(_, _) => PostgresType::UNKNOWN, + ValueType::Bytes(_) => PostgresType::BYTEA, + ValueType::Boolean(_) => PostgresType::BOOL, + ValueType::Char(_) => PostgresType::CHAR, #[cfg(feature = "bigdecimal")] - Value::Numeric(_) => PostgresType::NUMERIC, - Value::Json(_) => PostgresType::JSONB, - Value::Xml(_) => PostgresType::XML, + ValueType::Numeric(_) => PostgresType::NUMERIC, + ValueType::Json(_) => PostgresType::JSONB, + ValueType::Xml(_) => PostgresType::XML, #[cfg(feature = "uuid")] - Value::Uuid(_) => PostgresType::UUID, - Value::DateTime(_) => PostgresType::TIMESTAMPTZ, - Value::Date(_) => PostgresType::TIMESTAMP, - Value::Time(_) => PostgresType::TIME, - Value::Array(ref arr) => { + ValueType::Uuid(_) => PostgresType::UUID, + ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ, + ValueType::Date(_) => PostgresType::TIMESTAMP, + ValueType::Time(_) => PostgresType::TIME, + ValueType::Array(ref arr) => { let arr = arr.as_ref().unwrap(); // If the array is empty, we can't infer the type so we let PG infer it @@ -71,33 +71,33 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec { // If the array does not contain the same types of values, we let PG infer the type if arr .iter() - .any(|val| std::mem::discriminant(first) != std::mem::discriminant(val)) + .any(|val| std::mem::discriminant(&first.typed) != std::mem::discriminant(&val.typed)) { return PostgresType::UNKNOWN; } - match first { - Value::Int32(_) => PostgresType::INT4_ARRAY, - Value::Int64(_) => PostgresType::INT8_ARRAY, - Value::Float(_) => PostgresType::FLOAT4_ARRAY, - Value::Double(_) => PostgresType::FLOAT8_ARRAY, - Value::Text(_) => PostgresType::TEXT_ARRAY, + match first.typed { + ValueType::Int32(_) => PostgresType::INT4_ARRAY, + ValueType::Int64(_) => PostgresType::INT8_ARRAY, + ValueType::Float(_) => PostgresType::FLOAT4_ARRAY, + ValueType::Double(_) => PostgresType::FLOAT8_ARRAY, + ValueType::Text(_) => PostgresType::TEXT_ARRAY, // Enums are special types, we can't statically infer them, so we let PG infer it - Value::Enum(_, _) | Value::EnumArray(_, _) => PostgresType::UNKNOWN, - Value::Bytes(_) => PostgresType::BYTEA_ARRAY, - Value::Boolean(_) => PostgresType::BOOL_ARRAY, - Value::Char(_) => PostgresType::CHAR_ARRAY, + 
ValueType::Enum(_, _) | ValueType::EnumArray(_, _) => PostgresType::UNKNOWN, + ValueType::Bytes(_) => PostgresType::BYTEA_ARRAY, + ValueType::Boolean(_) => PostgresType::BOOL_ARRAY, + ValueType::Char(_) => PostgresType::CHAR_ARRAY, #[cfg(feature = "bigdecimal")] - Value::Numeric(_) => PostgresType::NUMERIC_ARRAY, - Value::Json(_) => PostgresType::JSONB_ARRAY, - Value::Xml(_) => PostgresType::XML_ARRAY, + ValueType::Numeric(_) => PostgresType::NUMERIC_ARRAY, + ValueType::Json(_) => PostgresType::JSONB_ARRAY, + ValueType::Xml(_) => PostgresType::XML_ARRAY, #[cfg(feature = "uuid")] - Value::Uuid(_) => PostgresType::UUID_ARRAY, - Value::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, - Value::Date(_) => PostgresType::TIMESTAMP_ARRAY, - Value::Time(_) => PostgresType::TIME_ARRAY, + ValueType::Uuid(_) => PostgresType::UUID_ARRAY, + ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY, + ValueType::Date(_) => PostgresType::TIMESTAMP_ARRAY, + ValueType::Time(_) => PostgresType::TIME_ARRAY, // In the case of nested arrays, we let PG infer the type - Value::Array(_) => PostgresType::UNKNOWN, + ValueType::Array(_) => PostgresType::UNKNOWN, } } } @@ -171,63 +171,63 @@ impl GetRow for PostgresRow { fn get_result_row(&self) -> crate::Result>> { fn convert(row: &PostgresRow, i: usize) -> crate::Result> { let result = match *row.columns()[i].type_() { - PostgresType::BOOL => Value::Boolean(row.try_get(i)?), + PostgresType::BOOL => ValueType::Boolean(row.try_get(i)?).into_value(), PostgresType::INT2 => match row.try_get(i)? { Some(val) => { let val: i16 = val; Value::int32(val) } - None => Value::Int32(None), + None => Value::null_int32(), }, PostgresType::INT4 => match row.try_get(i)? { Some(val) => { let val: i32 = val; Value::int32(val) } - None => Value::Int32(None), + None => Value::null_int32(), }, PostgresType::INT8 => match row.try_get(i)? { Some(val) => { let val: i64 = val; Value::int64(val) } - None => Value::Int64(None), + None => Value::null_int64(), }, PostgresType::FLOAT4 => match row.try_get(i)? { Some(val) => { let val: f32 = val; Value::float(val) } - None => Value::Float(None), + None => Value::null_float(), }, PostgresType::FLOAT8 => match row.try_get(i)? { Some(val) => { let val: f64 = val; Value::double(val) } - None => Value::Double(None), + None => Value::null_double(), }, PostgresType::BYTEA => match row.try_get(i)? { Some(val) => { let val: &[u8] = val; Value::bytes(val.to_owned()) } - None => Value::Bytes(None), + None => Value::null_bytes(), }, PostgresType::BYTEA_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec>> = val; - let byteas = val.into_iter().map(|b| Value::Bytes(b.map(Into::into))); + let byteas = val.into_iter().map(|b| ValueType::Bytes(b.map(Into::into))); Value::array(byteas) } - None => Value::Array(None), + None => Value::null_array(), }, #[cfg(feature = "bigdecimal")] PostgresType::NUMERIC => { let dw: Option = row.try_get(i)?; - Value::Numeric(dw.map(|dw| dw.0)) + ValueType::Numeric(dw.map(|dw| dw.0)).into_value() } #[cfg(feature = "bigdecimal")] PostgresType::MONEY => match row.try_get(i)? { @@ -235,7 +235,7 @@ impl GetRow for PostgresRow { let val: NaiveMoney = val; Value::numeric(val.0) } - None => Value::Numeric(None), + None => Value::null_numeric(), }, PostgresType::TIMESTAMP => match row.try_get(i)? 
{ Some(val) => { @@ -243,29 +243,29 @@ impl GetRow for PostgresRow { let dt = DateTime::::from_utc(ts, Utc); Value::datetime(dt) } - None => Value::DateTime(None), + None => Value::null_datetime(), }, PostgresType::TIMESTAMPTZ => match row.try_get(i)? { Some(val) => { let ts: DateTime = val; Value::datetime(ts) } - None => Value::DateTime(None), + None => Value::null_datetime(), }, PostgresType::DATE => match row.try_get(i)? { Some(val) => Value::date(val), - None => Value::Date(None), + None => Value::null_date(), }, PostgresType::TIME => match row.try_get(i)? { Some(val) => Value::time(val), - None => Value::Time(None), + None => Value::null_time(), }, PostgresType::TIMETZ => match row.try_get(i)? { Some(val) => { let time: TimeTz = val; Value::time(time.0) } - None => Value::Time(None), + None => Value::null_time(), }, #[cfg(feature = "uuid")] PostgresType::UUID => match row.try_get(i)? { @@ -273,72 +273,72 @@ impl GetRow for PostgresRow { let val: Uuid = val; Value::uuid(val) } - None => Value::Uuid(None), + None => ValueType::Uuid(None).into_value(), }, #[cfg(feature = "uuid")] PostgresType::UUID_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let val = val.into_iter().map(Value::Uuid); + let val = val.into_iter().map(ValueType::Uuid); Value::array(val) } - None => Value::Array(None), + None => Value::null_array(), }, - PostgresType::JSON | PostgresType::JSONB => Value::Json(row.try_get(i)?), + PostgresType::JSON | PostgresType::JSONB => ValueType::Json(row.try_get(i)?).into_value(), PostgresType::INT2_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let ints = val.into_iter().map(|i| Value::Int32(i.map(|i| i as i32))); + let ints = val.into_iter().map(|i| ValueType::Int32(i.map(|i| i as i32))); Value::array(ints) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::INT4_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let ints = val.into_iter().map(Value::Int32); + let ints = val.into_iter().map(ValueType::Int32); Value::array(ints) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::INT8_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let ints = val.into_iter().map(Value::Int64); + let ints = val.into_iter().map(ValueType::Int64); Value::array(ints) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::FLOAT4_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let floats = val.into_iter().map(Value::Float); + let floats = val.into_iter().map(ValueType::Float); Value::array(floats) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::FLOAT8_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let floats = val.into_iter().map(Value::Double); + let floats = val.into_iter().map(ValueType::Double); Value::array(floats) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::BOOL_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let bools = val.into_iter().map(Value::Boolean); + let bools = val.into_iter().map(ValueType::Boolean); Value::array(bools) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::TIMESTAMP_ARRAY => match row.try_get(i)? 
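A note on the TIMESTAMP arms above: Postgres stores no zone for that type, so the naive value is tagged as UTC on the way out. A small sketch using the same chrono calls as the patch:

use chrono::{DateTime, NaiveDateTime, Utc};

// What the TIMESTAMP arm does with a zone-less database value.
let naive = NaiveDateTime::from_timestamp_opt(0, 0).unwrap();
let dt: DateTime<Utc> = DateTime::<Utc>::from_utc(naive, Utc);
assert_eq!(dt.to_rfc3339(), "1970-01-01T00:00:00+00:00");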
{ Some(val) => { @@ -346,11 +346,11 @@ impl GetRow for PostgresRow { let dates = val .into_iter() - .map(|dt| Value::DateTime(dt.map(|dt| DateTime::::from_utc(dt, Utc)))); + .map(|dt| ValueType::DateTime(dt.map(|dt| DateTime::::from_utc(dt, Utc)))); Value::array(dates) } - None => Value::Array(None), + None => Value::null_array(), }, #[cfg(feature = "bigdecimal")] PostgresType::NUMERIC_ARRAY => match row.try_get(i)? { @@ -359,11 +359,11 @@ impl GetRow for PostgresRow { let decimals = val .into_iter() - .map(|dec| Value::Numeric(dec.map(|dec| dec.0.to_string().parse().unwrap()))); + .map(|dec| ValueType::Numeric(dec.map(|dec| dec.0.to_string().parse().unwrap()))); Value::array(decimals) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::TEXT_ARRAY | PostgresType::NAME_ARRAY | PostgresType::VARCHAR_ARRAY => { match row.try_get(i)? { @@ -372,142 +372,140 @@ impl GetRow for PostgresRow { Value::array(strings.into_iter().map(|s| s.map(|s| s.to_string()))) } - None => Value::Array(None), + None => Value::null_array(), } } #[cfg(feature = "bigdecimal")] PostgresType::MONEY_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let nums = val.into_iter().map(|num| Value::Numeric(num.map(|num| num.0))); + let nums = val.into_iter().map(|num| ValueType::Numeric(num.map(|num| num.0))); Value::array(nums) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::OID_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let nums = val.into_iter().map(|oid| Value::Int64(oid.map(|oid| oid as i64))); + let nums = val.into_iter().map(|oid| ValueType::Int64(oid.map(|oid| oid as i64))); Value::array(nums) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::TIMESTAMPTZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec>> = val; - let dates = val.into_iter().map(Value::DateTime); + let dates = val.into_iter().map(ValueType::DateTime); Value::array(dates) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::DATE_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let dates = val.into_iter().map(Value::Date); + let dates = val.into_iter().map(ValueType::Date); Value::array(dates) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::TIME_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let times = val.into_iter().map(Value::Time); + let times = val.into_iter().map(ValueType::Time); Value::array(times) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::TIMETZ_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let timetzs = val.into_iter().map(|time| Value::Time(time.map(|time| time.0))); + let timetzs = val.into_iter().map(|time| ValueType::Time(time.map(|time| time.0))); Value::array(timetzs) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::JSON_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let jsons = val.into_iter().map(Value::Json); + let jsons = val.into_iter().map(ValueType::Json); Value::array(jsons) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::JSONB_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let jsons = val.into_iter().map(Value::Json); + let jsons = val.into_iter().map(ValueType::Json); Value::array(jsons) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::OID => match row.try_get(i)? 
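The NULL arms in these hunks spell the same construction three ways; all are equivalent under the API this patch introduces. A small sketch, assuming the constructors shown in the diff:

use quaint::{Value, ValueType};

// Dedicated constructor, explicit ValueType, and From all build the same value.
let a = Value::null_text();
let b = ValueType::Text(None).into_value();
let c = Value::from(ValueType::Text(None));
assert_eq!(a, b);
assert_eq!(b, c);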
{ Some(val) => { let val: u32 = val; Value::int64(val) } - None => Value::Int64(None), + None => Value::null_int64(), }, PostgresType::CHAR => match row.try_get(i)? { Some(val) => { let val: i8 = val; Value::character((val as u8) as char) } - None => Value::Char(None), + None => Value::null_character(), }, PostgresType::INET | PostgresType::CIDR => match row.try_get(i)? { Some(val) => { let val: std::net::IpAddr = val; Value::text(val.to_string()) } - None => Value::Text(None), + None => Value::null_text(), }, PostgresType::INET_ARRAY | PostgresType::CIDR_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; let addrs = val .into_iter() - .map(|ip| Value::Text(ip.map(|ip| ip.to_string().into()))); + .map(|ip| ValueType::Text(ip.map(|ip| ip.to_string().into()))); Value::array(addrs) } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::BIT | PostgresType::VARBIT => match row.try_get(i)? { Some(val) => { let val: BitVec = val; Value::text(bits_to_string(&val)?) } - None => Value::Text(None), + None => Value::null_text(), }, PostgresType::BIT_ARRAY | PostgresType::VARBIT_ARRAY => match row.try_get(i)? { Some(val) => { let val: Vec> = val; - let stringified = val - .into_iter() + val.into_iter() .map(|bits| match bits { - Some(bits) => bits_to_string(&bits).map(Value::text), - None => Ok(Value::Text(None)), + Some(bits) => bits_to_string(&bits).map(|s| ValueType::Text(Some(s.into()))), + None => Ok(ValueType::Text(None)), }) - .collect::>>()?; - - Value::array(stringified) + .collect::>>() + .map(Value::array)? } - None => Value::Array(None), + None => Value::null_array(), }, PostgresType::XML => match row.try_get(i)? { Some(val) => { let val: XmlString = val; Value::xml(val.0) } - None => Value::Xml(None), + None => Value::null_xml(), }, PostgresType::XML_ARRAY => match row.try_get(i)? { Some(val) => { @@ -516,7 +514,7 @@ impl GetRow for PostgresRow { Value::array(xmls) } - None => Value::Array(None), + None => Value::null_array(), }, ref x => match x.kind() { Kind::Enum => match row.try_get(i)? { @@ -525,26 +523,28 @@ impl GetRow for PostgresRow { Value::enum_variant(val.value) } - None => Value::Enum(None, None), + None => Value::null_enum(), }, Kind::Array(inner) => match inner.kind() { Kind::Enum => match row.try_get(i)? 
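On the OID and CHAR arms above: a Postgres OID is an unsigned 32-bit integer, so it is read into `Value::int64` rather than `int32`, which would overflow for OIDs past `i32::MAX`:

// u32::MAX does not fit in i32 but always fits in i64.
let oid = u32::MAX;
assert!(i32::try_from(oid).is_err());
assert_eq!(oid as i64, 4_294_967_295_i64);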
{ Some(val) => { let val: Vec> = val; - let variants = val.into_iter().map(|x| Value::Enum(x.map(|x| x.value.into()), None)); + let variants = val + .into_iter() + .map(|x| ValueType::Enum(x.map(|x| x.value.into()), None)); Ok(Value::array(variants)) } - None => Ok(Value::Array(None)), + None => Ok(Value::null_array()), }, _ => match row.try_get(i) { Ok(Some(val)) => { let val: Vec> = val; - let strings = val.into_iter().map(|str| Value::Text(str.map(Into::into))); + let strings = val.into_iter().map(|str| ValueType::Text(str.map(Into::into))); Ok(Value::array(strings)) } - Ok(None) => Ok(Value::Array(None)), + Ok(None) => Ok(Value::null_array()), Err(err) => { if err.source().map(|err| err.is::()).unwrap_or(false) { let kind = ErrorKind::UnsupportedColumnType { @@ -564,7 +564,7 @@ impl GetRow for PostgresRow { Ok(Value::text(val)) } - Ok(None) => Ok(Value::Text(None)), + Ok(None) => Ok(Value::from(ValueType::Text(None))), Err(err) => { if err.source().map(|err| err.is::()).unwrap_or(false) { let kind = ErrorKind::UnsupportedColumnType { @@ -606,8 +606,8 @@ impl<'a> ToSql for Value<'a> { ty: &PostgresType, out: &mut BytesMut, ) -> Result> { - let res = match (self, ty) { - (Value::Int32(integer), &PostgresType::INT2) => match integer { + let res = match (&self.typed, ty) { + (ValueType::Int32(integer), &PostgresType::INT2) => match integer { Some(i) => { let integer = i16::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -621,9 +621,9 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int32(integer), &PostgresType::INT4) => integer.map(|integer| integer.to_sql(ty, out)), - (Value::Int32(integer), &PostgresType::INT8) => integer.map(|integer| (integer as i64).to_sql(ty, out)), - (Value::Int64(integer), &PostgresType::INT2) => match integer { + (ValueType::Int32(integer), &PostgresType::INT4) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Int32(integer), &PostgresType::INT8) => integer.map(|integer| (integer as i64).to_sql(ty, out)), + (ValueType::Int64(integer), &PostgresType::INT2) => match integer { Some(i) => { let integer = i16::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -637,7 +637,7 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int64(integer), &PostgresType::INT4) => match integer { + (ValueType::Int64(integer), &PostgresType::INT4) => match integer { Some(i) => { let integer = i32::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -651,20 +651,24 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int64(integer), &PostgresType::INT8) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Int64(integer), &PostgresType::INT8) => integer.map(|integer| integer.to_sql(ty, out)), #[cfg(feature = "bigdecimal")] - (Value::Int32(integer), &PostgresType::NUMERIC) => integer + (ValueType::Int32(integer), &PostgresType::NUMERIC) => integer .map(|integer| BigDecimal::from_i32(integer).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), #[cfg(feature = "bigdecimal")] - (Value::Int64(integer), &PostgresType::NUMERIC) => integer + (ValueType::Int64(integer), &PostgresType::NUMERIC) => integer .map(|integer| BigDecimal::from_i64(integer).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - (Value::Int32(integer), &PostgresType::TEXT) => integer.map(|integer| format!("{integer}").to_sql(ty, out)), - (Value::Int64(integer), &PostgresType::TEXT) => integer.map(|integer| format!("{integer}").to_sql(ty, out)), - (Value::Int32(integer), 
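Worth noting in the INT2/INT4 arms: narrowing is range-checked, so an out-of-range parameter becomes a conversion error instead of silently wrapping:

// 70_000 fits INT4 but not INT2; try_from reports the overflow.
assert!(i16::try_from(70_000_i64).is_err());
assert!(i32::try_from(70_000_i64).is_ok());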
&PostgresType::OID) => match integer { + (ValueType::Int32(integer), &PostgresType::TEXT) => { + integer.map(|integer| format!("{integer}").to_sql(ty, out)) + } + (ValueType::Int64(integer), &PostgresType::TEXT) => { + integer.map(|integer| format!("{integer}").to_sql(ty, out)) + } + (ValueType::Int32(integer), &PostgresType::OID) => match integer { Some(i) => { let integer = u32::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -678,7 +682,7 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int64(integer), &PostgresType::OID) => match integer { + (ValueType::Int64(integer), &PostgresType::OID) => match integer { Some(i) => { let integer = u32::try_from(*i).map_err(|_| { let kind = ErrorKind::conversion(format!( @@ -692,43 +696,43 @@ impl<'a> ToSql for Value<'a> { } _ => None, }, - (Value::Int32(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), - (Value::Int64(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), - (Value::Float(float), &PostgresType::FLOAT8) => float.map(|float| (float as f64).to_sql(ty, out)), + (ValueType::Int32(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Int64(integer), _) => integer.map(|integer| integer.to_sql(ty, out)), + (ValueType::Float(float), &PostgresType::FLOAT8) => float.map(|float| (float as f64).to_sql(ty, out)), #[cfg(feature = "bigdecimal")] - (Value::Float(float), &PostgresType::NUMERIC) => float + (ValueType::Float(float), &PostgresType::NUMERIC) => float .map(|float| BigDecimal::from_f32(float).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - (Value::Float(float), _) => float.map(|float| float.to_sql(ty, out)), - (Value::Double(double), &PostgresType::FLOAT4) => double.map(|double| (double as f32).to_sql(ty, out)), + (ValueType::Float(float), _) => float.map(|float| float.to_sql(ty, out)), + (ValueType::Double(double), &PostgresType::FLOAT4) => double.map(|double| (double as f32).to_sql(ty, out)), #[cfg(feature = "bigdecimal")] - (Value::Double(double), &PostgresType::NUMERIC) => double + (ValueType::Double(double), &PostgresType::NUMERIC) => double .map(|double| BigDecimal::from_f64(double).unwrap()) .map(DecimalWrapper) .map(|dw| dw.to_sql(ty, out)), - (Value::Double(double), _) => double.map(|double| double.to_sql(ty, out)), + (ValueType::Double(double), _) => double.map(|double| double.to_sql(ty, out)), #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::FLOAT4) => decimal.as_ref().map(|decimal| { + (ValueType::Numeric(decimal), &PostgresType::FLOAT4) => decimal.as_ref().map(|decimal| { let f = decimal.to_string().parse::().expect("decimal to f32 conversion"); f.to_sql(ty, out) }), #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::FLOAT8) => decimal.as_ref().map(|decimal| { + (ValueType::Numeric(decimal), &PostgresType::FLOAT8) => decimal.as_ref().map(|decimal| { let f = decimal.to_string().parse::().expect("decimal to f64 conversion"); f.to_sql(ty, out) }), #[cfg(feature = "bigdecimal")] - (Value::Array(values), &PostgresType::FLOAT4_ARRAY) => values.as_ref().map(|values| { + (ValueType::Array(values), &PostgresType::FLOAT4_ARRAY) => values.as_ref().map(|values| { let mut floats = Vec::with_capacity(values.len()); for value in values.iter() { - let float = match value { - Value::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), - Value::Int64(n) => n.map(|n| n as f32), - Value::Float(f) => *f, - Value::Double(d) => d.map(|d| d as f32), - v if v.is_null() => None, + let float = 
match &value.typed { + ValueType::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), + ValueType::Int64(n) => n.map(|n| n as f32), + ValueType::Float(f) => *f, + ValueType::Double(d) => d.map(|d| d as f32), + _ if value.is_null() => None, v => { let kind = ErrorKind::conversion(format!( "Couldn't add value of type `{v:?}` into a float array." @@ -744,15 +748,15 @@ impl<'a> ToSql for Value<'a> { floats.to_sql(ty, out) }), #[cfg(feature = "bigdecimal")] - (Value::Array(values), &PostgresType::FLOAT8_ARRAY) => values.as_ref().map(|values| { + (ValueType::Array(values), &PostgresType::FLOAT8_ARRAY) => values.as_ref().map(|values| { let mut floats = Vec::with_capacity(values.len()); for value in values.iter() { - let float = match value { - Value::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), - Value::Int64(n) => n.map(|n| n as f64), - Value::Float(f) => f.map(|f| f as f64), - Value::Double(d) => *d, + let float = match &value.typed { + ValueType::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::().ok()), + ValueType::Int64(n) => n.map(|n| n as f64), + ValueType::Float(f) => f.map(|f| f as f64), + ValueType::Double(d) => *d, v if v.is_null() => None, v => { let kind = ErrorKind::conversion(format!( @@ -769,7 +773,7 @@ impl<'a> ToSql for Value<'a> { floats.to_sql(ty, out) }), #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::MONEY) => decimal.as_ref().map(|decimal| { + (ValueType::Numeric(decimal), &PostgresType::MONEY) => decimal.as_ref().map(|decimal| { let decimal = (decimal * BigInt::from_i32(100).unwrap()).round(0); let i = decimal.to_i64().ok_or_else(|| { @@ -780,20 +784,20 @@ impl<'a> ToSql for Value<'a> { i.to_sql(ty, out) }), #[cfg(feature = "bigdecimal")] - (Value::Numeric(decimal), &PostgresType::NUMERIC) => decimal + (ValueType::Numeric(decimal), &PostgresType::NUMERIC) => decimal .as_ref() .map(|decimal| DecimalWrapper(decimal.clone()).to_sql(ty, out)), #[cfg(feature = "bigdecimal")] - (Value::Numeric(float), _) => float + (ValueType::Numeric(float), _) => float .as_ref() .map(|float| DecimalWrapper(float.clone()).to_sql(ty, out)), #[cfg(feature = "uuid")] - (Value::Text(string), &PostgresType::UUID) => string.as_ref().map(|string| { + (ValueType::Text(string), &PostgresType::UUID) => string.as_ref().map(|string| { let parsed_uuid: Uuid = string.parse()?; parsed_uuid.to_sql(ty, out) }), #[cfg(feature = "uuid")] - (Value::Array(values), &PostgresType::UUID_ARRAY) => values.as_ref().map(|values| { + (ValueType::Array(values), &PostgresType::UUID_ARRAY) => values.as_ref().map(|values| { let parsed_uuid: Vec> = values .iter() .map(>::try_from) @@ -801,85 +805,83 @@ impl<'a> ToSql for Value<'a> { parsed_uuid.to_sql(ty, out) }), - (Value::Text(string), &PostgresType::INET) | (Value::Text(string), &PostgresType::CIDR) => { + (ValueType::Text(string), &PostgresType::INET) | (ValueType::Text(string), &PostgresType::CIDR) => { string.as_ref().map(|string| { let parsed_ip_addr: std::net::IpAddr = string.parse()?; parsed_ip_addr.to_sql(ty, out) }) } - (Value::Array(values), &PostgresType::INET_ARRAY) | (Value::Array(values), &PostgresType::CIDR_ARRAY) => { - values.as_ref().map(|values| { - let parsed_ip_addr: Vec> = values - .iter() - .map(>::try_from) - .collect::>()?; + (ValueType::Array(values), &PostgresType::INET_ARRAY) + | (ValueType::Array(values), &PostgresType::CIDR_ARRAY) => values.as_ref().map(|values| { + let parsed_ip_addr: Vec> = values + .iter() + .map(>::try_from) + .collect::>()?; - 
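The FLOAT4_ARRAY and FLOAT8_ARRAY arms accept mixed numeric `ValueType`s and widen each element. A sketch of the f64 case, with `element_to_f64` as a hypothetical helper name and the `bigdecimal` feature assumed for the Numeric arm:

use quaint::{Value, ValueType};

fn element_to_f64(value: &Value<'_>) -> Option<f64> {
    match &value.typed {
        ValueType::Numeric(n) => n.as_ref().and_then(|n| n.to_string().parse::<f64>().ok()),
        ValueType::Int64(n) => n.map(|n| n as f64),
        ValueType::Float(f) => f.map(|f| f as f64),
        ValueType::Double(d) => *d,
        // Typed NULLs pass through; other variants error upstream in the real code.
        _ => None,
    }
}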
parsed_ip_addr.to_sql(ty, out) - }) - } - (Value::Text(string), &PostgresType::JSON) | (Value::Text(string), &PostgresType::JSONB) => string + parsed_ip_addr.to_sql(ty, out) + }), + (ValueType::Text(string), &PostgresType::JSON) | (ValueType::Text(string), &PostgresType::JSONB) => string .as_ref() .map(|string| serde_json::from_str::(string)?.to_sql(ty, out)), - (Value::Text(string), &PostgresType::BIT) | (Value::Text(string), &PostgresType::VARBIT) => { + (ValueType::Text(string), &PostgresType::BIT) | (ValueType::Text(string), &PostgresType::VARBIT) => { string.as_ref().map(|string| { let bits: BitVec = string_to_bits(string)?; bits.to_sql(ty, out) }) } - (Value::Text(string), _) => string.as_ref().map(|ref string| string.to_sql(ty, out)), - (Value::Array(values), &PostgresType::BIT_ARRAY) | (Value::Array(values), &PostgresType::VARBIT_ARRAY) => { - values.as_ref().map(|values| { - let bitvecs: Vec> = values - .iter() - .map(>::try_from) - .collect::>>()?; + (ValueType::Text(string), _) => string.as_ref().map(|ref string| string.to_sql(ty, out)), + (ValueType::Array(values), &PostgresType::BIT_ARRAY) + | (ValueType::Array(values), &PostgresType::VARBIT_ARRAY) => values.as_ref().map(|values| { + let bitvecs: Vec> = values + .iter() + .map(|value| value.try_into()) + .collect::>>()?; - bitvecs.to_sql(ty, out) - }) - } - (Value::Bytes(bytes), _) => bytes.as_ref().map(|bytes| bytes.as_ref().to_sql(ty, out)), - (Value::Enum(string, _), _) => string.as_ref().map(|string| { + bitvecs.to_sql(ty, out) + }), + (ValueType::Bytes(bytes), _) => bytes.as_ref().map(|bytes| bytes.as_ref().to_sql(ty, out)), + (ValueType::Enum(string, _), _) => string.as_ref().map(|string| { out.extend_from_slice(string.as_bytes()); Ok(IsNull::No) }), - (Value::Boolean(boo), _) => boo.map(|boo| boo.to_sql(ty, out)), - (Value::Char(c), _) => c.map(|c| (c as i8).to_sql(ty, out)), - (Value::Array(vec), typ) if matches!(typ.kind(), Kind::Array(_)) => { + (ValueType::Boolean(boo), _) => boo.map(|boo| boo.to_sql(ty, out)), + (ValueType::Char(c), _) => c.map(|c| (c as i8).to_sql(ty, out)), + (ValueType::Array(vec), typ) if matches!(typ.kind(), Kind::Array(_)) => { vec.as_ref().map(|vec| vec.to_sql(ty, out)) } - (Value::EnumArray(variants, _), typ) if matches!(typ.kind(), Kind::Array(_)) => variants + (ValueType::EnumArray(variants, _), typ) if matches!(typ.kind(), Kind::Array(_)) => variants .as_ref() .map(|vec| vec.iter().map(|val| val.as_ref()).collect::>().to_sql(ty, out)), - (Value::EnumArray(variants, _), typ) => { + (ValueType::EnumArray(variants, _), typ) => { let kind = ErrorKind::conversion(format!( "Couldn't serialize value `{variants:?}` into a `{typ}`. Value is a list but `{typ}` is not." )); return Err(Error::builder(kind).build().into()); } - (Value::Array(vec), typ) => { + (ValueType::Array(vec), typ) => { let kind = ErrorKind::conversion(format!( "Couldn't serialize value `{vec:?}` into a `{typ}`. Value is a list but `{typ}` is not." 
)); return Err(Error::builder(kind).build().into()); } - (Value::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), - (Value::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), + (ValueType::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), + (ValueType::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)), #[cfg(feature = "uuid")] - (Value::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)), - (Value::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)), - (Value::Date(value), _) => value.map(|value| value.to_sql(ty, out)), - (Value::Time(value), _) => value.map(|value| value.to_sql(ty, out)), - (Value::DateTime(value), &PostgresType::TIME) => value.map(|value| value.time().to_sql(ty, out)), - (Value::DateTime(value), &PostgresType::TIMETZ) => value.map(|value| { + (ValueType::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)), + (ValueType::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)), + (ValueType::Date(value), _) => value.map(|value| value.to_sql(ty, out)), + (ValueType::Time(value), _) => value.map(|value| value.to_sql(ty, out)), + (ValueType::DateTime(value), &PostgresType::TIME) => value.map(|value| value.time().to_sql(ty, out)), + (ValueType::DateTime(value), &PostgresType::TIMETZ) => value.map(|value| { let result = value.time().to_sql(ty, out)?; // We assume UTC. see https://www.postgresql.org/docs/9.5/datatype-datetime.html out.extend_from_slice(&[0; 4]); Ok(result) }), - (Value::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), + (ValueType::DateTime(value), _) => value.map(|value| value.naive_utc().to_sql(ty, out)), }; match res { @@ -935,12 +937,18 @@ impl<'a> TryFrom<&Value<'a>> for Option { fn try_from(value: &Value<'a>) -> Result, Self::Error> { match value { - val @ Value::Text(Some(_)) => { + val @ Value { + typed: ValueType::Text(Some(_)), + .. + } => { let text = val.as_str().unwrap(); string_to_bits(text).map(Option::Some) } - val @ Value::Bytes(Some(_)) => { + val @ Value { + typed: ValueType::Bytes(Some(_)), + .. + } => { let text = val.as_str().unwrap(); string_to_bits(text).map(Option::Some) diff --git a/quaint/src/connector/postgres/error.rs b/quaint/src/connector/postgres/error.rs index 4f7bb23a5c85..dc8699875ea8 100644 --- a/quaint/src/connector/postgres/error.rs +++ b/quaint/src/connector/postgres/error.rs @@ -282,7 +282,7 @@ impl From for Error { }; builder.set_original_message(reason); - return builder.build(); + builder.build() } // sigh... 
// https://github.com/sfackler/rust-postgres/blob/0c84ed9f8201f4e5b4803199a24afa2c9f3723b2/tokio-postgres/src/connect_tls.rs#L37 "error performing TLS handshake: server does not support TLS" => { @@ -295,7 +295,7 @@ impl From for Error { }; builder.set_original_message(reason); - return builder.build(); + builder.build() } // double sigh _ => { let code = code.map(|c| c.to_string()); @@ -306,7 +306,7 @@ impl From for Error { }; builder.set_original_message(reason); - return builder.build(); + builder.build() } } } diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs index dade118596c6..4f6dea515621 100644 --- a/quaint/src/connector/sqlite/conversion.rs +++ b/quaint/src/connector/sqlite/conversion.rs @@ -1,7 +1,7 @@ use std::convert::TryFrom; use crate::{ - ast::Value, + ast::{Value, ValueType}, connector::{ queryable::{GetRow, ToColumnNames}, TypeIdentifier, @@ -138,17 +138,17 @@ impl<'a> GetRow for SqliteRow<'a> { let pv = match self.get_ref_unwrap(i) { ValueRef::Null => match column { // NOTE: A value without decl_type would be Int32(None) - c if c.is_int32() | c.is_null() => Value::Int32(None), - c if c.is_int64() => Value::Int64(None), - c if c.is_text() => Value::Text(None), - c if c.is_bytes() => Value::Bytes(None), - c if c.is_float() => Value::Float(None), - c if c.is_double() => Value::Double(None), + c if c.is_int32() | c.is_null() => Value::null_int32(), + c if c.is_int64() => Value::null_int64(), + c if c.is_text() => Value::null_text(), + c if c.is_bytes() => Value::null_bytes(), + c if c.is_float() => Value::null_float(), + c if c.is_double() => Value::null_double(), #[cfg(feature = "bigdecimal")] - c if c.is_real() => Value::Numeric(None), - c if c.is_datetime() => Value::DateTime(None), - c if c.is_date() => Value::Date(None), - c if c.is_bool() => Value::Boolean(None), + c if c.is_real() => Value::null_numeric(), + c if c.is_datetime() => Value::null_datetime(), + c if c.is_date() => Value::null_date(), + c if c.is_bool() => Value::null_boolean(), c => match c.decl_type() { Some(n) => { let msg = format!("Value {n} not supported"); @@ -157,7 +157,7 @@ impl<'a> GetRow for SqliteRow<'a> { return Err(Error::builder(kind).build()); } // When we don't know what to do, the default value would be Int32(None) - None => Value::Int32(None), + None => Value::null_int32(), }, }, ValueRef::Integer(i) => { @@ -245,17 +245,17 @@ impl<'a> ToColumnNames for SqliteRows<'a> { impl<'a> ToSql for Value<'a> { fn to_sql(&self) -> Result { - let value = match self { - Value::Int32(integer) => integer.map(ToSqlOutput::from), - Value::Int64(integer) => integer.map(ToSqlOutput::from), - Value::Float(float) => float.map(|f| f as f64).map(ToSqlOutput::from), - Value::Double(double) => double.map(ToSqlOutput::from), - Value::Text(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), - Value::Enum(cow, _) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())), - Value::Boolean(boo) => boo.map(ToSqlOutput::from), - Value::Char(c) => c.map(|c| ToSqlOutput::from(c as u8)), - Value::Bytes(bytes) => bytes.as_ref().map(|bytes| ToSqlOutput::from(bytes.as_ref())), - Value::Array(_) | Value::EnumArray(_, _) => { + let value = match &self.typed { + ValueType::Int32(integer) => integer.map(ToSqlOutput::from), + ValueType::Int64(integer) => integer.map(ToSqlOutput::from), + ValueType::Float(float) => float.map(|f| f as f64).map(ToSqlOutput::from), + ValueType::Double(double) => double.map(ToSqlOutput::from), + ValueType::Text(cow) => 
cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())),
+            ValueType::Enum(cow, _) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())),
+            ValueType::Boolean(boo) => boo.map(ToSqlOutput::from),
+            ValueType::Char(c) => c.map(|c| ToSqlOutput::from(c as u8)),
+            ValueType::Bytes(bytes) => bytes.as_ref().map(|bytes| ToSqlOutput::from(bytes.as_ref())),
+            ValueType::Array(_) | ValueType::EnumArray(_, _) => {
                 let msg = "Arrays are not supported in SQLite.";
                 let kind = ErrorKind::conversion(msg);
@@ -265,24 +265,24 @@
                 return Err(RusqlError::ToSqlConversionFailure(Box::new(builder.build())));
             }
             #[cfg(feature = "bigdecimal")]
-            Value::Numeric(d) => d
+            ValueType::Numeric(d) => d
                 .as_ref()
                 .map(|d| ToSqlOutput::from(d.to_string().parse::<f64>().expect("BigDecimal is not a f64."))),
-            Value::Json(value) => value.as_ref().map(|value| {
+            ValueType::Json(value) => value.as_ref().map(|value| {
                 let stringified = serde_json::to_string(value)
                     .map_err(|err| RusqlError::ToSqlConversionFailure(Box::new(err)))
                     .unwrap();
                 ToSqlOutput::from(stringified)
             }),
-            Value::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())),
+            ValueType::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())),
             #[cfg(feature = "uuid")]
-            Value::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())),
-            Value::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())),
-            Value::Date(date) => date
+            ValueType::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())),
+            ValueType::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())),
+            ValueType::Date(date) => date
                 .and_then(|date| date.and_hms_opt(0, 0, 0))
                 .map(|dt| ToSqlOutput::from(dt.timestamp_millis())),
-            Value::Time(time) => time
+            ValueType::Time(time) => time
                 .and_then(|time| chrono::NaiveDate::from_ymd_opt(1970, 1, 1).map(|d| (d, time)))
                 .and_then(|(date, time)| {
                     use chrono::Timelike;
diff --git a/quaint/src/lib.rs b/quaint/src/lib.rs
index 1fa817fddf55..5472c12885a4 100644
--- a/quaint/src/lib.rs
+++ b/quaint/src/lib.rs
@@ -133,6 +133,6 @@ pub mod single;
 mod tests;
 pub mod visitor;
 
-pub use ast::Value;
+pub use ast::{Value, ValueType};
 
 pub type Result<T> = std::result::Result<T, error::Error>;
diff --git a/quaint/src/macros.rs b/quaint/src/macros.rs
index 6289fe0bac23..cfb52bc0c6e1 100644
--- a/quaint/src/macros.rs
+++ b/quaint/src/macros.rs
@@ -88,21 +88,33 @@ macro_rules! val {
 macro_rules!
value { ($target:ident: $kind:ty,$paramkind:ident,$that:expr) => { - impl<'a> From<$kind> for crate::ast::Value<'a> { + impl<'a> From<$kind> for crate::ast::ValueType<'a> { fn from(that: $kind) -> Self { let $target = that; - crate::ast::Value::$paramkind(Some($that)) + crate::ast::ValueType::$paramkind(Some($that)) } } - impl<'a> From> for crate::ast::Value<'a> { + impl<'a> From> for crate::ast::ValueType<'a> { fn from(that: Option<$kind>) -> Self { match that { - Some(val) => crate::ast::Value::from(val), - None => crate::ast::Value::$paramkind(None), + Some(val) => crate::ast::ValueType::from(val), + None => crate::ast::ValueType::$paramkind(None), } } } + + impl<'a> From<$kind> for crate::ast::Value<'a> { + fn from(that: $kind) -> Self { + crate::ast::Value::from(crate::ast::ValueType::from(that)) + } + } + + impl<'a> From> for crate::ast::Value<'a> { + fn from(that: Option<$kind>) -> Self { + crate::ast::Value::from(crate::ast::ValueType::from(that)) + } + } }; } diff --git a/quaint/src/serde.rs b/quaint/src/serde.rs index c88ff4ae520b..092ab344633d 100644 --- a/quaint/src/serde.rs +++ b/quaint/src/serde.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use crate::{ - ast::{EnumVariant, Value}, + ast::{EnumVariant, Value, ValueType}, connector::{ResultRow, ResultSet}, error::{Error, ErrorKind}, }; @@ -76,7 +76,7 @@ impl<'de> Deserializer<'de> for RowDeserializer { let kvs = columns.iter().enumerate().map(move |(v, k)| { // The unwrap is safe if `columns` is correct. let value = values.get_mut(v).unwrap(); - let taken_value = std::mem::replace(value, Value::Int64(None)); + let taken_value = std::mem::replace(value, Value::from(ValueType::Int64(None))); (k.as_str(), taken_value) }); @@ -115,69 +115,69 @@ impl<'de> Deserializer<'de> for ValueDeserializer<'de> { type Error = DeserializeError; fn deserialize_any>(self, visitor: V) -> Result { - match self.0 { - Value::Text(Some(s)) => visitor.visit_string(s.into_owned()), - Value::Text(None) => visitor.visit_none(), - Value::Bytes(Some(bytes)) => visitor.visit_bytes(bytes.as_ref()), - Value::Bytes(None) => visitor.visit_none(), - Value::Enum(Some(s), _) => visitor.visit_string(s.into_owned()), - Value::Enum(None, _) => visitor.visit_none(), - Value::EnumArray(Some(variants), _) => { + match self.0.typed { + ValueType::Text(Some(s)) => visitor.visit_string(s.into_owned()), + ValueType::Text(None) => visitor.visit_none(), + ValueType::Bytes(Some(bytes)) => visitor.visit_bytes(bytes.as_ref()), + ValueType::Bytes(None) => visitor.visit_none(), + ValueType::Enum(Some(s), _) => visitor.visit_string(s.into_owned()), + ValueType::Enum(None, _) => visitor.visit_none(), + ValueType::EnumArray(Some(variants), _) => { let deserializer = serde::de::value::SeqDeserializer::new(variants.into_iter()); visitor.visit_seq(deserializer) } - Value::EnumArray(None, _) => visitor.visit_none(), - Value::Int32(Some(i)) => visitor.visit_i32(i), - Value::Int32(None) => visitor.visit_none(), - Value::Int64(Some(i)) => visitor.visit_i64(i), - Value::Int64(None) => visitor.visit_none(), - Value::Boolean(Some(b)) => visitor.visit_bool(b), - Value::Boolean(None) => visitor.visit_none(), - Value::Char(Some(c)) => visitor.visit_char(c), - Value::Char(None) => visitor.visit_none(), - Value::Float(Some(num)) => visitor.visit_f64(num as f64), - Value::Float(None) => visitor.visit_none(), - Value::Double(Some(num)) => visitor.visit_f64(num), - Value::Double(None) => visitor.visit_none(), + ValueType::EnumArray(None, _) => visitor.visit_none(), + ValueType::Int32(Some(i)) => 
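With the expanded `value!` macro above, a `From<T>` impl lands on `ValueType` first and is then lifted into `Value`, and `Option<T>` maps `None` to a typed NULL. A sketch of what callers get, assuming the macro is invoked for `i32` as before:

use quaint::{Value, ValueType};

let vt: ValueType<'_> = 42_i32.into();            // ValueType::Int32(Some(42))
let v: Value<'_> = 42_i32.into();                 // same variant, wrapped in Value
let null: Value<'_> = Option::<i32>::None.into(); // typed NULL, Int32(None)
assert!(null.is_null());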
visitor.visit_i32(i), + ValueType::Int32(None) => visitor.visit_none(), + ValueType::Int64(Some(i)) => visitor.visit_i64(i), + ValueType::Int64(None) => visitor.visit_none(), + ValueType::Boolean(Some(b)) => visitor.visit_bool(b), + ValueType::Boolean(None) => visitor.visit_none(), + ValueType::Char(Some(c)) => visitor.visit_char(c), + ValueType::Char(None) => visitor.visit_none(), + ValueType::Float(Some(num)) => visitor.visit_f64(num as f64), + ValueType::Float(None) => visitor.visit_none(), + ValueType::Double(Some(num)) => visitor.visit_f64(num), + ValueType::Double(None) => visitor.visit_none(), #[cfg(feature = "bigdecimal")] - Value::Numeric(Some(num)) => { + ValueType::Numeric(Some(num)) => { use crate::bigdecimal::ToPrimitive; visitor.visit_f64(num.to_f64().unwrap()) } #[cfg(feature = "bigdecimal")] - Value::Numeric(None) => visitor.visit_none(), + ValueType::Numeric(None) => visitor.visit_none(), #[cfg(feature = "uuid")] - Value::Uuid(Some(uuid)) => visitor.visit_string(uuid.to_string()), + ValueType::Uuid(Some(uuid)) => visitor.visit_string(uuid.to_string()), #[cfg(feature = "uuid")] - Value::Uuid(None) => visitor.visit_none(), + ValueType::Uuid(None) => visitor.visit_none(), - Value::Json(Some(value)) => { + ValueType::Json(Some(value)) => { let de = value.into_deserializer(); de.deserialize_any(visitor) .map_err(|err| serde::de::value::Error::custom(format!("Error deserializing JSON value: {err}"))) } - Value::Json(None) => visitor.visit_none(), + ValueType::Json(None) => visitor.visit_none(), - Value::Xml(Some(s)) => visitor.visit_string(s.into_owned()), - Value::Xml(None) => visitor.visit_none(), + ValueType::Xml(Some(s)) => visitor.visit_string(s.into_owned()), + ValueType::Xml(None) => visitor.visit_none(), - Value::DateTime(Some(dt)) => visitor.visit_string(dt.to_rfc3339()), - Value::DateTime(None) => visitor.visit_none(), + ValueType::DateTime(Some(dt)) => visitor.visit_string(dt.to_rfc3339()), + ValueType::DateTime(None) => visitor.visit_none(), - Value::Date(Some(d)) => visitor.visit_string(format!("{d}")), - Value::Date(None) => visitor.visit_none(), + ValueType::Date(Some(d)) => visitor.visit_string(format!("{d}")), + ValueType::Date(None) => visitor.visit_none(), - Value::Time(Some(t)) => visitor.visit_string(format!("{t}")), - Value::Time(None) => visitor.visit_none(), + ValueType::Time(Some(t)) => visitor.visit_string(format!("{t}")), + ValueType::Time(None) => visitor.visit_none(), - Value::Array(Some(values)) => { + ValueType::Array(Some(values)) => { let deserializer = serde::de::value::SeqDeserializer::new(values.into_iter()); visitor.visit_seq(deserializer) } - Value::Array(None) => visitor.visit_none(), + ValueType::Array(None) => visitor.visit_none(), } } @@ -193,7 +193,7 @@ impl<'de> Deserializer<'de> for ValueDeserializer<'de> { where V: Visitor<'de>, { - if let Value::Bytes(Some(bytes)) = self.0 { + if let ValueType::Bytes(Some(bytes)) = self.0.typed { match bytes { Cow::Borrowed(bytes) => visitor.visit_borrowed_bytes(bytes), Cow::Owned(bytes) => visitor.visit_byte_buf(bytes), @@ -251,7 +251,7 @@ mod tests { #[test] fn deserialize_user() { - let row = make_row(vec![("id", Value::integer(12)), ("name", "Georgina".into())]); + let row = make_row(vec![("id", Value::int32(12)), ("name", "Georgina".into())]); let user: User = from_row(row).unwrap(); assert_eq!( @@ -267,9 +267,9 @@ mod tests { #[test] fn from_rows_works() { let first_row = make_row(vec![ - ("id", Value::integer(12)), + ("id", Value::int32(12)), ("name", "Georgina".into()), - ("bio", 
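The deserializer now walks `value.typed`, so a typed NULL surfaces as `None` on the Rust side, which is what the updated tests around this hunk rely on. A minimal sketch of the round trip, assuming `from_row` as used in this module:

use serde::Deserialize;

#[derive(Debug, Deserialize, PartialEq)]
struct User {
    id: i32,
    name: String,
    bio: Option<String>, // a Value::null_text() column deserializes to None
}

// let user: User = from_row(row)?;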
Value::Text(None)), + ("bio", Value::null_text()), ]); let second_row = make_row(vec![ ("id", 33.into()), diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index 7016262f2fec..ff16c118a46a 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -33,7 +33,7 @@ async fn aliased_value(api: &mut dyn TestApi) -> crate::Result<()> { #[test_each_connector] async fn aliased_null(api: &mut dyn TestApi) -> crate::Result<()> { - let query = Select::default().value(val!(Value::Int64(None)).alias("test")); + let query = Select::default().value(val!(Value::null_int64()).alias("test")); let res = api.conn().select(query).await?; let row = res.get(0).unwrap(); @@ -307,8 +307,8 @@ async fn where_equals(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -328,8 +328,8 @@ async fn where_like(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -349,8 +349,8 @@ async fn where_not_like(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -371,14 +371,14 @@ async fn inner_join(api: &mut dyn TestApi) -> crate::Result<()> { let table2 = api.create_temp_table("t1_id int, is_cat int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]) - .values(vec![Value::integer(1), Value::integer(1)]) - .values(vec![Value::integer(2), Value::integer(0)]); + .values(vec![Value::int32(1), Value::int32(1)]) + .values(vec![Value::int32(2), Value::int32(0)]); api.conn().insert(insert.into()).await?; @@ -414,18 +414,18 @@ async fn table_inner_join(api: &mut dyn TestApi) -> crate::Result<()> { let table3 = api.create_temp_table("id int, foo int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]) - .values(vec![Value::integer(1), 
Value::integer(1)]) - .values(vec![Value::integer(2), Value::integer(0)]); + .values(vec![Value::int32(1), Value::int32(1)]) + .values(vec![Value::int32(2), Value::int32(0)]); api.conn().insert(insert.into()).await?; - let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; @@ -466,13 +466,12 @@ async fn left_join(api: &mut dyn TestApi) -> crate::Result<()> { let table2 = api.create_temp_table("t1_id int, is_cat int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; - let insert = - Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; @@ -508,17 +507,16 @@ async fn table_left_join(api: &mut dyn TestApi) -> crate::Result<()> { let table3 = api.create_temp_table("id int, foo int").await?; let insert = Insert::multi_into(&table1, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Belka")]); api.conn().insert(insert.into()).await?; - let insert = - Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table2, vec!["t1_id", "is_cat"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; - let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::integer(1), Value::integer(1)]); + let insert = Insert::multi_into(&table3, vec!["id", "foo"]).values(vec![Value::int32(1), Value::int32(1)]); api.conn().insert(insert.into()).await?; @@ -558,8 +556,8 @@ async fn limit_no_offset(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -580,8 +578,8 @@ async fn offset_no_limit(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]); api.conn().insert(insert.into()).await?; @@ -602,9 +600,9 @@ async fn limit_with_offset(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - 
.values(vec![Value::integer(2), Value::text("Naukio")]) - .values(vec![Value::integer(3), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]) + .values(vec![Value::int32(3), Value::text("Belka")]); api.conn().insert(insert.into()).await?; @@ -625,9 +623,9 @@ async fn limit_with_offset_no_given_order(api: &mut dyn TestApi) -> crate::Resul let table = api.create_temp_table("id int, name varchar(255)").await?; let insert = Insert::multi_into(&table, vec!["id", "name"]) - .values(vec![Value::integer(1), Value::text("Musti")]) - .values(vec![Value::integer(2), Value::text("Naukio")]) - .values(vec![Value::integer(3), Value::text("Belka")]); + .values(vec![Value::int32(1), Value::text("Musti")]) + .values(vec![Value::int32(2), Value::text("Naukio")]) + .values(vec![Value::int32(3), Value::text("Belka")]); api.conn().insert(insert.into()).await?; @@ -1376,13 +1374,10 @@ async fn float_columns_cast_to_f32(api: &mut dyn TestApi) -> crate::Result<()> { #[test_each_connector(tags("mysql"), ignore("mysql8"))] #[cfg(feature = "bigdecimal")] async fn newdecimal_conversion_is_handled_correctly(api: &mut dyn TestApi) -> crate::Result<()> { - let select = Select::default().value(sum(Value::integer(1)).alias("theone")); + let select = Select::default().value(sum(Value::int32(1)).alias("theone")); let result = api.conn().select(select).await?; - assert_eq!( - Value::Numeric(Some("1.0".parse().unwrap())), - result.into_single().unwrap()[0] - ); + assert_eq!(Value::numeric("1.0".parse().unwrap()), result.into_single().unwrap()[0]); Ok(()) } @@ -1667,7 +1662,7 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { Insert::single_into(&table) .value( "value", - Value::enum_variant_with_name("A", &type_name, Option::<&str>::None), + Value::enum_variant_with_name("A", EnumName::new(&type_name, Option::::None)), ) .into(), ) @@ -1678,18 +1673,14 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { Insert::single_into(&table) .value( "value", - Value::enum_variant_with_name("B", &type_name, Option::<&str>::None), + Value::enum_variant_with_name("B", EnumName::new(&type_name, Option::::None)), ) .into(), ) .await?; api.conn() - .insert( - Insert::single_into(&table) - .value("value", Value::Enum(None, None)) - .into(), - ) + .insert(Insert::single_into(&table).value("value", Value::null_enum()).into()) .await?; let select = Select::from_table(&table).column("value").order_by("id".ascend()); @@ -1702,7 +1693,7 @@ async fn enum_values(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(Some(&Value::enum_variant("B")), row.at(0)); let row = res.get(2).unwrap(); - assert_eq!(Some(&Value::Enum(None, None)), row.at(0)); + assert_eq!(Some(&Value::null_enum()), row.at(0)); Ok(()) } @@ -1717,9 +1708,9 @@ async fn row_to_json_normal(api: &mut dyn TestApi) -> crate::Result<()> { let result = api.conn().select(select).await?; assert_eq!( - Value::Json(Some(serde_json::json!({ + Value::json(serde_json::json!({ "toto": "hello_world" - }))), + })), result.into_single().unwrap()[0] ); @@ -1736,9 +1727,9 @@ async fn row_to_json_pretty(api: &mut dyn TestApi) -> crate::Result<()> { let result = api.conn().select(select).await?; assert_eq!( - Value::Json(Some(serde_json::json!({ + Value::json(serde_json::json!({ "toto": "hello_world" - }))), + })), result.into_single().unwrap()[0] ); @@ -2027,9 +2018,9 @@ async fn ints_read_write_to_numeric(api: &mut dyn TestApi) -> crate::Result<()> let table = 
api.create_temp_table("id int, value numeric(12,2)").await?; let insert = Insert::multi_into(&table, ["id", "value"]) - .values(vec![Value::integer(1), Value::double(1234.5)]) - .values(vec![Value::integer(2), Value::integer(1234)]) - .values(vec![Value::integer(3), Value::integer(12345)]); + .values(vec![Value::int32(1), Value::double(1234.5)]) + .values(vec![Value::int32(2), Value::int32(1234)]) + .values(vec![Value::int32(3), Value::int32(12345)]); api.conn().execute(insert.into()).await?; @@ -2057,7 +2048,7 @@ async fn bigdecimal_read_write_to_floating(api: &mut dyn TestApi) -> crate::Resu let val = BigDecimal::from_str("0.1").unwrap(); let insert = Insert::multi_into(&table, ["id", "a", "b"]).values(vec![ - Value::integer(1), + Value::int32(1), Value::numeric(val.clone()), Value::numeric(val.clone()), ]); @@ -2075,7 +2066,7 @@ async fn bigdecimal_read_write_to_floating(api: &mut dyn TestApi) -> crate::Resu #[test_each_connector] async fn coalesce_fun(api: &mut dyn TestApi) -> crate::Result<()> { - let exprs: Vec = vec![Value::Text(None).into(), Value::text("Individual").into()]; + let exprs: Vec = vec![Value::null_text().into(), Value::text("Individual").into()]; let select = Select::default().value(coalesce(exprs).alias("val")); let row = api.conn().select(select).await?.into_single()?; @@ -2085,15 +2076,15 @@ async fn coalesce_fun(api: &mut dyn TestApi) -> crate::Result<()> { } fn value_into_json(value: &Value) -> Option { - match value.clone() { + match value.typed.clone() { // MariaDB returns JSON as text - Value::Text(Some(text)) => { + ValueType::Text(Some(text)) => { let json: serde_json::Value = serde_json::from_str(&text) .unwrap_or_else(|_| panic!("expected parsable text to json, found {}", text)); Some(json) } - Value::Json(Some(json)) => Some(json), + ValueType::Json(Some(json)) => Some(json), _ => None, } } @@ -3007,7 +2998,7 @@ async fn generate_binary_uuid(api: &mut dyn TestApi) -> crate::Result<()> { let val = res.into_single()?; // If it is a byte type and has a value, it's a generated UUID. - assert!(matches!(val, Value::Bytes(x) if x.is_some())); + assert!(matches!(val.typed, ValueType::Bytes(x) if x.is_some())); Ok(()) } @@ -3020,7 +3011,7 @@ async fn generate_swapped_binary_uuid(api: &mut dyn TestApi) -> crate::Result<() let val = res.into_single()?; // If it is a byte type and has a value, it's a generated UUID. - assert!(matches!(val, Value::Bytes(x) if x.is_some())); + assert!(matches!(val.typed, ValueType::Bytes(x) if x.is_some())); Ok(()) } @@ -3033,7 +3024,7 @@ async fn generate_native_uuid(api: &mut dyn TestApi) -> crate::Result<()> { let val = res.into_single()?; // If it is a text type and has a value, it's a generated string UUID. 
- assert!(matches!(val, Value::Text(x) if x.is_some())); + assert!(matches!(val.typed, ValueType::Text(x) if x.is_some())); Ok(()) } @@ -3176,25 +3167,25 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { let insert = Insert::single_into(&table) .value("name", "b") - .value("age", Value::Int32(None)); + .value("age", Value::null_int32()); api.conn().insert(insert.into()).await?; let insert = Insert::single_into(&table) - .value("name", Value::Text(None)) + .value("name", Value::null_text()) .value("age", 2); api.conn().insert(insert.into()).await?; let insert = Insert::single_into(&table) - .value("name", Value::Text(None)) - .value("age", Value::Text(None)); + .value("name", Value::null_text()) + .value("age", Value::null_text()); api.conn().insert(insert.into()).await?; // name ASC NULLS FIRST let select = Select::from_table(table.clone()).order_by("name".ascend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["name"], Value::text("a")); assert_eq!(res.get(3).unwrap()["name"], Value::text("b")); @@ -3204,15 +3195,15 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["name"], Value::text("a")); assert_eq!(res.get(1).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); // name DESC NULLS FIRST let select = Select::from_table(table.clone()).order_by("name".descend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["name"], Value::text("b")); assert_eq!(res.get(3).unwrap()["name"], Value::text("a")); @@ -3222,8 +3213,8 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["name"], Value::text("b")); assert_eq!(res.get(1).unwrap()["name"], Value::text("a")); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); // name ASC NULLS FIRST, age ASC NULLS FIRST let select = Select::from_table(table.clone()) @@ -3231,17 +3222,17 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { .order_by("age".ascend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(0).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(0).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(1).unwrap()["age"], Value::int32(2)); 
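These ordering assertions lean on `Value` equality: a typed NULL compares equal to another NULL of the same type and to nothing else. A sketch of that assumption (based on the derived comparison the tests exercise):

use quaint::Value;

assert_eq!(Value::null_text(), Value::null_text());
assert_ne!(Value::null_text(), Value::text("a"));
assert_ne!(Value::null_text(), Value::null_int32());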
assert_eq!(res.get(2).unwrap()["name"], Value::text("a")); assert_eq!(res.get(2).unwrap()["age"], Value::int32(1)); assert_eq!(res.get(3).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(3).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(3).unwrap()["age"], Value::null_int32()); // name ASC NULLS LAST, age ASC NULLS LAST let select = Select::from_table(table.clone()) @@ -3253,13 +3244,13 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["age"], Value::int32(1)); assert_eq!(res.get(1).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(1).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(1).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["age"], Value::int32(2)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["age"], Value::null_int32()); // name DESC NULLS FIRST, age DESC NULLS FIRST let select = Select::from_table(table.clone()) @@ -3267,14 +3258,14 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { .order_by("age".descend_nulls_first()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(0).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(0).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); assert_eq!(res.get(1).unwrap()["age"], Value::int32(2)); assert_eq!(res.get(2).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(2).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(2).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(3).unwrap()["name"], Value::text("a")); assert_eq!(res.get(3).unwrap()["age"], Value::int32(1)); @@ -3286,16 +3277,16 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { let res = api.conn().select(select).await?; assert_eq!(res.get(0).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(0).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(0).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(1).unwrap()["name"], Value::text("a")); assert_eq!(res.get(1).unwrap()["age"], Value::int32(1)); - assert_eq!(res.get(2).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); assert_eq!(res.get(2).unwrap()["age"], Value::int32(2)); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(3).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(3).unwrap()["age"], Value::null_int32()); // name ASC NULLS LAST, age DESC NULLS FIRST let select = Select::from_table(table.clone()) @@ -3307,12 +3298,12 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { assert_eq!(res.get(0).unwrap()["age"], Value::int32(1)); assert_eq!(res.get(1).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(1).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(1).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(2).unwrap()["name"], 
Value::Text(None)); - assert_eq!(res.get(2).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(2).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(2).unwrap()["age"], Value::null_int32()); - assert_eq!(res.get(3).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(3).unwrap()["name"], Value::null_text()); assert_eq!(res.get(3).unwrap()["age"], Value::int32(2)); // name DESC NULLS FIRST, age ASC NULLS LAST @@ -3321,14 +3312,14 @@ async fn order_by_nulls_first_last(api: &mut dyn TestApi) -> crate::Result<()> { .order_by("age".ascend_nulls_last()); let res = api.conn().select(select).await?; - assert_eq!(res.get(0).unwrap()["name"], Value::Text(None)); + assert_eq!(res.get(0).unwrap()["name"], Value::null_text()); assert_eq!(res.get(0).unwrap()["age"], Value::int32(2)); - assert_eq!(res.get(1).unwrap()["name"], Value::Text(None)); - assert_eq!(res.get(1).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(1).unwrap()["name"], Value::null_text()); + assert_eq!(res.get(1).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(2).unwrap()["name"], Value::text("b")); - assert_eq!(res.get(2).unwrap()["age"], Value::Int32(None)); + assert_eq!(res.get(2).unwrap()["age"], Value::null_int32()); assert_eq!(res.get(3).unwrap()["name"], Value::text("a")); assert_eq!(res.get(3).unwrap()["age"], Value::int32(1)); diff --git a/quaint/src/tests/query/error.rs b/quaint/src/tests/query/error.rs index d9884a2c574a..e1c8a74202f5 100644 --- a/quaint/src/tests/query/error.rs +++ b/quaint/src/tests/query/error.rs @@ -129,7 +129,7 @@ async fn null_constraint_violation(api: &mut dyn TestApi) -> crate::Result<()> { let insert = Insert::single_into(&table).value("id1", 50).value("id2", 55); api.conn().insert(insert.into()).await?; - let update = Update::table(&table).set("id2", Value::Int64(None)); + let update = Update::table(&table).set("id2", ValueType::Int64(None)); let res = api.conn().update(update).await; assert!(res.is_err()); @@ -414,7 +414,8 @@ async fn array_into_scalar_should_fail(api: &mut dyn TestApi) -> crate::Result<( let err = result.unwrap_err(); - assert!(err.to_string().contains("Couldn't serialize value `Some([Text(Some(\"abc\")), Text(Some(\"def\"))])` into a `text`. 
Value is a list but `text` is not."));
+    assert!(err.to_string().contains("Couldn't serialize value"));
+    assert!(err.to_string().contains("Value is a list but `text` is not."));

     Ok(())
 }
diff --git a/quaint/src/tests/types/mssql.rs b/quaint/src/tests/types/mssql.rs
index 6824562cde51..9d5d51317707 100644
--- a/quaint/src/tests/types/mssql.rs
+++ b/quaint/src/tests/types/mssql.rs
@@ -8,7 +8,7 @@ use crate::tests::test_api::*;
 test_type!(nvarchar_limited(
     mssql,
     "NVARCHAR(10)",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("foobar"),
     Value::text("余"),
 ));
@@ -16,7 +16,7 @@ test_type!(nvarchar_limited(
 test_type!(nvarchar_max(
     mssql,
     "NVARCHAR(max)",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("foobar"),
     Value::text("余"),
     Value::text("test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥😦😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀"),
@@ -25,7 +25,7 @@ test_type!(ntext(
     mssql,
     "NTEXT",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("foobar"),
     Value::text("余"),
 ));
@@ -33,23 +33,23 @@ test_type!(varchar_limited(
     mssql,
     "VARCHAR(10)",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("foobar"),
 ));

 test_type!(varchar_max(
     mssql,
     "VARCHAR(max)",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("foobar"),
 ));

-test_type!(text(mssql, "TEXT", Value::Text(None), Value::text("foobar")));
+test_type!(text(mssql, "TEXT", Value::null_text(), Value::text("foobar")));

 test_type!(tinyint(
     mssql,
     "tinyint",
-    Value::Int32(None),
+    Value::null_int32(),
     Value::int32(u8::MIN),
     Value::int32(u8::MAX),
 ));
@@ -57,7 +57,7 @@ test_type!(smallint(
     mssql,
     "smallint",
-    Value::Int32(None),
+    Value::null_int32(),
     Value::int32(i16::MIN),
     Value::int32(i16::MAX),
 ));
@@ -65,7 +65,7 @@ test_type!(int(
     mssql,
     "int",
-    Value::Int32(None),
+    Value::null_int32(),
     Value::int32(i32::MIN),
     Value::int32(i32::MAX),
 ));
@@ -73,35 +73,35 @@ test_type!(bigint(
     mssql,
     "bigint",
-    Value::Int64(None),
+    Value::null_int64(),
     Value::int64(i64::MIN),
     Value::int64(i64::MAX),
 ));

-test_type!(float_24(mssql, "float(24)", Value::Float(None), Value::float(1.23456),));
+test_type!(float_24(mssql, "float(24)", Value::null_float(), Value::float(1.23456),));

-test_type!(real(mssql, "real", Value::Float(None), Value::float(1.123456)));
+test_type!(real(mssql, "real", Value::null_float(), Value::float(1.123456)));

 test_type!(float_53(
     mssql,
     "float(53)",
-    Value::Double(None),
+    Value::null_double(),
     Value::double(1.1234567891)
 ));

-test_type!(money(mssql, "money", Value::Double(None), Value::double(3.14)));
+test_type!(money(mssql, "money", Value::null_double(), Value::double(3.14)));

 test_type!(smallmoney(
     mssql,
     "smallmoney",
-    Value::Double(None),
+    Value::null_double(),
     Value::double(3.14)
 ));

 test_type!(boolean(
     mssql,
     "bit",
-    Value::Boolean(None),
+    Value::null_boolean(),
     Value::boolean(true),
     Value::boolean(false),
 ));
@@ -109,54 +109,54 @@ test_type!(binary(
     mssql,
     "binary(8)",
-    Value::Bytes(None),
+    Value::null_bytes(),
     Value::bytes(b"DEADBEEF".to_vec()),
 ));

 test_type!(varbinary(
     mssql,
     "varbinary(8)",
-    Value::Bytes(None),
+    Value::null_bytes(),
     Value::bytes(b"DEADBEEF".to_vec()),
 ));

 test_type!(image(
     mssql,
     "image",
-    Value::Bytes(None),
+    Value::null_bytes(),
     Value::bytes(b"DEADBEEF".to_vec()),
 ));

 test_type!(date(
     mssql,
     "date",
-    Value::Date(None),
+    Value::null_date(),
Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()) )); test_type!(time( mssql, "time", - Value::Time(None), + Value::null_time(), Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -test_type!(datetime2(mssql, "datetime2", Value::DateTime(None), { +test_type!(datetime2(mssql, "datetime2", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -test_type!(datetime(mssql, "datetime", Value::DateTime(None), { +test_type!(datetime(mssql, "datetime", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -test_type!(datetimeoffset(mssql, "datetimeoffset", Value::DateTime(None), { +test_type!(datetimeoffset(mssql, "datetimeoffset", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); -test_type!(smalldatetime(mssql, "smalldatetime", Value::DateTime(None), { +test_type!(smalldatetime(mssql, "smalldatetime", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); diff --git a/quaint/src/tests/types/mssql/bigdecimal.rs b/quaint/src/tests/types/mssql/bigdecimal.rs index 821d419ad4a5..4dbd101ff456 100644 --- a/quaint/src/tests/types/mssql/bigdecimal.rs +++ b/quaint/src/tests/types/mssql/bigdecimal.rs @@ -6,7 +6,7 @@ use std::str::FromStr; test_type!(numeric( mssql, "numeric(10,2)", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(BigDecimal::from_str("3.14")?) )); @@ -148,21 +148,21 @@ test_type!(numeric_38_6( test_type!(money( mssql, "money", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), (Value::numeric(BigDecimal::from_str("3.14")?), Value::double(3.14)) )); test_type!(smallmoney( mssql, "smallmoney", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), (Value::numeric(BigDecimal::from_str("3.14")?), Value::double(3.14)) )); test_type!(float_24( mssql, "float(24)", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::float(1.123456) @@ -172,7 +172,7 @@ test_type!(float_24( test_type!(real( mssql, "real", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::float(1.123456) @@ -182,7 +182,7 @@ test_type!(real( test_type!(float_53( mssql, "float(53)", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(BigDecimal::from_str("1.123456789012345")?), Value::double(1.123456789012345) diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs index fc3d86a30bcb..cebfbef41033 100644 --- a/quaint/src/tests/types/mysql.rs +++ b/quaint/src/tests/types/mysql.rs @@ -11,7 +11,7 @@ use crate::bigdecimal::BigDecimal; test_type!(tinyint( mysql, "tinyint(4)", - Value::Int32(None), + Value::null_int32(), Value::int32(i8::MIN), Value::int32(i8::MAX) )); @@ -27,7 +27,7 @@ test_type!(tinyint1( test_type!(tinyint_unsigned( mysql, "tinyint(4) unsigned", - Value::Int32(None), + Value::null_int32(), Value::int32(0), Value::int32(255) )); @@ -35,7 +35,7 @@ test_type!(tinyint_unsigned( 
test_type!(year( mysql, "year", - Value::Int32(None), + Value::null_int32(), Value::int32(1984), Value::int32(2049) )); @@ -43,7 +43,7 @@ test_type!(year( test_type!(smallint( mysql, "smallint", - Value::Int32(None), + Value::null_int32(), Value::int32(i16::MIN), Value::int32(i16::MAX) )); @@ -51,7 +51,7 @@ test_type!(smallint( test_type!(smallint_unsigned( mysql, "smallint unsigned", - Value::Int32(None), + Value::null_int32(), Value::int32(0), Value::int32(65535) )); @@ -59,7 +59,7 @@ test_type!(smallint_unsigned( test_type!(mediumint( mysql, "mediumint", - Value::Int32(None), + Value::null_int32(), Value::int32(-8388608), Value::int32(8388607) )); @@ -67,7 +67,7 @@ test_type!(mediumint( test_type!(mediumint_unsigned( mysql, "mediumint unsigned", - Value::Int64(None), + Value::null_int64(), Value::int64(0), Value::int64(16777215) )); @@ -75,7 +75,7 @@ test_type!(mediumint_unsigned( test_type!(int( mysql, "int", - Value::Int32(None), + Value::null_int32(), Value::int32(i32::MIN), Value::int32(i32::MAX) )); @@ -83,7 +83,7 @@ test_type!(int( test_type!(int_unsigned( mysql, "int unsigned", - Value::Int64(None), + Value::null_int64(), Value::int64(0), Value::int64(2173158296i64), Value::int64(4294967295i64) @@ -100,7 +100,7 @@ test_type!(int_unsigned_not_null( test_type!(bigint( mysql, "bigint", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX) )); @@ -109,7 +109,7 @@ test_type!(bigint( test_type!(decimal( mysql, "decimal(10,2)", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()) )); @@ -127,7 +127,7 @@ test_type!(decimal_65_6( test_type!(float_decimal( mysql, "float", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::float(3.14) @@ -138,7 +138,7 @@ test_type!(float_decimal( test_type!(double_decimal( mysql, "double", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::double(3.14) @@ -148,62 +148,67 @@ test_type!(double_decimal( test_type!(bit1( mysql, "bit(1)", - (Value::Bytes(None), Value::Boolean(None)), - (Value::integer(0), Value::boolean(false)), - (Value::integer(1), Value::boolean(true)), + (Value::null_bytes(), Value::null_boolean()), + (Value::int32(0), Value::boolean(false)), + (Value::int32(1), Value::boolean(true)), )); test_type!(bit64( mysql, "bit(64)", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![0, 0, 0, 0, 0, 6, 107, 58]) )); -test_type!(char(mysql, "char(255)", Value::Text(None), Value::text("foobar"))); -test_type!(float(mysql, "float", Value::Float(None), Value::float(1.12345),)); -test_type!(double(mysql, "double", Value::Double(None), Value::double(1.12314124))); -test_type!(varchar(mysql, "varchar(255)", Value::Text(None), Value::text("foobar"))); -test_type!(tinytext(mysql, "tinytext", Value::Text(None), Value::text("foobar"))); -test_type!(text(mysql, "text", Value::Text(None), Value::text("foobar"))); -test_type!(longtext(mysql, "longtext", Value::Text(None), Value::text("foobar"))); +test_type!(char(mysql, "char(255)", Value::null_text(), Value::text("foobar"))); +test_type!(float(mysql, "float", Value::null_float(), Value::float(1.12345),)); +test_type!(double(mysql, "double", Value::null_double(), Value::double(1.12314124))); +test_type!(varchar( + mysql, + "varchar(255)", + Value::null_text(), + 
Value::text("foobar") +)); +test_type!(tinytext(mysql, "tinytext", Value::null_text(), Value::text("foobar"))); +test_type!(text(mysql, "text", Value::null_text(), Value::text("foobar"))); +test_type!(longtext(mysql, "longtext", Value::null_text(), Value::text("foobar"))); test_type!(binary(mysql, "binary(5)", Value::bytes(vec![1, 2, 3, 0, 0]))); test_type!(varbinary(mysql, "varbinary(255)", Value::bytes(vec![1, 2, 3]))); test_type!(mediumtext( mysql, "mediumtext", - Value::Text(None), + Value::null_text(), Value::text("foobar") )); test_type!(tinyblob( mysql, "tinyblob", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![1, 2, 3]) )); test_type!(mediumblob( mysql, "mediumblob", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![1, 2, 3]) )); test_type!(longblob( mysql, "longblob", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(vec![1, 2, 3]) )); -test_type!(blob(mysql, "blob", Value::Bytes(None), Value::bytes(vec![1, 2, 3]))); +test_type!(blob(mysql, "blob", Value::null_bytes(), Value::bytes(vec![1, 2, 3]))); test_type!(enum( mysql, "enum('pollicle_dogs','jellicle_cats')", - Value::Enum(None, None), + Value::null_enum(), Value::enum_variant("jellicle_cats"), Value::enum_variant("pollicle_dogs") )); @@ -211,11 +216,11 @@ test_type!(enum( test_type!(json( mysql, "json", - Value::Json(None), + Value::null_json(), Value::json(serde_json::json!({"this": "is", "a": "json", "number": 2})) )); -test_type!(date(mysql, "date", Value::Date(None), { +test_type!(date(mysql, "date", Value::null_date(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-04-20T00:00:00Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); @@ -223,11 +228,11 @@ test_type!(date(mysql, "date", Value::Date(None), { test_type!(time( mysql, "time", - Value::Time(None), + Value::null_time(), Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()) )); -test_type!(datetime(mysql, "datetime", Value::DateTime(None), { +test_type!(datetime(mysql, "datetime", Value::null_datetime(), { let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap(); Value::datetime(dt.with_timezone(&chrono::Utc)) })); diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs index 3ca40c822a77..bcbe30702431 100644 --- a/quaint/src/tests/types/postgres.rs +++ b/quaint/src/tests/types/postgres.rs @@ -8,7 +8,7 @@ use std::str::FromStr; test_type!(boolean( postgresql, "boolean", - Value::Boolean(None), + Value::null_boolean(), Value::boolean(true), Value::boolean(false), )); @@ -16,19 +16,19 @@ test_type!(boolean( test_type!(boolean_array( postgresql, "boolean[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::boolean(true), Value::boolean(false), Value::boolean(true), - Value::Boolean(None) + Value::null_boolean() ]), )); test_type!(int2( postgresql, "int2", - Value::Int32(None), + Value::null_int32(), Value::int32(i16::MIN), Value::int32(i16::MAX), )); @@ -36,7 +36,7 @@ test_type!(int2( test_type!(int2_with_int64( postgresql, "int2", - (Value::Int64(None), Value::Int32(None)), + (Value::null_int64(), Value::null_int32()), (Value::int64(i16::MIN), Value::int32(i16::MIN)), (Value::int64(i16::MAX), Value::int32(i16::MAX)) )); @@ -44,12 +44,12 @@ test_type!(int2_with_int64( test_type!(int2_array( postgresql, "int2[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::int32(1), Value::int32(2), Value::int32(3), - Value::Int32(None) + Value::null_int32() ]), )); @@ -57,15 +57,23 @@ test_type!(int2_array_with_i64( 
postgresql, "int2[]", ( - Value::array(vec![Value::int64(i16::MIN), Value::int64(i16::MAX), Value::Int64(None)]), - Value::array(vec![Value::int32(i16::MIN), Value::int32(i16::MAX), Value::Int32(None)]) + Value::array(vec![ + Value::int64(i16::MIN), + Value::int64(i16::MAX), + Value::null_int64() + ]), + Value::array(vec![ + Value::int32(i16::MIN), + Value::int32(i16::MAX), + Value::null_int32() + ]) ) )); test_type!(int4( postgresql, "int4", - Value::Int32(None), + Value::null_int32(), Value::int32(i32::MIN), Value::int32(i32::MAX), )); @@ -73,7 +81,7 @@ test_type!(int4( test_type!(int4_with_i64( postgresql, "int4", - (Value::Int64(None), Value::Int32(None)), + (Value::null_int64(), Value::null_int32()), (Value::int64(i32::MIN), Value::int32(i32::MIN)), (Value::int64(i32::MAX), Value::int32(i32::MAX)) )); @@ -81,23 +89,35 @@ test_type!(int4_with_i64( test_type!(int4_array( postgresql, "int4[]", - Value::Array(None), - Value::array(vec![Value::int32(i32::MIN), Value::int32(i32::MAX), Value::Int32(None)]), + Value::null_array(), + Value::array(vec![ + Value::int32(i32::MIN), + Value::int32(i32::MAX), + Value::null_int32() + ]), )); test_type!(int4_array_with_i64( postgresql, "int4[]", ( - Value::array(vec![Value::int64(i32::MIN), Value::int64(i32::MAX), Value::Int64(None)]), - Value::array(vec![Value::int32(i32::MIN), Value::int32(i32::MAX), Value::Int32(None)]) + Value::array(vec![ + Value::int64(i32::MIN), + Value::int64(i32::MAX), + Value::null_int64() + ]), + Value::array(vec![ + Value::int32(i32::MIN), + Value::int32(i32::MAX), + Value::null_int32() + ]) ) )); test_type!(int8( postgresql, "int8", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX), )); @@ -105,36 +125,36 @@ test_type!(int8( test_type!(int8_array( postgresql, "int8[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::int64(1), Value::int64(2), Value::int64(3), - Value::Int64(None) + Value::null_int64() ]), )); -test_type!(float4(postgresql, "float4", Value::Float(None), Value::float(1.234))); +test_type!(float4(postgresql, "float4", Value::null_float(), Value::float(1.234))); test_type!(float4_array( postgresql, "float4[]", - Value::Array(None), - Value::array(vec![Value::float(1.1234), Value::float(4.321), Value::Float(None)]) + Value::null_array(), + Value::array(vec![Value::float(1.1234), Value::float(4.321), Value::null_float()]) )); test_type!(float8( postgresql, "float8", - Value::Double(None), + Value::null_double(), Value::double(1.12345764), )); test_type!(float8_array( postgresql, "float8[]", - Value::Array(None), - Value::array(vec![Value::double(1.1234), Value::double(4.321), Value::Double(None)]) + Value::null_array(), + Value::array(vec![Value::double(1.1234), Value::double(4.321), Value::null_double()]) )); // NOTE: OIDs are unsigned 32-bit integers (see https://www.postgresql.org/docs/9.4/datatype-oid.html) @@ -142,7 +162,7 @@ test_type!(float8_array( test_type!(oid_with_i32( postgresql, "oid", - (Value::Int32(None), Value::Int64(None)), + (Value::null_int32(), Value::null_int64()), (Value::int32(i32::MAX), Value::int64(i32::MAX)), (Value::int32(u32::MIN as i32), Value::int64(u32::MIN)), )); @@ -150,7 +170,7 @@ test_type!(oid_with_i32( test_type!(oid_with_i64( postgresql, "oid", - Value::Int64(None), + Value::null_int64(), Value::int64(u32::MAX), Value::int64(u32::MIN), )); @@ -158,12 +178,12 @@ test_type!(oid_with_i64( test_type!(oid_array( postgresql, "oid[]", - Value::Array(None), + Value::null_array(), Value::array(vec![ Value::int64(1), 
Value::int64(2),
         Value::int64(3),
-        Value::Int64(None)
+        Value::null_int64()
     ]),
 ));

@@ -188,124 +208,124 @@ test_type!(serial8(
     Value::int64(i64::MAX),
 ));

-test_type!(char(postgresql, "char(6)", Value::Text(None), Value::text("foobar")));
+test_type!(char(postgresql, "char(6)", Value::null_text(), Value::text("foobar")));

 test_type!(char_array(
     postgresql,
     "char(6)[]",
-    Value::Array(None),
-    Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::Text(None)])
+    Value::null_array(),
+    Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::null_text()])
 ));

 test_type!(varchar(
     postgresql,
     "varchar(255)",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("foobar")
 ));

 test_type!(varchar_array(
     postgresql,
     "varchar(255)[]",
-    Value::Array(None),
-    Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::Text(None)])
+    Value::null_array(),
+    Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::null_text()])
 ));

-test_type!(text(postgresql, "text", Value::Text(None), Value::text("foobar")));
+test_type!(text(postgresql, "text", Value::null_text(), Value::text("foobar")));

 test_type!(text_array(
     postgresql,
     "text[]",
-    Value::Array(None),
-    Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::Text(None)])
+    Value::null_array(),
+    Value::array(vec![Value::text("foobar"), Value::text("omgwtf"), Value::null_text()])
 ));

-test_type!(bit(postgresql, "bit(4)", Value::Text(None), Value::text("1001")));
+test_type!(bit(postgresql, "bit(4)", Value::null_text(), Value::text("1001")));

 test_type!(bit_array(
     postgresql,
     "bit(4)[]",
-    Value::Array(None),
-    Value::array(vec![Value::text("1001"), Value::text("0110"), Value::Text(None)])
+    Value::null_array(),
+    Value::array(vec![Value::text("1001"), Value::text("0110"), Value::null_text()])
 ));

 test_type!(varbit(
     postgresql,
     "varbit(20)",
-    Value::Text(None),
+    Value::null_text(),
     Value::text("001010101")
 ));

 test_type!(varbit_array(
     postgresql,
     "varbit(20)[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::text("001010101"),
         Value::text("01101111"),
-        Value::Text(None)
+        Value::null_text()
     ])
 ));

-test_type!(inet(postgresql, "inet", Value::Text(None), Value::text("127.0.0.1")));
+test_type!(inet(postgresql, "inet", Value::null_text(), Value::text("127.0.0.1")));

 test_type!(inet_array(
     postgresql,
     "inet[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::text("127.0.0.1"),
         Value::text("192.168.1.1"),
-        Value::Text(None)
+        Value::null_text()
     ])
 ));

 test_type!(json(
     postgresql,
     "json",
-    Value::Json(None),
+    Value::null_json(),
     Value::json(serde_json::json!({"foo": "bar"}))
 ));

 test_type!(json_array(
     postgresql,
     "json[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::json(serde_json::json!({"foo": "bar"})),
         Value::json(serde_json::json!({"omg": false})),
-        Value::Json(None)
+        Value::null_json()
     ])
 ));

 test_type!(jsonb(
     postgresql,
     "jsonb",
-    Value::Json(None),
+    Value::null_json(),
     Value::json(serde_json::json!({"foo": "bar"}))
 ));

 test_type!(jsonb_array(
     postgresql,
     "jsonb[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::json(serde_json::json!({"foo": "bar"})),
         Value::json(serde_json::json!({"omg": false})),
-        Value::Json(None)
+        Value::null_json()
     ])
 ));

-test_type!(xml(postgresql, "xml", Value::Xml(None), Value::xml("<test>1</test>",)));
+test_type!(xml(postgresql, "xml", Value::null_xml(), Value::xml("<test>1</test>",)));

 test_type!(xml_array(
     postgresql,
     "xml[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::text("<test>1</test>"),
         Value::text("<test>2</test>"),
-        Value::Text(None)
+        Value::null_text()
     ])
 ));

@@ -313,7 +333,7 @@ test_type!(xml_array(
 test_type!(uuid(
     postgresql,
     "uuid",
-    Value::Uuid(None),
+    Value::null_uuid(),
     Value::uuid(uuid::Uuid::from_str("936DA01F-9ABD-4D9D-80C7-02AF85C822A8").unwrap())
 ));

@@ -321,89 +341,89 @@ test_type!(uuid(
 test_type!(uuid_array(
     postgresql,
     "uuid[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::uuid(uuid::Uuid::from_str("936DA01F-9ABD-4D9D-80C7-02AF85C822A8").unwrap()),
-        Value::Uuid(None)
+        Value::null_uuid(),
     ])
 ));

 test_type!(date(
     postgresql,
     "date",
-    Value::Date(None),
+    Value::null_date(),
     Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap())
 ));

 test_type!(date_array(
     postgresql,
     "date[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::date(chrono::NaiveDate::from_ymd_opt(2020, 4, 20).unwrap()),
-        Value::Date(None)
+        Value::null_date()
     ])
 ));

 test_type!(time(
     postgresql,
     "time",
-    Value::Time(None),
+    Value::null_time(),
     Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap())
 ));

 test_type!(time_array(
     postgresql,
     "time[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::time(chrono::NaiveTime::from_hms_opt(16, 20, 00).unwrap()),
-        Value::Time(None)
+        Value::null_time()
     ])
 ));

-test_type!(timestamp(postgresql, "timestamp", Value::DateTime(None), {
+test_type!(timestamp(postgresql, "timestamp", Value::null_datetime(), {
     let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap();
     Value::datetime(dt.with_timezone(&chrono::Utc))
 }));

-test_type!(timestamp_array(postgresql, "timestamp[]", Value::Array(None), {
+test_type!(timestamp_array(postgresql, "timestamp[]", Value::null_array(), {
     let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap();

     Value::array(vec![
         Value::datetime(dt.with_timezone(&chrono::Utc)),
-        Value::DateTime(None),
+        Value::null_datetime(),
     ])
 }));

-test_type!(timestamptz(postgresql, "timestamptz", Value::DateTime(None), {
+test_type!(timestamptz(postgresql, "timestamptz", Value::null_datetime(), {
     let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap();
     Value::datetime(dt.with_timezone(&chrono::Utc))
 }));

-test_type!(timestamptz_array(postgresql, "timestamptz[]", Value::Array(None), {
+test_type!(timestamptz_array(postgresql, "timestamptz[]", Value::null_array(), {
     let dt = chrono::DateTime::parse_from_rfc3339("2020-02-27T19:10:22Z").unwrap();

     Value::array(vec![
         Value::datetime(dt.with_timezone(&chrono::Utc)),
-        Value::DateTime(None),
+        Value::null_datetime(),
     ])
 }));

 test_type!(bytea(
     postgresql,
     "bytea",
-    Value::Bytes(None),
+    Value::null_bytes(),
     Value::bytes(b"DEADBEEF".to_vec())
 ));

 test_type!(bytea_array(
     postgresql,
     "bytea[]",
-    Value::Array(None),
+    Value::null_array(),
     Value::array(vec![
         Value::bytes(b"DEADBEEF".to_vec()),
         Value::bytes(b"BEEFBEEF".to_vec()),
-        Value::Bytes(None)
+        Value::null_bytes()
     ])
 ));
diff --git a/quaint/src/tests/types/postgres/bigdecimal.rs b/quaint/src/tests/types/postgres/bigdecimal.rs
index f79c23a8ad78..894b2c967629 100644
--- a/quaint/src/tests/types/postgres/bigdecimal.rs
+++ b/quaint/src/tests/types/postgres/bigdecimal.rs
@@ -4,7 +4,7 @@ use crate::bigdecimal::BigDecimal;
 test_type!(decimal(
     postgresql,
     "decimal(10,2)",
-    Value::Numeric(None),
+    Value::null_numeric(),
     Value::numeric(BigDecimal::from_str("3.14")?)
)); @@ -177,28 +177,28 @@ test_type!(decimal_128_6( test_type!(decimal_array( postgresql, "decimal(10,2)[]", - Value::Array(None), + Value::null_array(), Value::array(vec![BigDecimal::from_str("3.14")?, BigDecimal::from_str("5.12")?]) )); test_type!(money( postgresql, "money", - Value::Numeric(None), + Value::null_numeric(), Value::numeric(BigDecimal::from_str("1.12")?) )); test_type!(money_array( postgresql, "money[]", - Value::Array(None), + Value::null_array(), Value::array(vec![BigDecimal::from_str("1.12")?, BigDecimal::from_str("1.12")?]) )); test_type!(float4( postgresql, "float4", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::float(1.123456) @@ -208,7 +208,7 @@ test_type!(float4( test_type!(float8( postgresql, "float8", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(BigDecimal::from_str("1.123456")?), Value::double(1.123456) diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs index 80ab4bb5b8f2..ac2c69131e50 100644 --- a/quaint/src/tests/types/sqlite.rs +++ b/quaint/src/tests/types/sqlite.rs @@ -9,7 +9,7 @@ use std::str::FromStr; test_type!(integer( sqlite, "INTEGER", - Value::Int32(None), + Value::null_int32(), Value::int32(i8::MIN), Value::int32(i8::MAX), Value::int32(i16::MIN), @@ -21,18 +21,18 @@ test_type!(integer( test_type!(big_int( sqlite, "BIGINT", - Value::Int64(None), + Value::null_int64(), Value::int64(i64::MIN), Value::int64(i64::MAX), )); -test_type!(real(sqlite, "REAL", Value::Double(None), Value::double(1.12345))); +test_type!(real(sqlite, "REAL", Value::null_double(), Value::double(1.12345))); #[cfg(feature = "bigdecimal")] test_type!(float_decimal( sqlite, "FLOAT", - (Value::Numeric(None), Value::Float(None)), + (Value::null_numeric(), Value::null_float()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::double(3.14) @@ -43,35 +43,35 @@ test_type!(float_decimal( test_type!(double_decimal( sqlite, "DOUBLE", - (Value::Numeric(None), Value::Double(None)), + (Value::null_numeric(), Value::null_double()), ( Value::numeric(bigdecimal::BigDecimal::from_str("3.14").unwrap()), Value::double(3.14) ) )); -test_type!(text(sqlite, "TEXT", Value::Text(None), Value::text("foobar huhuu"))); +test_type!(text(sqlite, "TEXT", Value::null_text(), Value::text("foobar huhuu"))); test_type!(blob( sqlite, "BLOB", - Value::Bytes(None), + Value::null_bytes(), Value::bytes(b"DEADBEEF".to_vec()) )); -test_type!(float(sqlite, "FLOAT", Value::Float(None), Value::double(1.23))); +test_type!(float(sqlite, "FLOAT", Value::null_float(), Value::double(1.23))); test_type!(double( sqlite, "DOUBLE", - Value::Double(None), + Value::null_double(), Value::double(1.2312313213) )); test_type!(boolean( sqlite, "BOOLEAN", - Value::Boolean(None), + Value::null_boolean(), Value::boolean(true), Value::boolean(false) )); @@ -79,14 +79,14 @@ test_type!(boolean( test_type!(date( sqlite, "DATE", - Value::Date(None), + Value::null_date(), Value::date(chrono::NaiveDate::from_ymd_opt(1984, 1, 1).unwrap()) )); test_type!(datetime( sqlite, "DATETIME", - Value::DateTime(None), + Value::null_datetime(), Value::datetime(chrono::DateTime::from_str("2020-07-29T09:23:44.458Z").unwrap()) )); diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs index 29ca3d5ccbaa..8424bc7fbb2b 100644 --- a/quaint/src/visitor.rs +++ b/quaint/src/visitor.rs @@ -149,7 +149,10 @@ pub trait Visitor<'a> { fn 
visit_text_search_relevance(&mut self, text_search_relevance: TextSearchRelevance<'a>) -> Result;

     fn visit_parameterized_enum(&mut self, variant: EnumVariant<'a>, name: Option<EnumName<'a>>) -> Result {
-        self.add_parameter(Value::Enum(Some(variant), name));
+        match name {
+            Some(name) => self.add_parameter(Value::enum_variant_with_name(variant, name)),
+            None => self.add_parameter(Value::enum_variant(variant)),
+        }
         self.parameter_substitution()?;

         Ok(())
@@ -161,7 +164,7 @@ pub trait Visitor<'a> {
             .map(|variant| variant.into_enum(name.clone()))
             .collect();

-        self.add_parameter(Value::Array(Some(enum_variants)));
+        self.add_parameter(Value::array(enum_variants));
         self.parameter_substitution()?;

         Ok(())
@@ -169,9 +172,9 @@ pub trait Visitor<'a> {

     /// A visit to a value we parameterize
     fn visit_parameterized(&mut self, value: Value<'a>) -> Result {
-        match value {
-            Value::Enum(Some(variant), name) => self.visit_parameterized_enum(variant, name),
-            Value::EnumArray(Some(variants), name) => self.visit_parameterized_enum_array(variants, name),
+        match value.typed {
+            ValueType::Enum(Some(variant), name) => self.visit_parameterized_enum(variant, name),
+            ValueType::EnumArray(Some(variants), name) => self.visit_parameterized_enum_array(variants, name),
             _ => {
                 self.add_parameter(value);
                 self.parameter_substitution()
diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs
index 7e8249f369e0..4344b307f197 100644
--- a/quaint/src/visitor/mssql.rs
+++ b/quaint/src/visitor/mssql.rs
@@ -8,7 +8,7 @@ use crate::{
     },
     error::{Error, ErrorKind},
     prelude::{Aliasable, Average, Query},
-    visitor, Value,
+    visitor, Value, ValueType,
 };
 use std::{convert::TryFrom, fmt::Write, iter};

@@ -310,27 +310,27 @@ impl<'a> Visitor<'a> for Mssql<'a> {
     }

     fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result {
-        let res = match value {
-            Value::Int32(i) => i.map(|i| self.write(i)),
-            Value::Int64(i) => i.map(|i| self.write(i)),
-            Value::Float(d) => d.map(|f| match f {
+        let res = match value.typed {
+            ValueType::Int32(i) => i.map(|i| self.write(i)),
+            ValueType::Int64(i) => i.map(|i| self.write(i)),
+            ValueType::Float(d) => d.map(|f| match f {
                 f if f.is_nan() => self.write("'NaN'"),
                 f if f == f32::INFINITY => self.write("'Infinity'"),
                 f if f == f32::NEG_INFINITY => self.write("'-Infinity"),
                 v => self.write(format!("{v:?}")),
             }),
-            Value::Double(d) => d.map(|f| match f {
+            ValueType::Double(d) => d.map(|f| match f {
                 f if f.is_nan() => self.write("'NaN'"),
                 f if f == f64::INFINITY => self.write("'Infinity'"),
                 f if f == f64::NEG_INFINITY => self.write("'-Infinity"),
                 v => self.write(format!("{v:?}")),
             }),
-            Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))),
-            Value::Enum(e, _) => e.map(|e| self.write(e)),
-            Value::Bytes(b) => b.map(|b| self.write(format!("0x{}", hex::encode(b)))),
-            Value::Boolean(b) => b.map(|b| self.write(if b { 1 } else { 0 })),
-            Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))),
-            Value::Array(_) | Value::EnumArray(_, _) => {
+            ValueType::Text(t) => t.map(|t| self.write(format!("'{t}'"))),
+            ValueType::Enum(e, _) => e.map(|e| self.write(e)),
+            ValueType::Bytes(b) => b.map(|b| self.write(format!("0x{}", hex::encode(b)))),
+            ValueType::Boolean(b) => b.map(|b| self.write(if b { 1 } else { 0 })),
+            ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))),
+            ValueType::Array(_) | ValueType::EnumArray(_, _) => {
                 let msg = "Arrays are not supported in T-SQL.";
                 let kind = ErrorKind::conversion(msg);

                 let mut builder = Error::builder(kind);
                 builder.set_original_message(msg);

                 return
Err(builder.build()); } - Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), + ValueType::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), + ValueType::Numeric(r) => r.map(|r| self.write(r)), #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| { + ValueType::Uuid(uuid) => uuid.map(|uuid| { let s = format!("CONVERT(uniqueidentifier, N'{}')", uuid.hyphenated()); self.write(s) }), - Value::DateTime(dt) => dt.map(|dt| { + ValueType::DateTime(dt) => dt.map(|dt| { let s = format!("CONVERT(datetimeoffset, N'{}')", dt.to_rfc3339()); self.write(s) }), - Value::Date(date) => date.map(|date| { + ValueType::Date(date) => date.map(|date| { let s = format!("CONVERT(date, N'{date}')"); self.write(s) }), - Value::Time(time) => time.map(|time| { + ValueType::Time(time) => time.map(|time| { let s = format!("CONVERT(time, N'{time}')"); self.write(s) }), // Style 3 is keep all whitespace + internal DTD processing: // https://docs.microsoft.com/en-us/sql/t-sql/functions/cast-and-convert-transact-sql?redirectedfrom=MSDN&view=sql-server-ver15#xml-styles - Value::Xml(cow) => cow.map(|cow| self.write(format!("CONVERT(XML, N'{cow}', 3)"))), + ValueType::Xml(cow) => cow.map(|cow| self.write(format!("CONVERT(XML, N'{cow}', 3)"))), }; match res { @@ -391,7 +391,7 @@ impl<'a> Visitor<'a> for Mssql<'a> { self.visit_parameterized(limit)?; self.write(" ROWS ONLY") } - (None, Some(offset)) if self.order_by_set || offset.as_i64().map(|i| i > 0).unwrap_or(false) => { + (None, Some(offset)) if self.order_by_set || offset.typed.as_i64().map(|i| i > 0).unwrap_or(false) => { add_ordering(self)?; self.write(" OFFSET ")?; @@ -749,11 +749,11 @@ mod tests { #[test] fn test_aliased_null() { let expected_sql = "SELECT @P1 AS [test]"; - let query = Select::default().value(val!(Value::Int32(None)).alias("test")); + let query = Select::default().value(val!(ValueType::Int32(None)).alias("test")); let (sql, params) = Mssql::build(query).unwrap(); assert_eq!(expected_sql, sql); - assert_eq!(vec![Value::Int32(None)], params); + assert_eq!(vec![Value::null_int32()], params); } #[test] @@ -1192,7 +1192,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Mssql::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Mssql::build(Select::default().value(ValueType::Text(None).raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index d4587753f8f8..928c8a8a9ed6 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -120,27 +120,27 @@ impl<'a> Visitor<'a> for Mysql<'a> { } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Float(d) => d.map(|f| match f { + let res = match &value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => 
self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e, _) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("x'{}'", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(b)), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Array(_) | Value::EnumArray(_, _) => { + ValueType::Text(t) => t.as_ref().map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.as_ref().map(|e| self.write(e)), + ValueType::Bytes(b) => b.as_ref().map(|b| self.write(format!("x'{}'", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(b)), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in MySQL."; let kind = ErrorKind::conversion(msg); @@ -150,9 +150,9 @@ impl<'a> Visitor<'a> for Mysql<'a> { return Err(builder.build()); } #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), + ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), - Value::Json(j) => match j { + ValueType::Json(j) => match j { Some(ref j) => { let s = serde_json::to_string(&j)?; Some(self.write(format!("CONVERT('{s}', JSON)"))) @@ -160,11 +160,11 @@ impl<'a> Visitor<'a> for Mysql<'a> { None => None, }, #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), - Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), + ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), + ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), + ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), + ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), }; match res { @@ -293,8 +293,20 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.write(" OFFSET ")?; self.visit_parameterized(offset) } - (None, Some(Value::Int32(Some(offset)))) if offset < 1 => Ok(()), - (None, Some(Value::Int64(Some(offset)))) if offset < 1 => Ok(()), + ( + None, + Some(Value { + typed: ValueType::Int32(Some(offset)), + .. + }), + ) if offset < 1 => Ok(()), + ( + None, + Some(Value { + typed: ValueType::Int64(Some(offset)), + .. 
+ }), + ) if offset < 1 => Ok(()), (None, Some(offset)) => { self.write(" LIMIT ")?; self.visit_parameterized(Value::from(9_223_372_036_854_775_807i64))?; @@ -421,27 +433,27 @@ impl<'a> Visitor<'a> for Mysql<'a> { match json_type { JsonType::Array => { - self.visit_expression(Value::text("ARRAY").into())?; + self.visit_expression(Expression::from(Value::text("ARRAY")))?; } JsonType::Boolean => { - self.visit_expression(Value::text("BOOLEAN").into())?; + self.visit_expression(Expression::from(Value::text("BOOLEAN")))?; } JsonType::Number => { - self.visit_expression(Value::text("INTEGER").into())?; + self.visit_expression(Expression::from(Value::text("INTEGER")))?; self.write(" OR JSON_TYPE(")?; self.visit_expression(left)?; self.write(")")?; self.write(" = ")?; - self.visit_expression(Value::text("DOUBLE").into())?; + self.visit_expression(Expression::from(Value::text("DOUBLE")))?; } JsonType::Object => { - self.visit_expression(Value::text("OBJECT").into())?; + self.visit_expression(Expression::from(Value::text("OBJECT")))?; } JsonType::String => { - self.visit_expression(Value::text("STRING").into())?; + self.visit_expression(Expression::from(Value::text("STRING")))?; } JsonType::Null => { - self.visit_expression(Value::text("NULL").into())?; + self.visit_expression(Expression::from(Value::text("NULL")))?; } JsonType::ColumnRef(column) => { self.write("JSON_TYPE")?; @@ -741,7 +753,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Mysql::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Mysql::build(Select::default().value(ValueType::Text(None).raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } @@ -769,7 +781,7 @@ mod tests { #[test] fn test_raw_bytes() { - let (sql, params) = Mysql::build(Select::default().value(Value::bytes(vec![1, 2, 3]).raw())).unwrap(); + let (sql, params) = Mysql::build(Select::default().value(ValueType::bytes(vec![1, 2, 3]).raw())).unwrap(); assert_eq!("SELECT x'010203'", sql); assert!(params.is_empty()); } @@ -787,7 +799,7 @@ mod tests { #[test] fn test_raw_char() { - let (sql, params) = Mysql::build(Select::default().value(Value::character('a').raw())).unwrap(); + let (sql, params) = Mysql::build(Select::default().value(ValueType::character('a').raw())).unwrap(); assert_eq!("SELECT 'a'", sql); assert!(params.is_empty()); } @@ -897,7 +909,7 @@ mod tests { #[test] fn test_json_negation() { - let conditions = ConditionTree::not("json".equals(Value::Json(Some(serde_json::Value::Null)))); + let conditions = ConditionTree::not("json".equals(ValueType::Json(Some(serde_json::Value::Null)))); let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap(); assert_eq!( @@ -909,7 +921,7 @@ mod tests { #[test] fn test_json_not_negation() { - let conditions = ConditionTree::not("json".not_equals(Value::Json(Some(serde_json::Value::Null)))); + let conditions = ConditionTree::not("json".not_equals(ValueType::Json(Some(serde_json::Value::Null)))); let (sql, _) = Mysql::build(Select::from_table("test").so_that(conditions)).unwrap(); assert_eq!( diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 0e36abe68c24..ec90eda8d6f5 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -107,12 +107,9 @@ impl<'a> Visitor<'a> for Postgres<'a> { self.surround_with_backticks(enum_name.name.deref())?; self.write("[]")?; } else { - self.visit_parameterized(Value::Array(Some( - variants - .into_iter() - .map(|variant| 
variant.into_enum(name.clone())) - .collect(), - )))?; + self.visit_parameterized(Value::array( + variants.into_iter().map(|variant| variant.into_enum(name.clone())), + ))?; } Ok(()) @@ -167,32 +164,32 @@ impl<'a> Visitor<'a> for Postgres<'a> { } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e, _) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("E'{}'", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(b)), - Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Float(d) => d.map(|f| match f { + let res = match &value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Text(t) => t.as_ref().map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.as_ref().map(|e| self.write(e)), + ValueType::Bytes(b) => b.as_ref().map(|b| self.write(format!("E'{}'", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(b)), + ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Array(ary) => ary.map(|ary| { + ValueType::Array(ary) => ary.as_ref().map(|ary| { self.surround_with("'{", "}'", |ref mut s| { let len = ary.len(); - for (i, item) in ary.into_iter().enumerate() { + for (i, item) in ary.iter().enumerate() { s.write(item)?; if i < len - 1 { @@ -203,11 +200,11 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) }) }), - Value::EnumArray(variants, name) => variants.map(|variants| { + ValueType::EnumArray(variants, name) => variants.as_ref().map(|variants| { self.surround_with("ARRAY[", "]", |ref mut s| { let len = variants.len(); - for (i, item) in variants.into_iter().enumerate() { + for (i, item) in variants.iter().enumerate() { s.surround_with("'", "'", |t| t.write(item))?; if i < len - 1 { @@ -220,7 +217,7 @@ impl<'a> Visitor<'a> for Postgres<'a> { if let Some(enum_name) = name { self.write("::")?; - if let Some(schema_name) = enum_name.schema_name { + if let Some(schema_name) = &enum_name.schema_name { self.surround_with_backticks(schema_name.deref())?; self.write(".")? 
} @@ -229,14 +226,16 @@ impl<'a> Visitor<'a> for Postgres<'a> { Ok(()) }), - Value::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), + ValueType::Json(j) => j + .as_ref() + .map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), + ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), + ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), + ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), + ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), }; match res { @@ -961,7 +960,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Postgres::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Postgres::build(Select::default().value(Value::null_text().raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } @@ -1050,9 +1049,9 @@ mod tests { #[test] fn test_raw_enum_array() { - let enum_array = Value::EnumArray( - Some(vec![EnumVariant::new("A"), EnumVariant::new("B")]), - Some(EnumName::new("Alphabet", Some("foo"))), + let enum_array = Value::enum_array_with_name( + vec![EnumVariant::new("A"), EnumVariant::new("B")], + EnumName::new("Alphabet", Some("foo")), ); let (sql, params) = Postgres::build(Select::default().value(enum_array.raw())).unwrap(); diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 209758bbeb20..45b9a82468ef 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -74,27 +74,27 @@ impl<'a> Visitor<'a> for Sqlite<'a> { } fn visit_raw_value(&mut self, value: Value<'a>) -> visitor::Result { - let res = match value { - Value::Int32(i) => i.map(|i| self.write(i)), - Value::Int64(i) => i.map(|i| self.write(i)), - Value::Text(t) => t.map(|t| self.write(format!("'{t}'"))), - Value::Enum(e, _) => e.map(|e| self.write(e)), - Value::Bytes(b) => b.map(|b| self.write(format!("x'{}'", hex::encode(b)))), - Value::Boolean(b) => b.map(|b| self.write(b)), - Value::Char(c) => c.map(|c| self.write(format!("'{c}'"))), - Value::Float(d) => d.map(|f| match f { + let res = match &value.typed { + ValueType::Int32(i) => i.map(|i| self.write(i)), + ValueType::Int64(i) => i.map(|i| self.write(i)), + ValueType::Text(t) => t.as_ref().map(|t| self.write(format!("'{t}'"))), + ValueType::Enum(e, _) => e.as_ref().map(|e| self.write(e)), + ValueType::Bytes(b) => b.as_ref().map(|b| self.write(format!("x'{}'", hex::encode(b)))), + ValueType::Boolean(b) => b.map(|b| self.write(b)), + ValueType::Char(c) => c.map(|c| self.write(format!("'{c}'"))), + ValueType::Float(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f32::INFINITY => self.write("'Infinity'"), f if f == f32::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Double(d) => d.map(|f| match f { + ValueType::Double(d) => d.map(|f| match f { f if f.is_nan() => self.write("'NaN'"), f if f == f64::INFINITY => 
self.write("'Infinity'"), f if f == f64::NEG_INFINITY => self.write("'-Infinity"), v => self.write(format!("{v:?}")), }), - Value::Array(_) | Value::EnumArray(_, _) => { + ValueType::Array(_) | ValueType::EnumArray(_, _) => { let msg = "Arrays are not supported in SQLite."; let kind = ErrorKind::conversion(msg); @@ -104,7 +104,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> { return Err(builder.build()); } - Value::Json(j) => match j { + ValueType::Json(j) => match j { Some(ref j) => { let s = serde_json::to_string(j)?; Some(self.write(format!("'{s}'"))) @@ -112,13 +112,13 @@ impl<'a> Visitor<'a> for Sqlite<'a> { None => None, }, #[cfg(feature = "bigdecimal")] - Value::Numeric(r) => r.map(|r| self.write(r)), + ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), #[cfg(feature = "uuid")] - Value::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), - Value::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), - Value::Date(date) => date.map(|date| self.write(format!("'{date}'"))), - Value::Time(time) => time.map(|time| self.write(format!("'{time}'"))), - Value::Xml(cow) => cow.map(|cow| self.write(format!("'{cow}'"))), + ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), + ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), + ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), + ValueType::Time(time) => time.map(|time| self.write(format!("'{time}'"))), + ValueType::Xml(cow) => cow.as_ref().map(|cow| self.write(format!("'{cow}'"))), }; match res { @@ -432,11 +432,11 @@ mod tests { #[test] fn test_aliased_null() { let expected_sql = "SELECT ? AS `test`"; - let query = Select::default().value(val!(Value::Text(None)).alias("test")); + let query = Select::default().value(val!(Value::null_text()).alias("test")); let (sql, params) = Sqlite::build(query).unwrap(); assert_eq!(expected_sql, sql); - assert_eq!(vec![Value::Text(None)], params); + assert_eq!(vec![Value::null_text()], params); } #[test] @@ -861,7 +861,7 @@ mod tests { #[test] fn test_raw_null() { - let (sql, params) = Sqlite::build(Select::default().value(Value::Text(None).raw())).unwrap(); + let (sql, params) = Sqlite::build(Select::default().value(Value::null_text().raw())).unwrap(); assert_eq!("SELECT null", sql); assert!(params.is_empty()); } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index 37f3bc89bbd4..b27f27406e5c 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -215,7 +215,11 @@ impl TestConfig { #[cfg(unix)] { use std::os::unix::fs::PermissionsExt; - if path.metadata().is_ok_and(|md| md.permissions().mode() & 0o111 == 0) { + let is_executable = match path.metadata() { + Err(_) => false, + Ok(md) => md.permissions().mode() & 0o111 != 0, + }; + if !is_executable { exit_with_message(&format!( "The external test executor file `{}` must be have permissions to execute", file diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs index b7667a45825f..4ea3d4590446 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs @@ 
-39,7 +39,7 @@ impl std::fmt::Display for AggregationType { impl GroupByBuilder { pub fn new() -> Self { - Self { ..Default::default() } + Default::default() } pub fn render(&self, by_fields: Vec) -> (Document, Option) { diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs index 79e61523dec2..34373eaf3d5b 100644 --- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs +++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs @@ -469,7 +469,7 @@ fn cursor_order_def_aggregation_scalar( order_by: &OrderByScalarAggregation, order_by_def: &OrderByDefinition, ) -> CursorOrderDefinition { - let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::integer(0).into()]; + let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::int32(0).into()]; // We coalesce the order column to 0 when it's compared to the cmp table since the aggregations joins // might return NULL on relations that have no connected records @@ -493,7 +493,7 @@ fn cursor_order_def_aggregation_rel( // cf: part #2 of the SQL query above, when a field is nullable. let fks = foreign_keys_from_order_path(&order_by.path, &order_by_def.joins); - let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::integer(0).into()]; + let coalesce_exprs: Vec = vec![order_by_def.order_column.clone(), Value::int32(0).into()]; // We coalesce the order column to 0 when it's compared to the cmp table since the aggregations joins // might return NULL on relations that have no connected records let order_column: Expression = coalesce(coalesce_exprs).into(); diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 26796bf79121..099967177b55 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -557,8 +557,8 @@ impl FilterVisitorExt for FilterVisitor { ScalarListCondition::ContainsSome(ConditionListValue::FieldRef(field_ref)) => { comparable.compare_raw("&&", field_ref.aliased_col(alias, ctx)) } - ScalarListCondition::IsEmpty(true) => comparable.compare_raw("=", Value::Array(Some(vec![])).raw()), - ScalarListCondition::IsEmpty(false) => comparable.compare_raw("<>", Value::Array(Some(vec![])).raw()), + ScalarListCondition::IsEmpty(true) => comparable.compare_raw("=", ValueType::Array(Some(vec![])).raw()), + ScalarListCondition::IsEmpty(false) => comparable.compare_raw("<>", ValueType::Array(Some(vec![])).raw()), }; ConditionTree::single(condition) @@ -1120,7 +1120,7 @@ fn convert_pv<'a>(field: &ScalarFieldRef, pv: PrismaValue, ctx: &Context<'_>) -> } fn convert_list_pv<'a>(field: &ScalarFieldRef, values: Vec, ctx: &Context<'_>) -> Expression<'a> { - Value::Array(Some(values.into_iter().map(|val| field.value(val, ctx)).collect())).into() + Expression::from(Value::array(values.into_iter().map(|val| field.value(val, ctx)))) } fn convert_pvs<'a>(fields: &[ScalarFieldRef], values: Vec, ctx: &Context<'_>) -> Vec> { diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index 1250fbf88f67..b8ea590f25dc 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs 
@@ -3,7 +3,7 @@ use chrono::Utc; use prisma_models::{ScalarField, TypeIdentifier}; use prisma_value::PrismaValue; use quaint::{ - ast::{EnumName, Value}, + ast::{EnumName, Value, ValueType}, prelude::{EnumVariant, TypeDataLength, TypeFamily}, }; @@ -27,7 +27,7 @@ impl ScalarFieldExt for ScalarField { .map(ToOwned::to_owned) .or(Some(ctx.schema_name().to_owned())); - Value::enum_variant_with_name(e, enum_name, schema_name) + Value::enum_variant_with_name(e, EnumName::new(enum_name, schema_name)) } (PrismaValue::List(vals), TypeIdentifier::Enum(enum_id)) => { let enum_walker = self.dm.clone().zip(enum_id); @@ -43,21 +43,21 @@ impl ScalarFieldExt for ScalarField { .map(ToOwned::to_owned) .or(Some(ctx.schema_name().to_owned())); - Value::EnumArray(Some(variants), Some(EnumName::new(enum_name, schema_name))) + Value::enum_array_with_name(variants, EnumName::new(enum_name, schema_name)) } (PrismaValue::Enum(e), _) => e.into(), (PrismaValue::Int(i), _) => i.into(), (PrismaValue::BigInt(i), _) => i.into(), (PrismaValue::Uuid(u), _) => u.to_string().into(), - (PrismaValue::List(l), _) => Value::Array(Some(l.into_iter().map(|x| self.value(x, ctx)).collect())), - (PrismaValue::Json(s), _) => Value::Json(Some(serde_json::from_str::(&s).unwrap())), - (PrismaValue::Bytes(b), _) => Value::Bytes(Some(b.into())), + (PrismaValue::List(l), _) => Value::array(l.into_iter().map(|x| self.value(x, ctx))), + (PrismaValue::Json(s), _) => Value::json(serde_json::from_str::(&s).unwrap()), + (PrismaValue::Bytes(b), _) => Value::bytes(b), (PrismaValue::Object(_), _) => unimplemented!(), (PrismaValue::Null, ident) => match ident { - TypeIdentifier::String => Value::Text(None), - TypeIdentifier::Float => Value::Numeric(None), - TypeIdentifier::Decimal => Value::Numeric(None), - TypeIdentifier::Boolean => Value::Boolean(None), + TypeIdentifier::String => Value::null_text(), + TypeIdentifier::Float => Value::null_numeric(), + TypeIdentifier::Decimal => Value::null_numeric(), + TypeIdentifier::Boolean => Value::null_boolean(), TypeIdentifier::Enum(enum_id) => { let enum_walker = self.dm.clone().zip(enum_id); let enum_name = enum_walker.db_name().to_owned(); @@ -66,14 +66,14 @@ impl ScalarFieldExt for ScalarField { .map(ToOwned::to_owned) .or(Some(ctx.schema_name().to_owned())); - Value::Enum(None, Some(EnumName::new(enum_name, schema_name))) + ValueType::Enum(None, Some(EnumName::new(enum_name, schema_name))).into_value() } - TypeIdentifier::Json => Value::Json(None), - TypeIdentifier::DateTime => Value::DateTime(None), - TypeIdentifier::UUID => Value::Uuid(None), - TypeIdentifier::Int => Value::Int32(None), - TypeIdentifier::BigInt => Value::Int64(None), - TypeIdentifier::Bytes => Value::Bytes(None), + TypeIdentifier::Json => Value::null_json(), + TypeIdentifier::DateTime => Value::null_datetime(), + TypeIdentifier::UUID => Value::null_uuid(), + TypeIdentifier::Int => Value::null_int32(), + TypeIdentifier::BigInt => Value::null_int64(), + TypeIdentifier::Bytes => Value::null_bytes(), TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), }, } @@ -117,10 +117,10 @@ pub fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> { PrismaValue::Int(i) => i.into(), PrismaValue::BigInt(i) => i.into(), PrismaValue::Uuid(u) => u.to_string().into(), - PrismaValue::List(l) => Value::Array(Some(l.into_iter().map(convert_lossy).collect())), - PrismaValue::Json(s) => Value::Json(serde_json::from_str(&s).unwrap()), - PrismaValue::Bytes(b) => Value::Bytes(Some(b.into())), - PrismaValue::Null => 
Value::Int32(None), // Can't tell which type the null is supposed to be. + PrismaValue::List(l) => Value::array(l.into_iter().map(convert_lossy)), + PrismaValue::Json(s) => Value::json(serde_json::from_str(&s).unwrap()), + PrismaValue::Bytes(b) => Value::bytes(b), + PrismaValue::Null => Value::null_int32(), // Can't tell which type the null is supposed to be. PrismaValue::Object(_) => unimplemented!(), } } diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/connectors/sql-query-connector/src/ordering.rs index 7ab1bc03d3ce..cf49698405ef 100644 --- a/query-engine/connectors/sql-query-connector/src/ordering.rs +++ b/query-engine/connectors/sql-query-connector/src/ordering.rs @@ -115,7 +115,7 @@ impl OrderByBuilder { let (joins, order_column) = self.compute_joins_aggregation(order_by, ctx); let order_definition: OrderDefinition = match order_by.sort_aggregation { SortAggregation::Count => { - let exprs: Vec = vec![order_column.clone().into(), Value::integer(0).into()]; + let exprs: Vec = vec![order_column.clone().into(), Value::int32(0).into()]; // We coalesce the order by expr to 0 so that if there's no relation, // `COALESCE(NULL, 0)` will return `0`, thus preserving the order diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 9f0b69e73ff7..250ee7d9420f 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -3,7 +3,7 @@ use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; use connector_interface::{coerce_null_to_zero_value, AggregationResult, AggregationSelection}; use prisma_models::{ConversionFailure, FieldArity, PrismaValue, Record, TypeIdentifier}; -use quaint::{ast::Value, connector::ResultRow}; +use quaint::{connector::ResultRow, Value, ValueType}; use std::{io, str::FromStr}; use uuid::Uuid; @@ -92,12 +92,12 @@ impl ToSqlRow for ResultRow { for (i, p_value) in self.into_iter().enumerate().take(row_width) { let pv = match (meta[i].identifier(), meta[i].arity()) { - (type_identifier, FieldArity::List) => match p_value { + (type_identifier, FieldArity::List) => match p_value.typed { value if value.is_null() => Ok(PrismaValue::List(Vec::new())), - Value::Array(None) => Ok(PrismaValue::List(Vec::new())), - Value::Array(Some(l)) => l + ValueType::Array(None) => Ok(PrismaValue::List(Vec::new())), + ValueType::Array(Some(l)) => l .into_iter() - .map(|p_value| row_value_to_prisma_value(p_value, meta[i])) + .map(|val| row_value_to_prisma_value(val, meta[i])) .collect::>>() .map(PrismaValue::List), _ => { @@ -140,35 +140,35 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result }; Ok(match meta.identifier() { - TypeIdentifier::Boolean => match p_value { + TypeIdentifier::Boolean => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Int32(Some(i)) => PrismaValue::Boolean(i != 0), - Value::Int64(Some(i)) => PrismaValue::Boolean(i != 0), - Value::Boolean(Some(b)) => PrismaValue::Boolean(b), - Value::Bytes(Some(bytes)) if bytes.as_ref() == [0u8] => PrismaValue::Boolean(false), - Value::Bytes(Some(bytes)) if bytes.as_ref() == [1u8] => PrismaValue::Boolean(true), + ValueType::Int32(Some(i)) => PrismaValue::Boolean(i != 0), + ValueType::Int64(Some(i)) => PrismaValue::Boolean(i != 0), + ValueType::Boolean(Some(b)) => PrismaValue::Boolean(b), + ValueType::Bytes(Some(bytes)) if bytes.as_ref() == [0u8] => 
PrismaValue::Boolean(false), + ValueType::Bytes(Some(bytes)) if bytes.as_ref() == [1u8] => PrismaValue::Boolean(true), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Enum(_) => match p_value { + TypeIdentifier::Enum(_) => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Enum(Some(cow), _) => PrismaValue::Enum(cow.into_owned()), - Value::Text(Some(cow)) => PrismaValue::Enum(cow.into_owned()), + ValueType::Enum(Some(cow), _) => PrismaValue::Enum(cow.into_owned()), + ValueType::Text(Some(cow)) => PrismaValue::Enum(cow.into_owned()), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Json => match p_value { + TypeIdentifier::Json => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Text(Some(json)) => PrismaValue::Json(json.into()), - Value::Json(Some(json)) => PrismaValue::Json(json.to_string()), + ValueType::Text(Some(json)) => PrismaValue::Json(json.into()), + ValueType::Json(Some(json)) => PrismaValue::Json(json.to_string()), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::UUID => match p_value { + TypeIdentifier::UUID => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Text(Some(uuid)) => PrismaValue::Uuid(Uuid::parse_str(&uuid)?), - Value::Uuid(Some(uuid)) => PrismaValue::Uuid(uuid), + ValueType::Text(Some(uuid)) => PrismaValue::Uuid(Uuid::parse_str(&uuid)?), + ValueType::Uuid(Some(uuid)) => PrismaValue::Uuid(uuid), _ => return Err(create_error(&p_value)), }, - TypeIdentifier::DateTime => match p_value { + TypeIdentifier::DateTime => match p_value.typed { value if value.is_null() => PrismaValue::Null, value if value.is_integer() => { let ts = value.as_integer().unwrap(); @@ -179,47 +179,47 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result PrismaValue::DateTime(datetime.into()) } - Value::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), - Value::Text(Some(ref dt_string)) => { + ValueType::DateTime(Some(dt)) => PrismaValue::DateTime(dt.into()), + ValueType::Text(Some(ref dt_string)) => { let dt = DateTime::parse_from_rfc3339(dt_string) .or_else(|_| DateTime::parse_from_rfc2822(dt_string)) .map_err(|_| create_error(&p_value))?; PrismaValue::DateTime(dt.with_timezone(&Utc).into()) } - Value::Date(Some(d)) => { + ValueType::Date(Some(d)) => { let dt = DateTime::::from_utc(d.and_hms_opt(0, 0, 0).unwrap(), Utc); PrismaValue::DateTime(dt.into()) } - Value::Time(Some(t)) => { + ValueType::Time(Some(t)) => { let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); let dt = DateTime::::from_utc(d.and_time(t), Utc); PrismaValue::DateTime(dt.into()) } _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Float | TypeIdentifier::Decimal => match p_value { + TypeIdentifier::Float | TypeIdentifier::Decimal => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Numeric(Some(f)) => PrismaValue::Float(f.normalized()), - Value::Double(Some(f)) => match f { + ValueType::Numeric(Some(f)) => PrismaValue::Float(f.normalized()), + ValueType::Double(Some(f)) => match f { f if f.is_nan() => return Err(create_error(&p_value)), f if f.is_infinite() => return Err(create_error(&p_value)), _ => PrismaValue::Float(BigDecimal::from_f64(f).unwrap().normalized()), }, - Value::Float(Some(f)) => match f { + ValueType::Float(Some(f)) => match f { f if f.is_nan() => return Err(create_error(&p_value)), f if f.is_infinite() => return Err(create_error(&p_value)), _ => PrismaValue::Float(BigDecimal::from_f32(f).unwrap().normalized()), 
}, - Value::Int32(Some(i)) => match BigDecimal::from_i32(i) { + ValueType::Int32(Some(i)) => match BigDecimal::from_i32(i) { Some(dec) => PrismaValue::Float(dec), None => return Err(create_error(&p_value)), }, - Value::Int64(Some(i)) => match BigDecimal::from_i64(i) { + ValueType::Int64(Some(i)) => match BigDecimal::from_i64(i) { Some(dec) => PrismaValue::Float(dec), None => return Err(create_error(&p_value)), }, - Value::Text(_) | Value::Bytes(_) => { + ValueType::Text(_) | ValueType::Bytes(_) => { let dec: BigDecimal = p_value .as_str() .expect("text/bytes as str") @@ -230,61 +230,61 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result } _ => return Err(create_error(&p_value)), }, - TypeIdentifier::Int => match p_value { + TypeIdentifier::Int => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Int32(Some(i)) => PrismaValue::Int(i as i64), - Value::Int64(Some(i)) => PrismaValue::Int(i), - Value::Bytes(Some(bytes)) => PrismaValue::Int(interpret_bytes_as_i64(&bytes)), - Value::Text(Some(ref txt)) => { + ValueType::Int32(Some(i)) => PrismaValue::Int(i as i64), + ValueType::Int64(Some(i)) => PrismaValue::Int(i), + ValueType::Bytes(Some(bytes)) => PrismaValue::Int(interpret_bytes_as_i64(&bytes)), + ValueType::Text(Some(ref txt)) => { PrismaValue::Int(i64::from_str(txt.trim_start_matches('\0')).map_err(|_| create_error(&p_value))?) } - Value::Float(Some(f)) => { + ValueType::Float(Some(f)) => { sanitize_f32(f, "Int")?; PrismaValue::Int(big_decimal_to_i64(BigDecimal::from_f32(f).unwrap(), "Int")?) } - Value::Double(Some(f)) => { + ValueType::Double(Some(f)) => { sanitize_f64(f, "Int")?; PrismaValue::Int(big_decimal_to_i64(BigDecimal::from_f64(f).unwrap(), "Int")?) } - Value::Numeric(Some(dec)) => PrismaValue::Int(big_decimal_to_i64(dec, "Int")?), - Value::Boolean(Some(bool)) => PrismaValue::Int(bool as i64), + ValueType::Numeric(Some(dec)) => PrismaValue::Int(big_decimal_to_i64(dec, "Int")?), + ValueType::Boolean(Some(bool)) => PrismaValue::Int(bool as i64), other => to_prisma_value(other)?, }, - TypeIdentifier::BigInt => match p_value { + TypeIdentifier::BigInt => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Int32(Some(i)) => PrismaValue::BigInt(i as i64), - Value::Int64(Some(i)) => PrismaValue::BigInt(i), - Value::Bytes(Some(bytes)) => PrismaValue::BigInt(interpret_bytes_as_i64(&bytes)), - Value::Text(Some(ref txt)) => { + ValueType::Int32(Some(i)) => PrismaValue::BigInt(i as i64), + ValueType::Int64(Some(i)) => PrismaValue::BigInt(i), + ValueType::Bytes(Some(bytes)) => PrismaValue::BigInt(interpret_bytes_as_i64(&bytes)), + ValueType::Text(Some(ref txt)) => { PrismaValue::BigInt(i64::from_str(txt.trim_start_matches('\0')).map_err(|_| create_error(&p_value))?) } - Value::Float(Some(f)) => { + ValueType::Float(Some(f)) => { sanitize_f32(f, "BigInt")?; PrismaValue::BigInt(big_decimal_to_i64(BigDecimal::from_f32(f).unwrap(), "BigInt")?) } - Value::Double(Some(f)) => { + ValueType::Double(Some(f)) => { sanitize_f64(f, "BigInt")?; PrismaValue::BigInt(big_decimal_to_i64(BigDecimal::from_f64(f).unwrap(), "BigInt")?) 
} - Value::Numeric(Some(dec)) => PrismaValue::BigInt(big_decimal_to_i64(dec, "BigInt")?), - Value::Boolean(Some(bool)) => PrismaValue::BigInt(bool as i64), + ValueType::Numeric(Some(dec)) => PrismaValue::BigInt(big_decimal_to_i64(dec, "BigInt")?), + ValueType::Boolean(Some(bool)) => PrismaValue::BigInt(bool as i64), other => to_prisma_value(other)?, }, - TypeIdentifier::String => match p_value { + TypeIdentifier::String => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Uuid(Some(uuid)) => PrismaValue::String(uuid.to_string()), - Value::Json(Some(ref json_value)) => { + ValueType::Uuid(Some(uuid)) => PrismaValue::String(uuid.to_string()), + ValueType::Json(Some(ref json_value)) => { PrismaValue::String(serde_json::to_string(json_value).map_err(|_| create_error(&p_value))?) } other => to_prisma_value(other)?, }, - TypeIdentifier::Bytes => match p_value { + TypeIdentifier::Bytes => match p_value.typed { value if value.is_null() => PrismaValue::Null, - Value::Bytes(Some(bytes)) => PrismaValue::Bytes(bytes.into()), + ValueType::Bytes(Some(bytes)) => PrismaValue::Bytes(bytes.into()), _ => return Err(create_error(&p_value)), }, TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), diff --git a/query-engine/connectors/sql-query-connector/src/value.rs b/query-engine/connectors/sql-query-connector/src/value.rs index 8ccf89288071..4c31fc9eedb9 100644 --- a/query-engine/connectors/sql-query-connector/src/value.rs +++ b/query-engine/connectors/sql-query-connector/src/value.rs @@ -2,44 +2,44 @@ use crate::row::{sanitize_f32, sanitize_f64}; use bigdecimal::{BigDecimal, FromPrimitive}; use chrono::{DateTime, NaiveDate, Utc}; use prisma_models::PrismaValue; -use quaint::Value; +use quaint::ValueType; -pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result { - let val = match quaint_value { - Value::Int32(i) => i.map(|i| PrismaValue::Int(i as i64)).unwrap_or(PrismaValue::Null), - Value::Int64(i) => i.map(PrismaValue::Int).unwrap_or(PrismaValue::Null), - Value::Float(Some(f)) => { +pub fn to_prisma_value<'a, T: Into>>(qv: T) -> crate::Result { + let val = match qv.into() { + ValueType::Int32(i) => i.map(|i| PrismaValue::Int(i as i64)).unwrap_or(PrismaValue::Null), + ValueType::Int64(i) => i.map(PrismaValue::Int).unwrap_or(PrismaValue::Null), + ValueType::Float(Some(f)) => { sanitize_f32(f, "BigDecimal")?; PrismaValue::Float(BigDecimal::from_f32(f).unwrap().normalized()) } - Value::Float(None) => PrismaValue::Null, + ValueType::Float(None) => PrismaValue::Null, - Value::Double(Some(f)) => { + ValueType::Double(Some(f)) => { sanitize_f64(f, "BigDecimal")?; PrismaValue::Float(BigDecimal::from_f64(f).unwrap().normalized()) } - Value::Double(None) => PrismaValue::Null, + ValueType::Double(None) => PrismaValue::Null, - Value::Numeric(d) => d + ValueType::Numeric(d) => d // chop the trailing zeroes off so javascript doesn't start rounding things wrong .map(|d| PrismaValue::Float(d.normalized())) .unwrap_or(PrismaValue::Null), - Value::Text(s) => s + ValueType::Text(s) => s .map(|s| PrismaValue::String(s.into_owned())) .unwrap_or(PrismaValue::Null), - Value::Enum(s, _) => s + ValueType::Enum(s, _) => s .map(|s| PrismaValue::Enum(s.into_owned())) .unwrap_or(PrismaValue::Null), - Value::Boolean(b) => b.map(PrismaValue::Boolean).unwrap_or(PrismaValue::Null), + ValueType::Boolean(b) => b.map(PrismaValue::Boolean).unwrap_or(PrismaValue::Null), - Value::Array(Some(v)) => { + ValueType::Array(Some(v)) => { let mut res = 
Vec::with_capacity(v.len()); for v in v.into_iter() { @@ -49,33 +49,33 @@ pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result { PrismaValue::List(res) } - Value::Array(None) => PrismaValue::Null, + ValueType::Array(None) => PrismaValue::Null, - Value::EnumArray(Some(v), name) => { + ValueType::EnumArray(Some(v), name) => { let mut res = Vec::with_capacity(v.len()); for v in v.into_iter() { - res.push(to_prisma_value(Value::Enum(Some(v), name.clone()))?); + res.push(to_prisma_value(ValueType::Enum(Some(v), name.clone()))?); } PrismaValue::List(res) } - Value::EnumArray(None, _) => PrismaValue::Null, + ValueType::EnumArray(None, _) => PrismaValue::Null, - Value::Json(val) => val + ValueType::Json(val) => val .map(|val| PrismaValue::Json(val.to_string())) .unwrap_or(PrismaValue::Null), - Value::Uuid(uuid) => uuid.map(PrismaValue::Uuid).unwrap_or(PrismaValue::Null), + ValueType::Uuid(uuid) => uuid.map(PrismaValue::Uuid).unwrap_or(PrismaValue::Null), - Value::Date(d) => d + ValueType::Date(d) => d .map(|d| { let dt = DateTime::::from_utc(d.and_hms_opt(0, 0, 0).unwrap(), Utc); PrismaValue::DateTime(dt.into()) }) .unwrap_or(PrismaValue::Null), - Value::Time(t) => t + ValueType::Time(t) => t .map(|t| { let d = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap(); let dt = DateTime::::from_utc(d.and_time(t), Utc); @@ -83,19 +83,19 @@ pub fn to_prisma_value(quaint_value: Value<'_>) -> crate::Result { }) .unwrap_or(PrismaValue::Null), - Value::DateTime(dt) => dt + ValueType::DateTime(dt) => dt .map(|dt| PrismaValue::DateTime(dt.into())) .unwrap_or(PrismaValue::Null), - Value::Char(c) => c + ValueType::Char(c) => c .map(|c| PrismaValue::String(c.to_string())) .unwrap_or(PrismaValue::Null), - Value::Bytes(bytes) => bytes + ValueType::Bytes(bytes) => bytes .map(|b| PrismaValue::Bytes(b.into_owned())) .unwrap_or(PrismaValue::Null), - Value::Xml(s) => s + ValueType::Xml(s) => s .map(|s| PrismaValue::String(s.into_owned())) .unwrap_or(PrismaValue::Null), }; diff --git a/query-engine/connectors/sql-query-connector/src/value_ext.rs b/query-engine/connectors/sql-query-connector/src/value_ext.rs index b0c42e5af38c..a84c9da0380b 100644 --- a/query-engine/connectors/sql-query-connector/src/value_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/value_ext.rs @@ -11,24 +11,24 @@ impl<'a> IntoTypedJsonExtension for quaint::Value<'a> { return "null".to_owned(); } - let type_name = match self { - quaint::Value::Int32(_) => "int", - quaint::Value::Int64(_) => "bigint", - quaint::Value::Float(_) => "float", - quaint::Value::Double(_) => "double", - quaint::Value::Text(_) => "string", - quaint::Value::Enum(_, _) => "enum", - quaint::Value::Bytes(_) => "bytes", - quaint::Value::Boolean(_) => "bool", - quaint::Value::Char(_) => "char", - quaint::Value::Numeric(_) => "decimal", - quaint::Value::Json(_) => "json", - quaint::Value::Xml(_) => "xml", - quaint::Value::Uuid(_) => "uuid", - quaint::Value::DateTime(_) => "datetime", - quaint::Value::Date(_) => "date", - quaint::Value::Time(_) => "time", - quaint::Value::Array(_) | quaint::Value::EnumArray(_, _) => "array", + let type_name = match self.typed { + quaint::ValueType::Int32(_) => "int", + quaint::ValueType::Int64(_) => "bigint", + quaint::ValueType::Float(_) => "float", + quaint::ValueType::Double(_) => "double", + quaint::ValueType::Text(_) => "string", + quaint::ValueType::Enum(_, _) => "enum", + quaint::ValueType::Bytes(_) => "bytes", + quaint::ValueType::Boolean(_) => "bool", + quaint::ValueType::Char(_) => "char", + quaint::ValueType::Numeric(_) => 
"decimal", + quaint::ValueType::Json(_) => "json", + quaint::ValueType::Xml(_) => "xml", + quaint::ValueType::Uuid(_) => "uuid", + quaint::ValueType::DateTime(_) => "datetime", + quaint::ValueType::Date(_) => "date", + quaint::ValueType::Time(_) => "time", + quaint::ValueType::Array(_) | quaint::ValueType::EnumArray(_, _) => "array", }; type_name.to_owned() @@ -37,12 +37,12 @@ impl<'a> IntoTypedJsonExtension for quaint::Value<'a> { fn as_typed_json(self) -> serde_json::Value { let type_name = self.type_name(); - let json_value = match self { - quaint::Value::Array(Some(values)) => { + let json_value = match self.typed { + quaint::ValueType::Array(Some(values)) => { serde_json::Value::Array(values.into_iter().map(|value| value.as_typed_json()).collect()) } - quaint::Value::Int64(Some(value)) => serde_json::Value::String(value.to_string()), - quaint::Value::Numeric(Some(decimal)) => serde_json::Value::String(decimal.normalized().to_string()), + quaint::ValueType::Int64(Some(value)) => serde_json::Value::String(value.to_string()), + quaint::ValueType::Numeric(Some(decimal)) => serde_json::Value::String(decimal.normalized().to_string()), x => serde_json::Value::from(x), }; diff --git a/query-engine/dmmf/src/tests/tests.rs b/query-engine/dmmf/src/tests/tests.rs index 25f83e64447d..53f11a455ee3 100644 --- a/query-engine/dmmf/src/tests/tests.rs +++ b/query-engine/dmmf/src/tests/tests.rs @@ -19,7 +19,10 @@ fn views_ignore() { } fn assert_comment(actual: Option<&String>, expected: &str) { - assert!(actual.is_some_and(|c| c.as_str() == expected)) + match actual { + Some(actual) => assert_eq!(actual.as_str(), expected), + None => panic!("Expected comment: {}", expected), + } } #[test] diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index 1e32dc3b8306..2d469a5ab7c3 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -1,6 +1,7 @@ use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::NapiValue; use quaint::ast::Value as QuaintValue; +use quaint::ast::ValueType as QuaintValueType; use serde::Serialize; use serde_json::value::Value as JsonValue; @@ -61,27 +62,27 @@ impl ToNapiValue for JSArg { pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result> { let mut values = Vec::with_capacity(params.len()); - for pv in params { - let res = match pv { - QuaintValue::Json(s) => match s { + for qv in params { + let res = match &qv.typed { + QuaintValueType::Json(s) => match s { Some(ref s) => { let json_str = serde_json::to_string(s)?; JSArg::RawString(json_str) } None => JsonValue::Null.into(), }, - QuaintValue::Bytes(bytes) => match bytes { + QuaintValueType::Bytes(bytes) => match bytes { Some(bytes) => JSArg::Buffer(bytes.to_vec()), None => JsonValue::Null.into(), }, - quaint_value @ QuaintValue::Numeric(bd) => match bd { + quaint_value @ QuaintValueType::Numeric(bd) => match bd { Some(bd) => match bd.to_string().parse::() { Ok(double) => JSArg::from(JsonValue::from(double)), Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), }, None => JsonValue::Null.into(), }, - QuaintValue::Array(Some(items)) => JSArg::Array(conv_params(items)?), + QuaintValueType::Array(Some(items)) => JSArg::Array(conv_params(items)?), quaint_value => JSArg::from(JsonValue::from(quaint_value.clone())), }; diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index aeaef30664d0..bdcab93a0c55 100644 --- 
a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -269,7 +269,7 @@ fn js_value_to_quaint( .parse::() .map(QuaintValue::int32) .map_err(|e| conversion_error!("string-encoded number must be an i32, got {s}: {e}")), - serde_json::Value::Null => Ok(QuaintValue::Int32(None)), + serde_json::Value::Null => Ok(QuaintValue::null_int32()), mismatch => Err(conversion_error!( "expected an i32 number in column {column_name}, found {mismatch}" )), @@ -283,7 +283,7 @@ fn js_value_to_quaint( .parse::() .map(QuaintValue::int64) .map_err(|e| conversion_error!("string-encoded number must be an i64, got {s}: {e}")), - serde_json::Value::Null => Ok(QuaintValue::Int64(None)), + serde_json::Value::Null => Ok(QuaintValue::null_int64()), mismatch => Err(conversion_error!( "expected a string or number in column {column_name}, found {mismatch}" )), @@ -296,7 +296,7 @@ fn js_value_to_quaint( .ok_or(conversion_error!("number must be a float, got {n}")) .and_then(f64_to_f32) .map(QuaintValue::float), - serde_json::Value::Null => Ok(QuaintValue::Float(None)), + serde_json::Value::Null => Ok(QuaintValue::null_float()), mismatch => Err(conversion_error!( "expected an f32 number in column {column_name}, found {mismatch}" )), @@ -306,7 +306,7 @@ fn js_value_to_quaint( .as_f64() .map(QuaintValue::double) .ok_or(conversion_error!("number must be a f64, got {n}")), - serde_json::Value::Null => Ok(QuaintValue::Double(None)), + serde_json::Value::Null => Ok(QuaintValue::null_double()), mismatch => Err(conversion_error!( "expected an f64 number in column {column_name}, found {mismatch}" )), @@ -320,14 +320,14 @@ fn js_value_to_quaint( .and_then(BigDecimal::from_f64) .ok_or(conversion_error!("number must be an f64, got {n}")) .map(QuaintValue::numeric), - serde_json::Value::Null => Ok(QuaintValue::Numeric(None)), + serde_json::Value::Null => Ok(QuaintValue::null_numeric()), mismatch => Err(conversion_error!( "expected a string-encoded number in column {column_name}, found {mismatch}", )), }, ColumnType::Boolean => match json_value { serde_json::Value::Bool(b) => Ok(QuaintValue::boolean(b)), - serde_json::Value::Null => Ok(QuaintValue::Boolean(None)), + serde_json::Value::Null => Ok(QuaintValue::null_boolean()), serde_json::Value::Number(n) => match n.as_i64() { Some(0) => Ok(QuaintValue::boolean(false)), Some(1) => Ok(QuaintValue::boolean(true)), @@ -345,15 +345,18 @@ fn js_value_to_quaint( )), }, ColumnType::Char => match json_value { - serde_json::Value::String(s) => Ok(QuaintValue::Char(s.chars().next())), - serde_json::Value::Null => Ok(QuaintValue::Char(None)), + serde_json::Value::String(s) => match s.chars().next() { + Some(c) => Ok(QuaintValue::character(c)), + None => Ok(QuaintValue::null_character()), + }, + serde_json::Value::Null => Ok(QuaintValue::null_character()), mismatch => Err(conversion_error!( "expected a string in column {column_name}, found {mismatch}" )), }, ColumnType::Text => match json_value { serde_json::Value::String(s) => Ok(QuaintValue::text(s)), - serde_json::Value::Null => Ok(QuaintValue::Text(None)), + serde_json::Value::Null => Ok(QuaintValue::null_text()), mismatch => Err(conversion_error!( "expected a string in column {column_name}, found {mismatch}" )), @@ -362,7 +365,7 @@ fn js_value_to_quaint( serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") .map(QuaintValue::date) .map_err(|_| conversion_error!("expected a date string, got {s}")), - serde_json::Value::Null => Ok(QuaintValue::Date(None)), + serde_json::Value::Null => 
Ok(QuaintValue::null_date()), mismatch => Err(conversion_error!( "expected a string in column {column_name}, found {mismatch}" )), @@ -371,7 +374,7 @@ fn js_value_to_quaint( serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S") .map(QuaintValue::time) .map_err(|_| conversion_error!("expected a time string, got {s}")), - serde_json::Value::Null => Ok(QuaintValue::Time(None)), + serde_json::Value::Null => Ok(QuaintValue::null_time()), mismatch => Err(conversion_error!( "expected a string in column {column_name}, found {mismatch}" )), @@ -382,7 +385,7 @@ fn js_value_to_quaint( .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) .map(QuaintValue::datetime) .map_err(|_| conversion_error!("expected a datetime string, found {s}")), - serde_json::Value::Null => Ok(QuaintValue::DateTime(None)), + serde_json::Value::Null => Ok(QuaintValue::null_datetime()), mismatch => Err(conversion_error!( "expected a string in column {column_name}, found {mismatch}" )), @@ -390,7 +393,7 @@ fn js_value_to_quaint( ColumnType::Json => { match json_value { // DbNull - serde_json::Value::Null => Ok(QuaintValue::Json(None)), + serde_json::Value::Null => Ok(QuaintValue::null_json()), // JsonNull serde_json::Value::String(s) if s == "$__prisma_null" => Ok(QuaintValue::json(serde_json::Value::Null)), json => Ok(QuaintValue::json(json)), @@ -398,20 +401,20 @@ fn js_value_to_quaint( } ColumnType::Enum => match json_value { serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), - serde_json::Value::Null => Ok(QuaintValue::Enum(None, None)), + serde_json::Value::Null => Ok(QuaintValue::null_enum()), mismatch => Err(conversion_error!( "expected a string in column {column_name}, found {mismatch}" )), }, ColumnType::Bytes => match json_value { - serde_json::Value::String(s) => Ok(QuaintValue::Bytes(Some(s.into_bytes().into()))), + serde_json::Value::String(s) => Ok(QuaintValue::bytes(s.into_bytes())), serde_json::Value::Array(array) => array .iter() .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) .collect::>>() .map(QuaintValue::bytes) .ok_or(conversion_error!("elements of the array must be u8")), - serde_json::Value::Null => Ok(QuaintValue::Bytes(None)), + serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( "expected a string or an array in column {column_name}, found {mismatch}", )), @@ -420,7 +423,7 @@ fn js_value_to_quaint( serde_json::Value::String(s) => uuid::Uuid::parse_str(&s) .map(QuaintValue::uuid) .map_err(|_| conversion_error!("Expected a UUID string")), - serde_json::Value::Null => Ok(QuaintValue::Bytes(None)), + serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( "Expected a UUID string in column {column_name}, found {mismatch}" )), @@ -464,14 +467,14 @@ fn js_array_to_quaint( column_name: &str, ) -> quaint::Result> { match json_value { - serde_json::Value::Array(array) => Ok(QuaintValue::Array(Some( + serde_json::Value::Array(array) => Ok(QuaintValue::array( array .into_iter() .enumerate() .map(|(index, elem)| js_value_to_quaint(elem, base_type, &format!("{column_name}[{index}]"))) .collect::>>()?, - ))), - serde_json::Value::Null => Ok(QuaintValue::Array(None)), + )), + serde_json::Value::Null => Ok(QuaintValue::null_array()), mismatch => Err(conversion_error!( "expected an array in column {column_name}, found {mismatch}", )), @@ -606,7 +609,6 @@ fn f64_to_f32(x: f64) -> quaint::Result { Err(conversion_error!("f32 overflow during conversion")) } } - 
#[cfg(test)] mod proxy_test { use num_bigint::BigInt; @@ -615,10 +617,10 @@ mod proxy_test { use super::*; #[track_caller] - fn test_null(quaint_none: QuaintValue, column_type: ColumnType) { + fn test_null<'a, T: Into>>(quaint_none: T, column_type: ColumnType) { let json_value = serde_json::Value::Null; let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, quaint_none); + assert_eq!(quaint_value, quaint_none.into()); } #[test] @@ -626,31 +628,31 @@ mod proxy_test { let column_type = ColumnType::Int32; // null - test_null(QuaintValue::Int32(None), column_type); + test_null(QuaintValue::null_int32(), column_type); // 0 let n: i32 = 0; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + assert_eq!(quaint_value, QuaintValue::int32(n)); // max let n: i32 = i32::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + assert_eq!(quaint_value, QuaintValue::int32(n)); // min let n: i32 = i32::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from(n)); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + assert_eq!(quaint_value, QuaintValue::int32(n)); // string-encoded let n = i32::MAX; let json_value = serde_json::Value::String(n.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int32(Some(n))); + assert_eq!(quaint_value, QuaintValue::int32(n)); } #[test] @@ -658,31 +660,31 @@ mod proxy_test { let column_type = ColumnType::Int64; // null - test_null(QuaintValue::Int64(None), column_type); + test_null(QuaintValue::null_int64(), column_type); // 0 let n: i64 = 0; let json_value = serde_json::Value::String(n.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + assert_eq!(quaint_value, QuaintValue::int64(n)); // max let n: i64 = i64::MAX; let json_value = serde_json::Value::String(n.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + assert_eq!(quaint_value, QuaintValue::int64(n)); // min let n: i64 = i64::MIN; let json_value = serde_json::Value::String(n.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + assert_eq!(quaint_value, QuaintValue::int64(n)); // number-encoded let n: i64 = (1 << 53) - 1; // max JS safe integer let json_value = serde_json::Value::Number(serde_json::Number::from(n)); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Int64(Some(n))); + assert_eq!(quaint_value, QuaintValue::int64(n)); } #[test] @@ -690,25 +692,25 @@ mod proxy_test { let column_type = ColumnType::Float; // null - test_null(QuaintValue::Float(None), column_type); + test_null(QuaintValue::null_float(), column_type); // 0 let n: f32 = 0.0; let json_value = 
serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Float(Some(n))); + assert_eq!(quaint_value, QuaintValue::float(n)); // max let n: f32 = f32::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Float(Some(n))); + assert_eq!(quaint_value, QuaintValue::float(n)); // min let n: f32 = f32::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n.into()).unwrap()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Float(Some(n))); + assert_eq!(quaint_value, QuaintValue::float(n)); } #[test] @@ -716,25 +718,25 @@ mod proxy_test { let column_type = ColumnType::Double; // null - test_null(QuaintValue::Double(None), column_type); + test_null(QuaintValue::null_double(), column_type); // 0 let n: f64 = 0.0; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Double(Some(n))); + assert_eq!(quaint_value, QuaintValue::double(n)); // max let n: f64 = f64::MAX; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Double(Some(n))); + assert_eq!(quaint_value, QuaintValue::double(n)); // min let n: f64 = f64::MIN; let json_value = serde_json::Value::Number(serde_json::Number::from_f64(n).unwrap()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Double(Some(n))); + assert_eq!(quaint_value, QuaintValue::double(n)); } #[test] @@ -742,21 +744,21 @@ mod proxy_test { let column_type = ColumnType::Numeric; // null - test_null(QuaintValue::Numeric(None), column_type); + test_null(QuaintValue::null_numeric(), column_type); let n_as_string = "1234.99"; let decimal = BigDecimal::new(BigInt::parse_bytes(b"123499", 10).unwrap(), 2); let json_value = serde_json::Value::String(n_as_string.into()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Numeric(Some(decimal))); + assert_eq!(quaint_value, QuaintValue::numeric(decimal)); let n_as_string = "1234.999999"; let decimal = BigDecimal::new(BigInt::parse_bytes(b"1234999999", 10).unwrap(), 6); let json_value = serde_json::Value::String(n_as_string.into()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Numeric(Some(decimal))); + assert_eq!(quaint_value, QuaintValue::numeric(decimal)); } #[test] @@ -764,18 +766,18 @@ mod proxy_test { let column_type = ColumnType::Boolean; // null - test_null(QuaintValue::Boolean(None), column_type); + test_null(QuaintValue::null_boolean(), column_type); // true for truthy_value in [json!(true), json!(1), json!("true"), json!("TRUE"), json!("1")] { let quaint_value = js_value_to_quaint(truthy_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Boolean(Some(true))); + assert_eq!(quaint_value, QuaintValue::boolean(true)); } // false for 
falsy_value in [json!(false), json!(0), json!("false"), json!("FALSE"), json!("0")] { let quaint_value = js_value_to_quaint(falsy_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Boolean(Some(false))); + assert_eq!(quaint_value, QuaintValue::boolean(false)); } } @@ -784,12 +786,12 @@ mod proxy_test { let column_type = ColumnType::Char; // null - test_null(QuaintValue::Char(None), column_type); + test_null(QuaintValue::null_character(), column_type); let c = 'c'; let json_value = serde_json::Value::String(c.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Char(Some(c))); + assert_eq!(quaint_value, QuaintValue::character(c)); } #[test] @@ -797,12 +799,12 @@ mod proxy_test { let column_type = ColumnType::Text; // null - test_null(QuaintValue::Text(None), column_type); + test_null(QuaintValue::null_text(), column_type); let s = "some text"; let json_value = serde_json::Value::String(s.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Text(Some(s.into()))); + assert_eq!(quaint_value, QuaintValue::text(s)); } #[test] @@ -810,14 +812,14 @@ mod proxy_test { let column_type = ColumnType::Date; // null - test_null(QuaintValue::Date(None), column_type); + test_null(QuaintValue::null_date(), column_type); let s = "2023-01-01"; let json_value = serde_json::Value::String(s.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); - assert_eq!(quaint_value, QuaintValue::Date(Some(date))); + assert_eq!(quaint_value, QuaintValue::date(date)); } #[test] @@ -825,14 +827,14 @@ mod proxy_test { let column_type = ColumnType::Time; // null - test_null(QuaintValue::Time(None), column_type); + test_null(QuaintValue::null_time(), column_type); let s = "23:59:59"; let json_value = serde_json::Value::String(s.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); - assert_eq!(quaint_value, QuaintValue::Time(Some(time))); + assert_eq!(quaint_value, QuaintValue::time(time)); } #[test] @@ -840,7 +842,7 @@ mod proxy_test { let column_type = ColumnType::DateTime; // null - test_null(QuaintValue::DateTime(None), column_type); + test_null(QuaintValue::null_datetime(), column_type); let s = "2023-01-01 23:59:59.415"; let json_value = serde_json::Value::String(s.to_string()); @@ -851,7 +853,7 @@ mod proxy_test { .and_hms_milli_opt(23, 59, 59, 415) .unwrap(); let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::DateTime(Some(datetime))); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); let s = "2023-01-01 23:59:59.123456"; let json_value = serde_json::Value::String(s.to_string()); @@ -862,7 +864,7 @@ mod proxy_test { .and_hms_micro_opt(23, 59, 59, 123_456) .unwrap(); let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::DateTime(Some(datetime))); + assert_eq!(quaint_value, QuaintValue::datetime(datetime)); let s = "2023-01-01 23:59:59"; let json_value = serde_json::Value::String(s.to_string()); @@ -873,7 +875,7 @@ mod proxy_test { .and_hms_milli_opt(23, 59, 59, 0) .unwrap(); let datetime = DateTime::from_utc(datetime, Utc); - assert_eq!(quaint_value, QuaintValue::DateTime(Some(datetime))); + 
assert_eq!(quaint_value, QuaintValue::datetime(datetime)); } #[test] @@ -881,7 +883,7 @@ mod proxy_test { let column_type = ColumnType::Json; // null - test_null(QuaintValue::Json(None), column_type); + test_null(QuaintValue::null_json(), column_type); let json = json!({ "key": "value", @@ -894,7 +896,7 @@ mod proxy_test { }); let json_value = json.clone(); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Json(Some(json.clone()))); + assert_eq!(quaint_value, QuaintValue::json(json.clone())); } #[test] @@ -902,30 +904,30 @@ mod proxy_test { let column_type = ColumnType::Enum; // null - test_null(QuaintValue::Enum(None, None), column_type); + test_null(QuaintValue::null_enum(), column_type); let s = "some enum variant"; let json_value = serde_json::Value::String(s.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - assert_eq!(quaint_value, QuaintValue::Enum(Some(s.into()), None)); + assert_eq!(quaint_value, QuaintValue::enum_variant(s)); } #[test] fn js_int32_array_to_quaint() { let column_type = ColumnType::Int32Array; - test_null(QuaintValue::Array(None), column_type); + test_null(QuaintValue::null_array(), column_type); let json_value = json!([1, 2, 3]); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!( quaint_value, - QuaintValue::Array(Some(vec![ + QuaintValue::array(vec![ QuaintValue::int32(1), QuaintValue::int32(2), QuaintValue::int32(3) - ])) + ]) ); let json_value = json!([1, 2, {}]); @@ -940,14 +942,14 @@ mod proxy_test { #[test] fn js_text_array_to_quaint() { let column_type = ColumnType::TextArray; - test_null(QuaintValue::Array(None), column_type); + test_null(QuaintValue::null_array(), column_type); let json_value = json!(["hi", "there"]); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); assert_eq!( quaint_value, - QuaintValue::Array(Some(vec![QuaintValue::text("hi"), QuaintValue::text("there"),])) + QuaintValue::array(vec![QuaintValue::text("hi"), QuaintValue::text("there"),]) ); let json_value = json!([10]); diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index 398286c8ca05..d8b022d0fa49 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -34,7 +34,7 @@ pub(crate) struct JsBaseQueryable { impl JsBaseQueryable { pub(crate) fn new(proxy: CommonProxy) -> Self { - let flavour: Flavour = proxy.flavour.to_owned().parse().unwrap(); + let flavour: Flavour = proxy.flavour.parse().unwrap(); Self { proxy, flavour } } diff --git a/query-engine/query-engine-node-api/src/logger.rs b/query-engine/query-engine-node-api/src/logger.rs index d327726d6567..da3e725c0218 100644 --- a/query-engine/query-engine-node-api/src/logger.rs +++ b/query-engine/query-engine-node-api/src/logger.rs @@ -58,7 +58,7 @@ impl Logger { None }; - let layer = CallbackLayer::new(log_callback.clone()).with_filter(filters); + let layer = CallbackLayer::new(log_callback).with_filter(filters); let metrics = if enable_metrics { query_engine_metrics::setup(); diff --git a/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs b/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs index 2aaf1f9801db..3f486f34163b 100644 --- a/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs +++ 
b/schema-engine/sql-migration-tests/src/assertions/quaint_result_set_ext.rs @@ -1,4 +1,4 @@ -use quaint::{connector::ResultRowRef, prelude::ResultSet, Value}; +use quaint::{connector::ResultRowRef, prelude::ResultSet, Value, ValueType}; pub trait ResultSetExt: Sized { fn assert_row_count(self, expected_count: usize) -> Self; @@ -34,8 +34,8 @@ pub struct RowAssertion<'a>(ResultRowRef<'a>); impl<'a> RowAssertion<'a> { pub fn assert_array_value(self, column_name: &str, expected_value: &[Value<'_>]) -> Self { - let actual_value = self.0.get(column_name).and_then(|col: &Value<'_>| match col { - Value::Array(x) => x.as_ref(), + let actual_value = self.0.get(column_name).and_then(|col: &Value<'_>| match &col.typed { + ValueType::Array(x) => x.as_ref(), _ => panic!("as_array"), }); @@ -87,9 +87,9 @@ impl<'a> RowAssertion<'a> { #[track_caller] pub fn assert_text_value(self, column_name: &str, expected_value: &str) -> Self { let value = self.0.get(column_name).expect("Expected a value, found none"); - let value_text: &str = match value { - Value::Text(val) => val.as_deref(), - Value::Enum(val, _) => val.as_deref(), + let value_text: &str = match &value.typed { + ValueType::Text(val) => val.as_deref(), + ValueType::Enum(val, _) => val.as_deref(), _ => None, } .expect("Expected a string value"); diff --git a/schema-engine/sql-migration-tests/tests/existing_data/mod.rs b/schema-engine/sql-migration-tests/tests/existing_data/mod.rs index 461214dd62ae..bed7b8fc80ca 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/mod.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/mod.rs @@ -334,7 +334,7 @@ fn changing_a_column_from_optional_to_required_is_unexecutable(api: TestApi) { let insert = Insert::multi_into(api.render_table_name("Test"), ["id", "age"]) .values(("a", 12)) .values(("b", 22)) - .values(("c", Value::Int32(None))); + .values(("c", ValueType::Int32(None))); api.query(insert.into()); @@ -756,10 +756,9 @@ fn set_default_current_timestamp_on_existing_column_works(api: TestApi) { api.schema_push_w_datasource(dm1).send().assert_green(); - let insert = Insert::single_into(api.render_table_name("User")).value("id", 5).value( - "created_at", - Value::DateTime(Some("2020-06-15T14:50:00Z".parse().unwrap())), - ); + let insert = Insert::single_into(api.render_table_name("User")) + .value("id", 5) + .value("created_at", Value::datetime("2020-06-15T14:50:00Z".parse().unwrap())); api.query(insert.into()); let dm2 = r#" diff --git a/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs b/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs index 8b3f0cf608c3..718b34a3230b 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs @@ -92,8 +92,8 @@ fn making_an_optional_field_required_with_data_with_a_default_works(api: TestApi .map(|row| row.into_iter().collect::>()) .collect::>(), &[ - &[Value::text("abc"), Value::text("george"), Value::integer(84)], - &[Value::text("def"), Value::text("X Æ A-12"), Value::integer(7)], + &[Value::text("abc"), Value::text("george"), Value::int32(84)], + &[Value::text("def"), Value::text("X Æ A-12"), Value::int32(7)], ] ); } @@ -151,7 +151,7 @@ fn making_an_optional_field_required_with_data_with_a_default_is_unexecutable(ap .map(|row| 
row.into_iter().collect::>()) .collect::>(), &[ - &[Value::text("abc"), Value::text("george"), Value::Int32(None)], + &[Value::text("abc"), Value::text("george"), Value::null_int32()], &[Value::text("def"), Value::text("X Æ A-12"), Value::int32(7)], ] ); diff --git a/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs b/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs index f87d9f931a28..4485ad4de719 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/sqlite_existing_data_tests.rs @@ -1,4 +1,4 @@ -use quaint::{prelude::Insert, Value}; +use quaint::{prelude::Insert, ValueType}; use sql_migration_tests::test_api::*; use sql_schema_describer::DefaultValue; @@ -16,7 +16,7 @@ fn changing_a_column_from_optional_to_required_with_a_default_is_safe(api: TestA let insert = Insert::multi_into(api.render_table_name("Test"), ["id", "age"]) .values(("a", 12)) .values(("b", 22)) - .values(("c", Value::Int32(None))); + .values(("c", ValueType::Int32(None))); api.query(insert.into()); diff --git a/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs b/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs index 56d63f7860b3..2a77f3a29eba 100644 --- a/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs +++ b/schema-engine/sql-migration-tests/tests/existing_data/type_migration_tests.rs @@ -101,7 +101,7 @@ fn changing_a_string_array_column_to_scalar_is_fine(api: TestApi) { .value("id", "film1") .value( "mainProtagonist", - Value::Array(Some(vec!["giant shark".into(), "jason statham".into()])), + Value::array(vec![Value::text("giant shark"), Value::text("jason statham")]), ) .result_raw(); @@ -138,7 +138,7 @@ fn changing_an_int_array_column_to_scalar_is_not_possible(api: TestApi) { api.insert("Film") .value("id", "film1") - .value("mainProtagonist", Value::Array(Some(vec![7.into(), 11.into()]))) + .value("mainProtagonist", Value::array(vec![Value::int32(7), Value::int32(11)])) .result_raw(); let dm2 = r#" diff --git a/schema-engine/sql-migration-tests/tests/migrations/sql.rs b/schema-engine/sql-migration-tests/tests/migrations/sql.rs index f8a9d0ef3202..8f87115db4dc 100644 --- a/schema-engine/sql-migration-tests/tests/migrations/sql.rs +++ b/schema-engine/sql-migration-tests/tests/migrations/sql.rs @@ -244,17 +244,17 @@ fn enum_defaults_must_work(api: TestApi) { assert_eq!(row.get("id").unwrap().to_string().unwrap(), "the-id"); assert_eq!( - match row.get("mood").unwrap() { - quaint::Value::Enum(Some(enm), _) => enm.as_ref(), - quaint::Value::Text(Some(enm)) => enm.as_ref(), + match &row.get("mood").unwrap().typed { + quaint::ValueType::Enum(Some(enm), _) => enm.as_ref(), + quaint::ValueType::Text(Some(enm)) => enm.as_ref(), _ => panic!("mood is not an enum value"), }, "hongry" ); assert_eq!( - match row.get("previousMood").unwrap() { - quaint::Value::Enum(Some(enm), _) => enm.as_ref(), - quaint::Value::Text(Some(enm)) => enm.as_ref(), + match &row.get("previousMood").unwrap().typed { + quaint::ValueType::Enum(Some(enm), _) => enm.as_ref(), + quaint::ValueType::Text(Some(enm)) => enm.as_ref(), _ => panic!("previousMood is not an enum value"), }, "ANGRY" diff --git a/schema-engine/sql-migration-tests/tests/native_types/mssql.rs b/schema-engine/sql-migration-tests/tests/native_types/mssql.rs index 32ac24688601..83d988acb176 100644 --- 
a/schema-engine/sql-migration-tests/tests/native_types/mssql.rs +++ b/schema-engine/sql-migration-tests/tests/native_types/mssql.rs @@ -43,7 +43,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "TinyInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "SmallInt", "Int", @@ -69,7 +69,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "SmallInt", - Value::integer(i16::MAX), + Value::int32(i16::MAX), &[ "Int", "BigInt", @@ -92,7 +92,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "Int", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "BigInt", "Decimal", @@ -423,12 +423,12 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| { vec![ ( "TinyInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &["Decimal(2,0)", "Char(2)", "NChar(2)", "VarChar(2)", "NVarChar(2)"], ), ( "SmallInt", - Value::integer(i16::MAX), + Value::int32(i16::MAX), &[ "Bit", "TinyInt", @@ -443,7 +443,7 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| { ), ( "Int", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Bit", "TinyInt", @@ -468,7 +468,7 @@ static RISKY_CASTS: Lazy> = Lazy::new(|| { ), ( "BigInt", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Bit", "TinyInt", @@ -1402,7 +1402,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "TinyInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "Date", "Time", @@ -1417,7 +1417,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "SmallInt", - Value::integer(i16::MAX), + Value::int32(i16::MAX), &[ "Date", "Time", @@ -1432,7 +1432,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "Int", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Date", "Time", diff --git a/schema-engine/sql-migration-tests/tests/native_types/mysql.rs b/schema-engine/sql-migration-tests/tests/native_types/mysql.rs index 9144313af8ed..d8cf62f5767c 100644 --- a/schema-engine/sql-migration-tests/tests/native_types/mysql.rs +++ b/schema-engine/sql-migration-tests/tests/native_types/mysql.rs @@ -2,13 +2,13 @@ use sql_migration_tests::test_api::*; use std::{borrow::Cow, fmt::Write}; /// (source native type, test value to insert, target native type) -type Case = (&'static str, quaint::Value<'static>, &'static [&'static str]); +type Case = (&'static str, quaint::ValueType<'static>, &'static [&'static str]); type Cases = &'static [Case]; const SAFE_CASTS: Cases = &[ ( "BigInt", - quaint::Value::Int64(Some(99999999432)), + quaint::ValueType::Int64(Some(99999999432)), &[ "Binary(200)", "Bit(54)", @@ -30,7 +30,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Binary(8)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"08088044"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"08088044"))), &[ "Bit(64)", "Blob", @@ -51,7 +51,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Int", - quaint::Value::Int32(Some(i32::MIN)), + quaint::ValueType::Int32(Some(i32::MIN)), &[ "BigInt", "Char(20)", @@ -64,7 +64,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Bit(32)", - quaint::Value::Bytes(Some(Cow::Borrowed(b""))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b""))), &[ "SmallInt", "UnsignedSmallInt", @@ -86,12 +86,12 @@ const SAFE_CASTS: Cases = &[ ), ( "Blob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["TinyBlob", "MediumBlob", "LongBlob"], ), ( "Char(10)", - quaint::Value::Text(Some(Cow::Borrowed("1234"))), + quaint::ValueType::Text(Some(Cow::Borrowed("1234"))), &[ "Blob", "Char(11)", @@ -107,7 +107,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Date", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-12"))), + 
quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-12"))), &[ "DateTime(0)", "Decimal(10,0)", @@ -131,7 +131,7 @@ const SAFE_CASTS: Cases = &[ ), ( "DateTime(0)", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), &[ "BigInt", "UnsignedBigInt", @@ -144,7 +144,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Double", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &[ "Float", "Bit(64)", @@ -169,7 +169,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Float", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &[ "Double", "Bit(32)", @@ -196,7 +196,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Json", - quaint::Value::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), + quaint::ValueType::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), &[ // To string "Binary(10)", @@ -211,22 +211,22 @@ const SAFE_CASTS: Cases = &[ ), ( "LongBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["TinyBlob", "Blob", "MediumBlob"], ), ( "MediumBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["TinyBlob", "Blob", "LongBlob"], ), ( "TinyBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0xff]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0xff]))), &["LongBlob", "Blob", "MediumBlob"], ), ( "Time", - quaint::Value::Int32(Some(20)), + quaint::ValueType::Int32(Some(20)), &[ "VarChar(20)", "BigInt", @@ -238,7 +238,7 @@ const SAFE_CASTS: Cases = &[ ), ( "Year", - quaint::Value::Int32(Some(2000)), + quaint::ValueType::Int32(Some(2000)), &[ // To string "Binary(10)", @@ -272,7 +272,7 @@ const SAFE_CASTS: Cases = &[ const RISKY_CASTS: Cases = &[ ( "BigInt", - quaint::Value::Int64(Some(100)), + quaint::ValueType::Int64(Some(100)), &[ "Int", "MediumInt", @@ -285,30 +285,30 @@ const RISKY_CASTS: Cases = &[ "UnsignedTinyInt", ], ), - ("BigInt", quaint::Value::Int64(Some(2000)), &["Year"]), + ("BigInt", quaint::ValueType::Int64(Some(2000)), &["Year"]), ( "Binary(8)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"08088044"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"08088044"))), &["Bit(32)", "Int", "UnsignedBigInt", "UnsignedInt", "UnsignedMediumInt"], ), ( "Binary(1)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"0"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"0"))), &["Time(0)", "SmallInt", "TinyInt", "UnsignedSmallInt", "UnsignedTinyInt"], ), ( "Binary(4)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"2000"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"2000"))), &["Year"], ), ( "Bit(32)", - quaint::Value::Bytes(Some(Cow::Borrowed(b""))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b""))), &["Decimal(10,2)", "Double", "Float"], ), ( "Blob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -322,22 +322,22 @@ const RISKY_CASTS: Cases = &[ ), ( "Decimal(20,5)", - quaint::Value::Text(Some(Cow::Borrowed("350"))), + quaint::ValueType::Text(Some(Cow::Borrowed("350"))), &["BigInt", "UnsignedBigInt", "Time(0)", "Json"], ), ( "Double", - quaint::Value::Float(Some(0f32)), + quaint::ValueType::Float(Some(0f32)), &["Char(40)", "VarBinary(40)", "VarChar(40)"], ), ( "Float", - quaint::Value::Float(Some(0f32)), + quaint::ValueType::Float(Some(0f32)), &["Char(40)", "VarBinary(40)", "VarChar(40)"], ), ( "LongBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + 
quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -351,7 +351,7 @@ const RISKY_CASTS: Cases = &[ ), ( "MediumBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -363,10 +363,10 @@ const RISKY_CASTS: Cases = &[ "VarChar(20)", ], ), - ("SmallInt", quaint::Value::Int32(Some(1990)), &["Year", "Double"]), + ("SmallInt", quaint::ValueType::Int32(Some(1990)), &["Year", "Double"]), ( "TinyBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(b"abc"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"abc"))), &[ "Binary(10)", "Char(10)", @@ -380,12 +380,12 @@ const RISKY_CASTS: Cases = &[ ), ( "Time(0)", - quaint::Value::Int32(Some(5002)), + quaint::ValueType::Int32(Some(5002)), &["Date", "DateTime(0)", "Timestamp(0)"], ), ( "Year", - quaint::Value::Text(Some(Cow::Borrowed("1999"))), + quaint::ValueType::Text(Some(Cow::Borrowed("1999"))), &["Decimal(10,0)", "Json"], ), ]; @@ -393,22 +393,22 @@ const RISKY_CASTS: Cases = &[ const IMPOSSIBLE_CASTS: Cases = &[ ( "BigInt", - quaint::Value::Int64(Some(500)), + quaint::ValueType::Int64(Some(500)), &["Decimal(15,6)", "Date", "DateTime(0)", "Json", "Timestamp(0)"], ), ( "Binary(12)", - quaint::Value::Bytes(Some(Cow::Borrowed(b"8080008"))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b"8080008"))), &["Date", "DateTime(0)", "Json", "Timestamp(0)"], ), ( "Bit(32)", - quaint::Value::Bytes(Some(Cow::Borrowed(b""))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(b""))), &["Date", "DateTime(0)", "Time(0)", "Timestamp(0)", "Json"], ), ( "Blob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -433,7 +433,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "Date", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-12"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-12"))), &[ "TinyInt", "UnsignedTinyInt", @@ -446,7 +446,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "DateTime(0)", - quaint::Value::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), + quaint::ValueType::Text(Some(Cow::Borrowed("2020-01-08 08:00:00"))), &[ "TinyInt", "UnsignedTinyInt", @@ -461,17 +461,17 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "Double", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &["Binary(10)", "Date", "Timestamp(0)", "DateTime(0)"], ), ( "Float", - quaint::Value::Float(Some(3.20)), + quaint::ValueType::Float(Some(3.20)), &["Binary(10)", "Date", "Timestamp(0)", "DateTime(0)"], ), ( "Json", - quaint::Value::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), + quaint::ValueType::Text(Some(Cow::Borrowed("{\"a\":\"b\"}"))), &[ // Integer types "Bit(64)", @@ -490,7 +490,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "LongBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -515,7 +515,7 @@ const IMPOSSIBLE_CASTS: Cases = &[ ), ( "MediumBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -538,10 +538,10 @@ const IMPOSSIBLE_CASTS: Cases = &[ "Year", ], ), - ("Time(0)", quaint::Value::Int32(Some(0)), &["Json", "Year"]), + ("Time(0)", quaint::ValueType::Int32(Some(0)), &["Json", "Year"]), ( "TinyBlob", - quaint::Value::Bytes(Some(Cow::Borrowed(&[0x00]))), + quaint::ValueType::Bytes(Some(Cow::Borrowed(&[0x00]))), &[ "TinyInt", "BigInt", @@ -566,7 +566,7 @@ const 
IMPOSSIBLE_CASTS: Cases = &[ ), ( "Year", - quaint::Value::Int32(Some(2001)), + quaint::ValueType::Int32(Some(2001)), &[ "TinyInt", "UnsignedTinyInt", @@ -638,7 +638,7 @@ fn colnames_for_cases(cases: Cases) -> Vec { fn expand_cases<'a, 'b>( from_type: &str, - test_value: &'a quaint::Value, + test_value: &'a quaint::ValueType<'a>, (to_types, nullable): (&[&str], bool), dm1: &'b mut String, dm2: &'b mut String, diff --git a/schema-engine/sql-migration-tests/tests/native_types/postgres.rs b/schema-engine/sql-migration-tests/tests/native_types/postgres.rs index 1e114f147e5d..24fa4a559744 100644 --- a/schema-engine/sql-migration-tests/tests/native_types/postgres.rs +++ b/schema-engine/sql-migration-tests/tests/native_types/postgres.rs @@ -7,12 +7,12 @@ use std::{collections::HashMap, fmt::Write as _, str::FromStr}; static SAFE_CASTS: Lazy> = Lazy::new(|| { vec![ - ("Oid", Value::integer(u8::MAX), &["VarChar(100)", "Integer", "BigInt"]), + ("Oid", Value::int32(u8::MAX), &["VarChar(100)", "Integer", "BigInt"]), ("Money", Value::int64(u8::MAX), &["VarChar(100)"]), ("Inet", Value::text("10.1.2.3"), &["VarChar(100)"]), ( "SmallInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "SmallInt", "Integer", @@ -26,7 +26,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "Integer", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "Integer", "BigInt", @@ -67,7 +67,7 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { ), ( "DoublePrecision", - Value::Double(Some(f64::MIN)), + Value::double(f64::MIN), &["DoublePrecision", "Text", "VarChar", "Char(1000)"], ), ("VarChar", Value::text("fiver"), &["Text"]), @@ -155,13 +155,9 @@ static SAFE_CASTS: Lazy> = Lazy::new(|| { static RISKY_CASTS: Lazy> = Lazy::new(|| { vec![ ("Money", Value::int64(u8::MAX), &["Decimal"]), - ( - "SmallInt", - Value::integer(2), - &["Decimal(2,1)", "VarChar(3)", "Char(1)"], - ), - ("Integer", Value::integer(1), &["Decimal(2,1)", "VarChar(4)", "Char(1)"]), - ("BigInt", Value::integer(2), &["Decimal(2,1)", "VarChar(17)", "Char(1)"]), + ("SmallInt", Value::int32(2), &["Decimal(2,1)", "VarChar(3)", "Char(1)"]), + ("Integer", Value::int32(1), &["Decimal(2,1)", "VarChar(4)", "Char(1)"]), + ("BigInt", Value::int32(2), &["Decimal(2,1)", "VarChar(17)", "Char(1)"]), ( "Decimal(10,2)", Value::numeric(BigDecimal::from_str("1").unwrap()), @@ -227,7 +223,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { vec![ ( "SmallInt", - Value::integer(u8::MAX), + Value::int32(u8::MAX), &[ "ByteA", "Timestamp(3)", @@ -246,7 +242,7 @@ static NOT_CASTABLE: Lazy> = Lazy::new(|| { ), ( "Integer", - Value::integer(i32::MAX), + Value::int32(i32::MAX), &[ "ByteA", "Timestamp(3)", @@ -1076,7 +1072,7 @@ static SAFE_CASTS_NON_LIST_TO_STRING: CastList = Lazy::new(|| { Value::array(vec![Value::numeric(BigDecimal::from_str("128.90").unwrap())]), ), ("Real", Value::array(vec![Value::float(f32::MIN)])), - ("DoublePrecision", Value::array(vec![Value::Double(Some(f64::MIN))])), + ("DoublePrecision", Value::array(vec![Value::double(f64::MIN)])), ("VarChar", Value::array(vec!["test"])), ("Char(1)", Value::array(vec!["a"])), ("Text", Value::array(vec!["text"])), @@ -1115,7 +1111,7 @@ static SAFE_CASTS_NON_LIST_TO_STRING: CastList = Lazy::new(|| { Value::array(vec![Value::numeric(BigDecimal::from_str("128.90").unwrap())]), ), ("Real", Value::array(vec![Value::float(f32::MIN)])), - ("DoublePrecision", Value::array(vec![Value::Double(Some(f64::MIN))])), + ("DoublePrecision", Value::array(vec![Value::double(f64::MIN)])), ("VarChar", Value::array(vec!["test"])), ("Char(1)", 
Value::array(vec!["a"])), ("Text", Value::array(vec!["text"])), diff --git a/schema-engine/sql-schema-describer/src/postgres.rs b/schema-engine/sql-schema-describer/src/postgres.rs index 8b6db47651ce..16bf0487dada 100644 --- a/schema-engine/sql-schema-describer/src/postgres.rs +++ b/schema-engine/sql-schema-describer/src/postgres.rs @@ -16,7 +16,7 @@ use psl::{ builtin_connectors::{CockroachType, PostgresType}, datamodel_connector::NativeTypeInstance, }; -use quaint::{connector::ResultRow, prelude::Queryable, Value::Array}; +use quaint::{connector::ResultRow, prelude::Queryable, Value}; use regex::Regex; use std::{ any::type_name, @@ -663,13 +663,7 @@ impl<'a> SqlSchemaDescriber<'a> { WHERE n.nspname = ANY ( $1 ) "#; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut procedures = Vec::with_capacity(rows.len()); @@ -691,10 +685,7 @@ impl<'a> SqlSchemaDescriber<'a> { async fn get_namespaces(&self, sql_schema: &mut SqlSchema, namespaces: &[&str]) -> DescriberResult<()> { let sql = include_str!("postgres/namespaces_query.sql"); - let rows = self - .conn - .query_raw(sql, &[Array(Some(namespaces.iter().map(|s| (*s).into()).collect()))]) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let names = rows.into_iter().map(|row| (row.get_expect_string("namespace_name"))); @@ -718,13 +709,7 @@ impl<'a> SqlSchemaDescriber<'a> { let namespaces = &sql_schema.namespaces; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut names = Vec::with_capacity(rows.len()); @@ -826,13 +811,7 @@ impl<'a> SqlSchemaDescriber<'a> { WHERE schemaname = ANY ( $1 ) "#}; - let result_set = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let result_set = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; for row in result_set.into_iter() { let name = row.get_expect_string("view_name"); @@ -896,13 +875,7 @@ impl<'a> SqlSchemaDescriber<'a> { "# ); - let rows = self - .conn - .query_raw( - sql.as_str(), - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql.as_str(), &[Value::array(namespaces)]).await?; for col in rows { let namespace = col.get_expect_string("namespace"); @@ -1141,13 +1114,7 @@ impl<'a> SqlSchemaDescriber<'a> { // One foreign key with multiple columns will be represented here as several // rows with the same ID. 
- let result_set = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let result_set = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; for row in result_set.into_iter() { trace!("Got description FK row {:?}", row); @@ -1254,13 +1221,7 @@ impl<'a> SqlSchemaDescriber<'a> { let namespaces = &sql_schema.namespaces; let sql = include_str!("postgres/constraints_query.sql"); - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; for row in rows { let namespace = row.get_expect_string("namespace"); @@ -1298,13 +1259,7 @@ impl<'a> SqlSchemaDescriber<'a> { ) -> DescriberResult<()> { let namespaces = &sql_schema.namespaces; let sql = include_str!("postgres/indexes_query.sql"); - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut result_rows = Vec::new(); let mut index_rows = rows.into_iter().peekable(); @@ -1374,13 +1329,7 @@ impl<'a> SqlSchemaDescriber<'a> { "# }; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let sequences = rows.into_iter().map(|seq| Sequence { namespace_id: sql_schema .get_namespace_id(&seq.get_expect_string("namespace")) .unwrap(), @@ -1414,13 +1363,7 @@ impl<'a> SqlSchemaDescriber<'a> { WHERE n.nspname = ANY ( $1 ) ORDER BY e.enumsortorder"; - let rows = self - .conn - .query_raw( - sql, - &[Array(Some(namespaces.iter().map(|v| v.as_str().into()).collect()))], - ) - .await?; + let rows = self.conn.query_raw(sql, &[Value::array(namespaces)]).await?; let mut enum_values: BTreeMap<(NamespaceId, String, Option<String>), Vec<String>> = BTreeMap::new(); for row in rows.into_iter() { diff --git a/schema-engine/sql-schema-describer/src/sqlite.rs b/schema-engine/sql-schema-describer/src/sqlite.rs index 3073be2b4daa..1f28958605a2 100644 --- a/schema-engine/sql-schema-describer/src/sqlite.rs +++ b/schema-engine/sql-schema-describer/src/sqlite.rs @@ -8,7 +8,7 @@ use crate::{ use either::Either; use indexmap::IndexMap; use quaint::{ - ast::Value, + ast::{Value, ValueType}, connector::{GetRow, ToColumnNames}, prelude::ResultRow, }; @@ -345,7 +345,10 @@ async fn push_columns( let default = match row.get("dflt_value") { None => None, Some(val) if val.is_null() => None, - Some(Value::Text(Some(cow_string))) => { + Some(Value { + typed: ValueType::Text(Some(cow_string)), + ..
+ }) => { let default_string = cow_string.to_string(); if default_string.to_lowercase() == "null" { From 0ae6be330bfb7e6c92e8251f14eb855548fe8530 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 6 Oct 2023 08:40:58 +0200 Subject: [PATCH 070/128] Fix clippy and compiler warnings with Rust 1.73 (#4321) --- psl/parser-database/src/walkers/relation_field.rs | 7 +------ quaint/src/ast/select.rs | 2 +- quaint/src/connector/mssql/error.rs | 5 ++++- quaint/src/visitor/mysql.rs | 4 ++-- .../black-box-tests/tests/black_box_tests.rs | 2 -- .../query-tests-setup/src/query_result.rs | 4 ++-- .../sql-query-connector/src/filter/visitor.rs | 12 ++++++------ query-engine/core/src/response_ir/internal.rs | 2 +- .../dmmf/src/ast_builders/datamodel_ast_builder.rs | 14 ++++++-------- query-engine/metrics/src/common.rs | 5 +---- query-engine/schema/src/build/input_types/mod.rs | 1 - .../src/sampler/statistics.rs | 2 +- .../schema-connector/src/introspection_context.rs | 2 +- .../src/sql_renderer/mysql_renderer.rs | 2 +- .../src/sql_schema_differ/column.rs | 2 +- schema-engine/sql-schema-describer/src/postgres.rs | 4 +--- 16 files changed, 29 insertions(+), 41 deletions(-) diff --git a/psl/parser-database/src/walkers/relation_field.rs b/psl/parser-database/src/walkers/relation_field.rs index 8e376a8c3ef1..ca9582e49acf 100644 --- a/psl/parser-database/src/walkers/relation_field.rs +++ b/psl/parser-database/src/walkers/relation_field.rs @@ -187,12 +187,7 @@ impl<'db> Eq for RelationName<'db> {} impl<'db> PartialOrd for RelationName<'db> { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { - match (self, other) { - (Self::Explicit(l0), Self::Explicit(r0)) => l0.partial_cmp(r0), - (Self::Generated(l0), Self::Generated(r0)) => l0.partial_cmp(r0), - (Self::Explicit(l0), Self::Generated(r0)) => l0.partial_cmp(&r0.as_str()), - (Self::Generated(l0), Self::Explicit(r0)) => l0.as_str().partial_cmp(*r0), - } + Some(self.cmp(other)) } } diff --git a/quaint/src/ast/select.rs b/quaint/src/ast/select.rs index b08dce6624eb..96d50ba645c5 100644 --- a/quaint/src/ast/select.rs +++ b/quaint/src/ast/select.rs @@ -659,7 +659,7 @@ impl<'a> Select<'a> { ctes.into_iter().collect() }) - .unwrap_or_else(Vec::new); + .unwrap_or_default(); if top_level { let clashing_names = self diff --git a/quaint/src/connector/mssql/error.rs b/quaint/src/connector/mssql/error.rs index cbee867e1f03..f9b6f5e95ab6 100644 --- a/quaint/src/connector/mssql/error.rs +++ b/quaint/src/connector/mssql/error.rs @@ -4,7 +4,10 @@ use tiberius::error::IoErrorKind; impl From<tiberius::error::Error> for Error { fn from(e: tiberius::error::Error) -> Error { match e { - tiberius::error::Error::Io { kind, message } if kind == IoErrorKind::UnexpectedEof => { + tiberius::error::Error::Io { + kind: IoErrorKind::UnexpectedEof, + message, + } => { let mut builder = Error::builder(ErrorKind::ConnectionClosed); builder.set_original_message(message); builder.build() diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index 928c8a8a9ed6..39b55568a338 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -936,7 +936,7 @@ mod tests { let table_2 = "table2"; let join = table_2.alias("j").on(("j", "id").equals(Column::from(("t1", "id2")))); - let a = table_1.clone().alias("t1"); + let a = table_1.alias("t1"); let selection = Select::from_table(a).column(("t1", "id")).inner_join(join); let id1 = Column::from((table_1, "id")); @@ -957,7 +957,7 @@ mod tests { let table_2 = "table2"; let join = table_2.alias("j").on(("j", "id").equals(Column::from(("t1", "id2")))); -
let a = table_1.clone().alias("t1"); + let a = table_1.alias("t1"); let selection = Select::from_table(a).column(("t1", "id")).inner_join(join); let id1 = Column::from((table_1, "id")); diff --git a/query-engine/black-box-tests/tests/black_box_tests.rs b/query-engine/black-box-tests/tests/black_box_tests.rs index d3e6c7065b45..6c2028e1fe0f 100644 --- a/query-engine/black-box-tests/tests/black_box_tests.rs +++ b/query-engine/black-box-tests/tests/black_box_tests.rs @@ -4,5 +4,3 @@ mod helpers; mod metrics; mod protocols; - -use query_engine_metrics; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs index d45f4ae04c7f..4c85e70ac7c6 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs @@ -254,7 +254,7 @@ mod tests { #[test] fn test_deserializing_error_batch_response() { - let response = r###" + let response = r#" { "batchResult":[ { @@ -280,7 +280,7 @@ mod tests { ] } ] -}"###; +}"#; let result: QueryResult = serde_json::from_str(response).unwrap(); let expected = QueryResult { diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 099967177b55..3b42457950f7 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -199,8 +199,8 @@ impl FilterVisitorExt for FilterVisitor { ) -> (ConditionTree<'static>, Option>) { match filter { Filter::And(mut filters) => match filters.len() { - n if n == 0 => (ConditionTree::NoCondition, None), - n if n == 1 => self.visit_filter(filters.pop().unwrap(), ctx), + 0 => (ConditionTree::NoCondition, None), + 1 => self.visit_filter(filters.pop().unwrap(), ctx), _ => { let mut exprs = Vec::with_capacity(filters.len()); let mut top_level_joins = vec![]; @@ -219,8 +219,8 @@ impl FilterVisitorExt for FilterVisitor { } }, Filter::Or(mut filters) => match filters.len() { - n if n == 0 => (ConditionTree::NegativeCondition, None), - n if n == 1 => self.visit_filter(filters.pop().unwrap(), ctx), + 0 => (ConditionTree::NegativeCondition, None), + 1 => self.visit_filter(filters.pop().unwrap(), ctx), _ => { let mut exprs = Vec::with_capacity(filters.len()); let mut top_level_joins = vec![]; @@ -239,8 +239,8 @@ impl FilterVisitorExt for FilterVisitor { } }, Filter::Not(mut filters) => match filters.len() { - n if n == 0 => (ConditionTree::NoCondition, None), - n if n == 1 => { + 0 => (ConditionTree::NoCondition, None), + 1 => { let (cond, joins) = self.invert_reverse(|this| this.visit_filter(filters.pop().unwrap(), ctx)); (cond.not(), joins) diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index d07d625b4fe6..2ad67707f22c 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -211,7 +211,7 @@ fn find_nested_aggregate_output_field<'a, 'b>( fn coerce_non_numeric(value: PrismaValue, output: &OutputType<'_>) -> PrismaValue { match (value, &output.inner) { - (PrismaValue::Int(x), InnerOutputType::Scalar(ScalarType::String)) if x == 0 => PrismaValue::Null, + (PrismaValue::Int(0), InnerOutputType::Scalar(ScalarType::String)) => PrismaValue::Null, (x, _) => x, } } diff --git a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs 
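The patch-070 hunks above and below are mechanical fixes for lints that fire with the Rust 1.73 toolchain: literal patterns instead of `n if n == 0` guards, equality moved into the pattern (`PrismaValue::Int(0)`, `kind: IoErrorKind::UnexpectedEof`), `unwrap_or_default()` over `unwrap_or_else(Vec::new)`, and `PartialOrd` deferring to `Ord`. A small self-contained sketch of the same patterns; the `Version` type is illustrative, not from the engine:

    use std::cmp::Ordering;

    #[derive(PartialEq, Eq)]
    struct Version(u32);

    impl Ord for Version {
        fn cmp(&self, other: &Self) -> Ordering {
            self.0.cmp(&other.0)
        }
    }

    impl PartialOrd for Version {
        // Canonical form: defer to `Ord` so the two impls can never disagree.
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    fn bucket(items: Option<Vec<u32>>) -> &'static str {
        // Literal patterns instead of guard clauses, plus `unwrap_or_default`.
        match items.unwrap_or_default().len() {
            0 => "empty",
            1 => "single",
            _ => "many",
        }
    }

    fn main() {
        assert!(Version(1) < Version(2));
        assert_eq!(bucket(None), "empty");
        assert_eq!(bucket(Some(vec![7])), "single");
    }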
b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs index 1cc66275e8ca..8d078719d4fa 100644 --- a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs +++ b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs @@ -133,17 +133,15 @@ fn model_to_dmmf(model: walkers::ModelWalker<'_>) -> Model { primary_key, unique_fields: model .indexes() - .filter_map(|i| { - (i.is_unique() && !i.is_defined_on_field()).then(|| i.fields().map(|f| f.name().to_owned()).collect()) - }) + .filter(|&i| i.is_unique() && !i.is_defined_on_field()) + .map(|i| i.fields().map(|f| f.name().to_owned()).collect()) .collect(), unique_indexes: model .indexes() - .filter_map(|i| { - (i.is_unique() && !i.is_defined_on_field()).then(|| UniqueIndex { - name: i.name().map(ToOwned::to_owned), - fields: i.fields().map(|f| f.name().to_owned()).collect(), - }) + .filter(|&i| i.is_unique() && !i.is_defined_on_field()) + .map(|i| UniqueIndex { + name: i.name().map(ToOwned::to_owned), + fields: i.fields().map(|f| f.name().to_owned()).collect(), }) .collect(), } diff --git a/query-engine/metrics/src/common.rs b/query-engine/metrics/src/common.rs index c859e142b533..92c76ffba962 100644 --- a/query-engine/metrics/src/common.rs +++ b/query-engine/metrics/src/common.rs @@ -66,10 +66,7 @@ impl Metric { global_labels.clone(), ), None => { - let description = descriptions - .get(key.name()) - .map(|s| s.to_string()) - .unwrap_or(String::new()); + let description = descriptions.get(key.name()).map(|s| s.to_string()).unwrap_or_default(); Self::new(key, description, value, global_labels.clone()) } } diff --git a/query-engine/schema/src/build/input_types/mod.rs b/query-engine/schema/src/build/input_types/mod.rs index f34da22f40af..98c8caa84a38 100644 --- a/query-engine/schema/src/build/input_types/mod.rs +++ b/query-engine/schema/src/build/input_types/mod.rs @@ -2,7 +2,6 @@ pub(crate) mod fields; pub(crate) mod objects; use super::*; -use crate::*; use fields::*; use prisma_models::ScalarFieldRef; diff --git a/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs b/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs index 30e75007f2ba..d342e0e89f06 100644 --- a/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs +++ b/schema-engine/connectors/mongodb-schema-connector/src/sampler/statistics.rs @@ -530,7 +530,7 @@ impl<'a> Statistics<'a> { sampler.types.insert(FieldType::Document(type_name.clone()), 1); let key = Name::CompositeType(type_name); - self.models.entry(key).or_insert_with(Default::default); + self.models.entry(key).or_default(); sampler } else { diff --git a/schema-engine/connectors/schema-connector/src/introspection_context.rs b/schema-engine/connectors/schema-connector/src/introspection_context.rs index 6d5e3319cb21..54f197935bd3 100644 --- a/schema-engine/connectors/schema-connector/src/introspection_context.rs +++ b/schema-engine/connectors/schema-connector/src/introspection_context.rs @@ -116,7 +116,7 @@ impl From<isize> for CompositeTypeDepth { fn from(size: isize) -> Self { match size { size if size < 0 => Self::Infinite, - size if size == 0 => Self::None, + 0 => Self::None, _ => Self::Level(size as usize), } } diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs b/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs index d8620ff18eeb..4f96aea6fd69 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs +++
b/schema-engine/connectors/sql-schema-connector/src/sql_renderer/mysql_renderer.rs @@ -366,7 +366,7 @@ fn render_mysql_modify( .map(|default| render_default(next_column, default)) .filter(|expr| !expr.is_empty()) .map(|expression| format!(" DEFAULT {expression}")) - .unwrap_or_else(String::new); + .unwrap_or_default(); format!( "MODIFY {column_name} {column_type}{nullability}{default}{sequence}", diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs index 059e382ddae8..829531a14291 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_schema_differ/column.rs @@ -164,7 +164,7 @@ pub(crate) struct ColumnChanges { impl PartialOrd for ColumnChanges { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { - self.changes.bits().partial_cmp(&other.changes.bits()) + Some(self.cmp(other)) } } diff --git a/schema-engine/sql-schema-describer/src/postgres.rs b/schema-engine/sql-schema-describer/src/postgres.rs index 16bf0487dada..211cf8da489a 100644 --- a/schema-engine/sql-schema-describer/src/postgres.rs +++ b/schema-engine/sql-schema-describer/src/postgres.rs @@ -1373,9 +1373,7 @@ impl<'a> SqlSchemaDescriber<'a> { let description = row.get_string("description"); let namespace_id = sql_schema.get_namespace_id(&namespace).unwrap(); - let values = enum_values - .entry((namespace_id, name, description)) - .or_insert_with(Vec::new); + let values = enum_values.entry((namespace_id, name, description)).or_default(); values.push(value); } From bb19f0b01208c1f081a81303a6caa7069469eeda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Fri, 6 Oct 2023 08:56:31 +0200 Subject: [PATCH 071/128] tech-debt: make quaint a workspace member (#4313) --- .github/workflows/quaint.yml | 9 +- .github/workflows/unit-tests.yml | 1 + Cargo.lock | 227 +- Cargo.toml | 25 +- .../src/walkers/relation_field.rs | 8 +- quaint/Cargo.lock | 2928 ----------------- quaint/Cargo.toml | 39 +- quaint/README.md | 8 +- .../Cargo.toml | 4 +- .../src/lib.rs | 0 .../src/test_each_connector.rs | 1 + .../Cargo.toml | 4 +- .../src/lib.rs | 0 .../src/tags.rs | 0 quaint/rust-toolchain | 1 - quaint/src/ast/function/row_to_json.rs | 2 - quaint/src/ast/insert.rs | 4 - quaint/src/ast/update.rs | 1 - quaint/src/ast/values.rs | 16 - quaint/src/connector/connection_info.rs | 9 - quaint/src/connector/mssql.rs | 3 - quaint/src/connector/mysql.rs | 2 - quaint/src/connector/postgres.rs | 4 - quaint/src/connector/sqlite.rs | 2 - quaint/src/error.rs | 6 - quaint/src/lib.rs | 6 - quaint/src/serde.rs | 335 -- quaint/src/single.rs | 1 - quaint/src/tests/query.rs | 4 +- quaint/src/tests/query/error.rs | 2 +- quaint/src/tests/test_api.rs | 2 +- quaint/src/tests/test_api/mssql.rs | 4 +- quaint/src/tests/test_api/mysql.rs | 2 +- quaint/src/tests/test_api/postgres.rs | 4 +- quaint/src/tests/test_api/sqlite.rs | 4 +- quaint/src/tests/types/sqlite.rs | 8 +- quaint/src/tests/upsert.rs | 2 +- quaint/src/visitor/mssql.rs | 1 - quaint/src/visitor/mysql.rs | 1 - quaint/src/visitor/postgres.rs | 1 - quaint/src/visitor/sqlite.rs | 1 - quaint/test-macros/src/test_each_connector.rs | 138 - 42 files changed, 267 insertions(+), 3553 deletions(-) delete mode 100644 quaint/Cargo.lock rename quaint/{test-macros => quaint-test-macros}/Cargo.toml (73%) rename quaint/{test-macros => quaint-test-macros}/src/lib.rs (100%) rename quaint/{test-setup =>
quaint-test-setup}/Cargo.toml (72%) rename quaint/{test-setup => quaint-test-setup}/src/lib.rs (100%) rename quaint/{test-setup => quaint-test-setup}/src/tags.rs (100%) delete mode 100644 quaint/rust-toolchain delete mode 100644 quaint/src/serde.rs delete mode 100644 quaint/test-macros/src/test_each_connector.rs diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml index d84590b951b4..3c1ae42f9a70 100644 --- a/.github/workflows/quaint.yml +++ b/.github/workflows/quaint.yml @@ -17,14 +17,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled --features=bigdecimal" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled --features=bigdecimal" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" + - "--lib --no-default-features --features=mysql --features=uuid --features=pooled --features=bigdecimal" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=uuid --features=pooled --features=serde-support --features=bigdecimal" - - "--doc --features=all" + - "--lib --no-default-features --features=mssql --features=uuid --features=pooled --features=bigdecimal" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" TEST_MYSQL8: "mysql://root:prisma@localhost:3307/prisma" diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 96c681075d1d..073d7211e8cf 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -29,6 +29,7 @@ jobs: - run: | cargo test --workspace \ + --exclude=quaint \ --exclude=query-engine \ --exclude=query-engine-node-api \ --exclude=black-box-tests \ diff --git a/Cargo.lock b/Cargo.lock index 5f4df8b9a2f9..3002a1404210 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -321,7 +321,7 @@ version = "0.1.0" dependencies = [ "anyhow", "enumflags2", - "indoc", + "indoc 2.0.3", "insta", "query-engine-metrics", "query-engine-tests", @@ -433,7 +433,7 @@ dependencies = [ "connection-string", "either", "enumflags2", - "indoc", + "indoc 2.0.3", "lsp-types", "once_cell", "psl-core", @@ -890,14 +890,38 @@ dependencies = [ "sha3", ] +[[package]] +name = "darling" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d706e75d87e35569db781a9b5e2416cff1236a47ed380831f959382ccd5f858" +dependencies = [ + "darling_core 0.10.2", + "darling_macro 0.10.2", +] + [[package]] name = "darling" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.13.4", + "darling_macro 0.13.4", +] + +[[package]] +name = "darling_core" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + 
"strsim 0.9.3", + "syn 1.0.109", ] [[package]] @@ -914,13 +938,24 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "darling_macro" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" +dependencies = [ + "darling_core 0.10.2", + "quote", + "syn 1.0.109", +] + [[package]] name = "darling_macro" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ - "darling_core", + "darling_core 0.13.4", "quote", "syn 1.0.109", ] @@ -950,7 +985,7 @@ version = "0.1.0" dependencies = [ "base64 0.13.1", "expect-test", - "indoc", + "indoc 2.0.3", "once_cell", "psl", "regex", @@ -991,7 +1026,7 @@ name = "diagnostics" version = "0.1.0" dependencies = [ "colored", - "indoc", + "indoc 2.0.3", "pest", ] @@ -1036,7 +1071,7 @@ dependencies = [ "expect-test", "flate2", "indexmap 1.9.3", - "indoc", + "indoc 2.0.3", "itertools", "pretty_assertions", "prisma-models", @@ -1377,6 +1412,12 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + [[package]] name = "funty" version = "2.0.0" @@ -1822,12 +1863,35 @@ dependencies = [ "hashbrown 0.14.0", ] +[[package]] +name = "indoc" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + [[package]] name = "indoc" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", + "unindent", +] + [[package]] name = "insta" version = "1.21.2" @@ -2426,11 +2490,11 @@ dependencies = [ "enumflags2", "expect-test", "futures", - "indoc", + "indoc 2.0.3", "mongodb", "mongodb-client", "mongodb-schema-describer", - "names", + "names 0.12.0", "once_cell", "psl", "regex", @@ -2527,6 +2591,15 @@ dependencies = [ "uuid", ] +[[package]] +name = "names" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" +dependencies = [ + "rand 0.3.23", +] + [[package]] name = "names" version = "0.12.0" @@ -2972,6 +3045,12 @@ dependencies = [ "schema-ast", ] +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + [[package]] name = "pbkdf2" version = "0.11.0" @@ -3234,7 +3313,7 @@ dependencies = [ "dmmf", "enumflags2", "expect-test", - "indoc", + "indoc 2.0.3", "log", "lsp-types", "once_cell", @@ -3315,6 +3394,12 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + [[package]] name = "proc-macro2" 
version = "1.0.66" @@ -3386,7 +3471,7 @@ dependencies = [ "dissimilar", "either", "expect-test", - "indoc", + "indoc 2.0.3", "psl-core", ] @@ -3398,7 +3483,7 @@ dependencies = [ "chrono", "diagnostics", "enumflags2", - "indoc", + "indoc 2.0.3", "itertools", "lsp-types", "once_cell", @@ -3463,16 +3548,23 @@ dependencies = [ "either", "futures", "hex", + "indoc 0.3.6", "lru-cache", "metrics 0.18.1", "mobc", "mysql_async", + "names 0.11.0", "native-tls", "num_cpus", + "once_cell", + "paste", "percent-encoding", "postgres-native-tls", "postgres-types", + "quaint-test-macros", + "quaint-test-setup", "rusqlite", + "serde", "serde_json", "sqlformat", "thiserror", @@ -3486,6 +3578,30 @@ dependencies = [ "uuid", ] +[[package]] +name = "quaint-test-macros" +version = "0.1.0" +dependencies = [ + "darling 0.10.2", + "once_cell", + "proc-macro2", + "quaint-test-setup", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "quaint-test-setup" +version = "0.1.0" +dependencies = [ + "async-trait", + "bitflags 1.3.2", + "names 0.11.0", + "once_cell", + "quaint", + "tokio", +] + [[package]] name = "quanta" version = "0.9.3" @@ -3567,7 +3683,7 @@ dependencies = [ "enumflags2", "graphql-parser", "hyper", - "indoc", + "indoc 2.0.3", "mongodb-query-connector", "opentelemetry", "opentelemetry-otlp", @@ -3652,7 +3768,7 @@ dependencies = [ "colored", "enumflags2", "futures", - "indoc", + "indoc 2.0.3", "insta", "once_cell", "prisma-value", @@ -3672,7 +3788,7 @@ dependencies = [ name = "query-test-macros" version = "0.1.0" dependencies = [ - "darling", + "darling 0.13.4", "proc-macro2", "quote", "syn 1.0.109", @@ -3687,7 +3803,7 @@ dependencies = [ "enumflags2", "hyper", "indexmap 1.9.3", - "indoc", + "indoc 2.0.3", "itertools", "jsonrpc-core", "nom", @@ -3746,6 +3862,29 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +dependencies = [ + "libc", + "rand 0.4.6", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + [[package]] name = "rand" version = "0.7.3" @@ -3790,6 +3929,21 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + [[package]] name = "rand_core" version = "0.5.1" @@ -3848,6 +4002,15 @@ dependencies = [ "num_cpus", ] +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -4266,7 +4429,7 @@ dependencies = [ "base64 0.13.1", "connection-string", "expect-test", - "indoc", + "indoc 2.0.3", "jsonrpc-core", "quaint", "schema-connector", @@ -4441,7 +4604,7 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ - "darling", + "darling 0.13.4", "proc-macro2", "quote", "syn 1.0.109", @@ -4624,7 +4787,7 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" name = "sql-ddl" version = "0.1.0" dependencies = [ - "indoc", + "indoc 2.0.3", ] [[package]] @@ -4635,7 +4798,7 @@ dependencies = [ "connection-string", "enumflags2", "expect-test", - "indoc", + "indoc 2.0.3", "pretty_assertions", "psl", "quaint", @@ -4661,7 +4824,7 @@ dependencies = [ "connection-string", "enumflags2", "expect-test", - "indoc", + "indoc 2.0.3", "jsonrpc-core", "once_cell", "pretty_assertions", @@ -4722,7 +4885,7 @@ dependencies = [ "datamodel-renderer", "either", "enumflags2", - "indoc", + "indoc 2.0.3", "once_cell", "prisma-value", "psl", @@ -4753,7 +4916,7 @@ dependencies = [ "enumflags2", "expect-test", "indexmap 1.9.3", - "indoc", + "indoc 2.0.3", "once_cell", "pretty_assertions", "prisma-value", @@ -4820,6 +4983,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +[[package]] +name = "strsim" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" + [[package]] name = "strsim" version = "0.10.0" @@ -5501,7 +5670,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", - "rand 0.7.3", + "rand 0.8.5", "static_assertions", ] @@ -5573,6 +5742,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "unindent" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" + [[package]] name = "unreachable" version = "1.0.0" @@ -5614,7 +5789,7 @@ name = "user-facing-errors" version = "0.1.0" dependencies = [ "backtrace", - "indoc", + "indoc 2.0.3", "itertools", "quaint", "serde", diff --git a/Cargo.toml b/Cargo.toml index 77afa5ee21d4..e82019b1e5c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,26 +30,31 @@ members = [ "prisma-fmt", "prisma-schema-wasm", "psl/*", -] - -# All path dependencies residing in the workspace directory automatically become members. -# The following shouldn't be considered members but embedded dependencies. 
-exclude = [ "quaint", - "quaint/test-macros", - "quaint/test-setup" ] [workspace.dependencies] psl = { path = "./psl/psl" } serde_json = { version = "1", features = ["float_roundtrip", "preserve_order"] } serde = { version = "1", features = ["derive"] } -tokio = { version = "1.25", features = ["rt-multi-thread", "macros", "sync", "io-std", "io-util", "parking_lot", "time"] } +tokio = { version = "1.25", features = [ + "rt-multi-thread", + "macros", + "sync", + "io-std", + "io-util", + "parking_lot", + "time", +] } user-facing-errors = { path = "./libs/user-facing-errors" } uuid = { version = "1", features = ["serde"] } indoc = "2.0.1" connection-string = "0.2" -napi = { version = "2.12.4", default-features = false, features = ["napi8", "tokio_rt", "serde-json"] } +napi = { version = "2.12.4", default-features = false, features = [ + "napi8", + "tokio_rt", + "serde-json", +] } napi-derive = "2.12.4" [workspace.dependencies.quaint] @@ -78,7 +83,7 @@ strip = "symbols" [profile.release] lto = "fat" codegen-units = 1 -opt-level = 's' # Optimize for size. +opt-level = 's' # Optimize for size. [profile.profiling] inherits = "release" diff --git a/psl/parser-database/src/walkers/relation_field.rs b/psl/parser-database/src/walkers/relation_field.rs index ca9582e49acf..b96380f03bf6 100644 --- a/psl/parser-database/src/walkers/relation_field.rs +++ b/psl/parser-database/src/walkers/relation_field.rs @@ -164,7 +164,7 @@ impl<'db> RelationFieldWalker<'db> { } /// The relation name. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialOrd)] pub enum RelationName<'db> { /// A relation name specified in the AST. Explicit(&'db str), @@ -185,12 +185,6 @@ impl<'db> PartialEq for RelationName<'db> { impl<'db> Eq for RelationName<'db> {} -impl<'db> PartialOrd for RelationName<'db> { - fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { - Some(self.cmp(other)) - } -} - impl<'db> Ord for RelationName<'db> { fn cmp(&self, other: &Self) -> std::cmp::Ordering { match (self, other) { diff --git a/quaint/Cargo.lock b/quaint/Cargo.lock deleted file mode 100644 index 9bf69f731916..000000000000 --- a/quaint/Cargo.lock +++ /dev/null @@ -1,2928 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing.
-version = 3 - -[[package]] -name = "addr2line" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "ahash" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom 0.2.10", - "once_cell", - "version_check", -] - -[[package]] -name = "ahash" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", -] - -[[package]] -name = "aho-corasick" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8f9420f797f2d9e935edf629310eb938a0d839f984e25327f3c7eed22300c" -dependencies = [ - "memchr", -] - -[[package]] -name = "allocator-api2" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "arrayvec" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" - -[[package]] -name = "async-native-tls" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d57d4cec3c647232e1094dc013546c0b33ce785d8aeb251e1f20dfaf8a9a13fe" -dependencies = [ - "futures-util", - "native-tls", - "thiserror", - "url", -] - -[[package]] -name = "async-trait" -version = "0.1.72" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "asynchronous-codec" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" -dependencies = [ - "bytes", - "futures-sink", - "futures-util", - "memchr", - "pin-project-lite", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "backtrace" -version = "0.3.68" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" -dependencies = [ - "addr2line", - "cc", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", -] - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "bigdecimal" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - -[[package]] -name = "bindgen" -version = "0.59.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" -dependencies = [ - "bitflags 1.3.2", - "cexpr", - "clang-sys", - "lazy_static", - "lazycell", - "peeking_take_while", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "borsh" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" -dependencies = [ - "borsh-derive", - "hashbrown 0.13.2", -] - -[[package]] -name = "borsh-derive" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" -dependencies = [ - "borsh-derive-internal", - "borsh-schema-derive-internal", - "proc-macro-crate", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "bumpalo" -version = "3.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" - -[[package]] -name = "bytecheck" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627" -dependencies = [ - "bytecheck_derive", - "ptr_meta", - "simdutf8", -] - -[[package]] -name = "bytecheck_derive" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" - -[[package]] -name = "cc" -version = "1.0.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" -dependencies = [ - "libc", -] - -[[package]] -name = "cexpr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", - "num-traits", - "serde", - "time 0.1.45", - "wasm-bindgen", - "winapi", -] - -[[package]] -name = "clang-sys" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" -dependencies = [ - "glob", - "libc", - "libloading", -] - -[[package]] -name = "cmake" -version = "0.1.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" -dependencies = [ - "cc", -] - -[[package]] -name = "connection-string" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "510ca239cf13b7f8d16a2b48f263de7b4f8c566f0af58d901031473c76afb1e3" - -[[package]] -name = "core-foundation" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" - -[[package]] -name = "cpufeatures" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" -dependencies = [ - "libc", -] - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" -dependencies = [ - "cfg-if", - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-epoch", - "crossbeam-queue", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" -dependencies = [ - "cfg-if", - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" -dependencies = [ - "autocfg", - "cfg-if", - "crossbeam-utils", - "memoffset", - "scopeguard", -] - -[[package]] -name = "crossbeam-queue" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "darling" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d706e75d87e35569db781a9b5e2416cff1236a47ed380831f959382ccd5f858" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 1.0.109", -] - -[[package]] -name = "darling_macro" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" -dependencies = [ - "darling_core", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "deranged" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7684a49fb1af197853ef7b2ee694bc1f5b4179556f1e5710e1760c5db6f5e929" - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", -] - -[[package]] -name = "either" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" - -[[package]] -name = "encoding" -version = "0.2.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" -dependencies = [ - "encoding-index-japanese", - 
"encoding-index-korean", - "encoding-index-simpchinese", - "encoding-index-singlebyte", - "encoding-index-tradchinese", -] - -[[package]] -name = "encoding-index-japanese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-korean" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-simpchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-singlebyte" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-tradchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding_index_tests" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" - -[[package]] -name = "enumflags2" -version = "0.7.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c041f5090df68b32bcd905365fd51769c8b9d553fe87fde0b683534f10c01bd2" -dependencies = [ - "enumflags2_derive", -] - -[[package]] -name = "enumflags2_derive" -version = "0.7.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e9a1f9f7d83e59740248a6e14ecf93929ade55027844dfcea78beafccc15745" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "errno" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "fallible-iterator" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" - -[[package]] -name = "fallible-streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" - -[[package]] -name = "fastrand" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" - -[[package]] -name = "flate2" -version = "1.0.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" -dependencies = [ - "crc32fast", - 
"libz-sys", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "frunk" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11a351b59e12f97b4176ee78497dff72e4276fb1ceb13e19056aca7fa0206287" -dependencies = [ - "frunk_core", - "frunk_derives", - "frunk_proc_macros", -] - -[[package]] -name = "frunk_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af2469fab0bd07e64ccf0ad57a1438f63160c69b2e57f04a439653d68eb558d6" - -[[package]] -name = "frunk_derives" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fa992f1656e1707946bbba340ad244f0814009ef8c0118eb7b658395f19a2e" -dependencies = [ - "frunk_proc_macro_helpers", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "frunk_proc_macro_helpers" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35b54add839292b743aeda6ebedbd8b11e93404f902c56223e51b9ec18a13d2c" -dependencies = [ - "frunk_core", - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "frunk_proc_macros" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71b85a1d4a9a6b300b41c05e8e13ef2feca03e0334127f29eca9506a7fe13a93" -dependencies = [ - "frunk_core", - "frunk_proc_macro_helpers", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" - -[[package]] -name = "futures-executor" -version = 
"0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" - -[[package]] -name = "futures-macro" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "futures-sink" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" - -[[package]] -name = "futures-task" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" - -[[package]] -name = "futures-timer" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" - -[[package]] -name = "futures-util" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - -[[package]] -name = "getrandom" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", -] - -[[package]] -name = "gimli" -version = "0.27.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" - -[[package]] -name = "glob" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.6", -] - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash 0.8.3", -] - -[[package]] -name = "hashbrown" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" -dependencies = [ - "ahash 0.8.3", - "allocator-api2", -] - -[[package]] -name = "hashlink" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" -dependencies = [ - "hashbrown 0.14.0", -] - -[[package]] -name = "hermit-abi" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - -[[package]] -name = "indoc" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" -dependencies = [ - "indoc-impl", - "proc-macro-hack", -] - -[[package]] -name = "indoc-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" -dependencies = [ - "proc-macro-hack", - "proc-macro2", - "quote", - "syn 1.0.109", - "unindent", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" - -[[package]] -name = "js-sys" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "lazy_static" -version 
= "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - -[[package]] -name = "lexical" -version = "6.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7aefb36fd43fef7003334742cbf77b243fcd36418a1d1bdd480d613a67968f6" -dependencies = [ - "lexical-core", -] - -[[package]] -name = "lexical-core" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cde5de06e8d4c2faabc400238f9ae1c74d5412d03a7bd067645ccbc47070e46" -dependencies = [ - "lexical-parse-float", - "lexical-parse-integer", - "lexical-util", - "lexical-write-float", - "lexical-write-integer", -] - -[[package]] -name = "lexical-parse-float" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f" -dependencies = [ - "lexical-parse-integer", - "lexical-util", - "static_assertions", -] - -[[package]] -name = "lexical-parse-integer" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9" -dependencies = [ - "lexical-util", - "static_assertions", -] - -[[package]] -name = "lexical-util" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc" -dependencies = [ - "static_assertions", -] - -[[package]] -name = "lexical-write-float" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accabaa1c4581f05a3923d1b4cfd124c329352288b7b9da09e766b0668116862" -dependencies = [ - "lexical-util", - "lexical-write-integer", - "static_assertions", -] - -[[package]] -name = "lexical-write-integer" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1b6f3d1f4422866b68192d62f77bc5c700bee84f3069f2469d7bc8c77852446" -dependencies = [ - "lexical-util", - "static_assertions", -] - -[[package]] -name = "libc" -version = "0.2.147" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" - -[[package]] -name = "libloading" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" -dependencies = [ - "cfg-if", - "winapi", -] - -[[package]] -name = "libsqlite3-sys" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "libz-sys" -version = "1.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - -[[package]] -name = "linux-raw-sys" -version = "0.4.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" - -[[package]] -name = "lock_api" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" - -[[package]] -name = "lru" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6e8aaa3f231bb4bd57b84b2d5dc3ae7f350265df8aa96492e0bc394a1571909" -dependencies = [ - "hashbrown 0.12.3", -] - -[[package]] -name = "lru-cache" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" -dependencies = [ - "linked-hash-map", -] - -[[package]] -name = "md-5" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" -dependencies = [ - "digest", -] - -[[package]] -name = "md5" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6bcd6433cff03a4bfc3d9834d504467db1f1cf6d0ea765d37d330249ed629d" - -[[package]] -name = "memchr" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" - -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - -[[package]] -name = "metrics" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e52eb6380b6d2a10eb3434aec0885374490f5b82c8aaf5cd487a183c98be834" -dependencies = [ - "ahash 0.7.6", - "metrics-macros", -] - -[[package]] -name = "metrics-macros" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49e30813093f757be5cf21e50389a24dc7dbb22c49f23b7e8f51d69b508a5ffa" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" -dependencies = [ - "adler", -] - -[[package]] -name = "mio" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" -dependencies = [ - "libc", - "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", -] - -[[package]] -name = "mobc" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc79c4a77e312fee9c7bd4b957c12ad1196db73c4a81e5c0b13f02083c4f7f2f" -dependencies = [ - "async-trait", - "futures-channel", - "futures-core", - "futures-timer", - "futures-util", - "log", - "metrics", - "thiserror", - "tokio", - "tracing", - "tracing-subscriber", -] - 
-[[package]] -name = "mysql_async" -version = "0.31.3" -source = "git+https://github.com/prisma/mysql_async?branch=vendored-openssl#dad187b50dc7e8ce2b61fec126822e8e172a9c8a" -dependencies = [ - "bytes", - "crossbeam", - "flate2", - "futures-core", - "futures-sink", - "futures-util", - "lazy_static", - "lru", - "mio", - "mysql_common", - "native-tls", - "once_cell", - "pem", - "percent-encoding", - "pin-project", - "priority-queue", - "serde", - "serde_json", - "socket2 0.4.9", - "thiserror", - "tokio", - "tokio-native-tls", - "tokio-util 0.7.8", - "twox-hash", - "url", -] - -[[package]] -name = "mysql_common" -version = "0.29.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9006c95034ccf7b903d955f210469119f6c3477fc9c9e7a7845ce38a3e665c2a" -dependencies = [ - "base64 0.13.1", - "bigdecimal", - "bindgen", - "bitflags 1.3.2", - "bitvec", - "byteorder", - "bytes", - "cc", - "cmake", - "crc32fast", - "flate2", - "frunk", - "lazy_static", - "lexical", - "num-bigint", - "num-traits", - "rand 0.8.5", - "regex", - "rust_decimal", - "saturating", - "serde", - "serde_json", - "sha1", - "sha2", - "smallvec", - "subprocess", - "thiserror", - "time 0.3.25", - "uuid", -] - -[[package]] -name = "names" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" -dependencies = [ - "rand 0.3.23", -] - -[[package]] -name = "native-tls" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" -dependencies = [ - "lazy_static", - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - -[[package]] -name = "num-bigint" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-integer" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" -dependencies = [ - "autocfg", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "object" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" -dependencies = [ - "memchr", 
-] - -[[package]] -name = "once_cell" -version = "1.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" - -[[package]] -name = "openssl" -version = "0.10.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" -dependencies = [ - "bitflags 1.3.2", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-src" -version = "111.27.0+1.1.1v" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06e8f197c82d7511c5b014030c9b1efeda40d7d5f99d23b4ceed3524a5e63f02" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" -dependencies = [ - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "opentls" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f561874f8d6ecfb674fc08863414040c93cc90c0b6963fe679895fab8b65560" -dependencies = [ - "futures-util", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "url", -] - -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets", -] - -[[package]] -name = "paste" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" - -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - -[[package]] -name = "pem" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" -dependencies = [ - "base64 0.13.1", -] - -[[package]] -name = "percent-encoding" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" - -[[package]] -name = "phf" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" -dependencies = [ - "phf_shared", -] - -[[package]] -name = "phf_shared" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkg-config" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" - -[[package]] -name = "postgres-native-tls" -version = "0.5.0" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" -dependencies = [ - "native-tls", - "tokio", - "tokio-native-tls", - "tokio-postgres", -] - -[[package]] -name = "postgres-protocol" -version = "0.6.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" -dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "fallible-iterator", - "hmac", - "md-5", - "memchr", - "rand 0.8.5", - "sha2", - "stringprep", -] - -[[package]] -name = "postgres-types" -version = "0.2.4" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" -dependencies = [ - "bit-vec", - "bytes", - "chrono", - "fallible-iterator", - "postgres-protocol", - "serde", - "serde_json", - "uuid", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "pretty-hex" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa0831dd7cc608c38a5e323422a0077678fa5744aa2be4ad91c4ece8eec8d5" - -[[package]] -name = "priority-queue" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff39edfcaec0d64e8d0da38564fad195d2d51b680940295fcc307366e101e61" -dependencies = [ - "autocfg", - "indexmap", -] - -[[package]] -name = "proc-macro-crate" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" -dependencies = [ - "toml", -] - -[[package]] -name = "proc-macro-hack" -version = "0.5.20+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" - -[[package]] -name = 
"proc-macro2" -version = "1.0.66" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "ptr_meta" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" -dependencies = [ - "ptr_meta_derive", -] - -[[package]] -name = "ptr_meta_derive" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "quaint" -version = "0.2.0-alpha.13" -dependencies = [ - "async-trait", - "base64 0.12.3", - "bigdecimal", - "bit-vec", - "byteorder", - "bytes", - "chrono", - "connection-string", - "either", - "futures", - "hex", - "indoc", - "lru-cache", - "metrics", - "mobc", - "mysql_async", - "names", - "native-tls", - "num_cpus", - "once_cell", - "paste", - "percent-encoding", - "postgres-native-tls", - "postgres-types", - "rusqlite", - "serde", - "serde_json", - "sqlformat", - "test-macros", - "test-setup", - "thiserror", - "tiberius", - "tokio", - "tokio-postgres", - "tokio-util 0.6.10", - "tracing", - "tracing-core", - "url", - "uuid", -] - -[[package]] -name = "quote" -version = "1.0.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.3.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" -dependencies = [ - "libc", - "rand 0.4.6", -] - -[[package]] -name = "rand" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -dependencies = [ - "fuchsia-cprng", - "libc", - "rand_core 0.3.1", - "rdrand", - "winapi", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.3.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.10", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "regex" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" - -[[package]] -name = "rend" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" -dependencies = [ - "bytecheck", -] - -[[package]] -name = "rkyv" -version = "0.7.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58" -dependencies = [ - "bitvec", - "bytecheck", - "hashbrown 0.12.3", - "ptr_meta", - "rend", - "rkyv_derive", - "seahash", - "tinyvec", - "uuid", -] - -[[package]] -name = "rkyv_derive" -version = "0.7.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "rusqlite" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2" -dependencies = [ - "bitflags 2.3.3", - "chrono", - "fallible-iterator", - "fallible-streaming-iterator", - "hashlink", - "libsqlite3-sys", - "smallvec", -] - -[[package]] 
-name = "rust_decimal" -version = "1.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a2ab0025103a60ecaaf3abf24db1db240a4e1c15837090d2c32f625ac98abea" -dependencies = [ - "arrayvec", - "borsh", - "byteorder", - "bytes", - "num-traits", - "rand 0.8.5", - "rkyv", - "serde", - "serde_json", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustix" -version = "0.38.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "172891ebdceb05aa0005f533a6cbfca599ddd7d966f6f5d4d9b2e70478e70399" -dependencies = [ - "bitflags 2.3.3", - "errno", - "libc", - "linux-raw-sys", - "windows-sys", -] - -[[package]] -name = "ryu" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" - -[[package]] -name = "saturating" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ece8e78b2f38ec51c51f5d475df0a7187ba5111b2a28bdc761ee05b075d40a71" - -[[package]] -name = "schannel" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" -dependencies = [ - "windows-sys", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "seahash" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" - -[[package]] -name = "security-framework" -version = "2.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "serde" -version = "1.0.183" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ac8da02677876d532745a130fc9d8e6edfa81a269b107c5b00829b91d8eb3c" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.183" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aafe972d60b0b9bee71a91b92fee2d4fb3c9d7e8f6b179aa99f27203d99a4816" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "serde_json" -version = "1.0.104" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha1" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha2" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "shlex" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" - -[[package]] -name = "simdutf8" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" - -[[package]] -name = "siphasher" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" - -[[package]] -name = "slab" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" -dependencies = [ - "autocfg", -] - -[[package]] -name = "smallvec" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" - -[[package]] -name = "socket2" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "socket2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" -dependencies = [ - "libc", - "windows-sys", -] - -[[package]] -name = "sqlformat" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" -dependencies = [ - "itertools", - "nom", - "unicode_categories", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "stringprep" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "strsim" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" - -[[package]] -name = "subprocess" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2e86926081dda636c546d8c5e641661049d7562a68f5488be4a1f7f66f6086" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "subtle" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" - -[[package]] -name = "syn" -version = 
"1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "tempfile" -version = "3.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" -dependencies = [ - "cfg-if", - "fastrand", - "redox_syscall", - "rustix", - "windows-sys", -] - -[[package]] -name = "test-macros" -version = "0.1.0" -dependencies = [ - "darling", - "once_cell", - "proc-macro2", - "quote", - "syn 1.0.109", - "test-setup", -] - -[[package]] -name = "test-setup" -version = "0.1.0" -dependencies = [ - "async-trait", - "bitflags 1.3.2", - "names", - "once_cell", - "quaint", - "tokio", -] - -[[package]] -name = "thiserror" -version = "1.0.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "thread_local" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" -dependencies = [ - "cfg-if", - "once_cell", -] - -[[package]] -name = "tiberius" -version = "0.11.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66303a42b7c5daffb95c10cd8f3007a9c29b3e90128cf42b3738f58102aa2516" -dependencies = [ - "async-native-tls", - "async-trait", - "asynchronous-codec", - "bigdecimal", - "byteorder", - "bytes", - "chrono", - "connection-string", - "encoding", - "enumflags2", - "futures", - "futures-sink", - "futures-util", - "num-traits", - "once_cell", - "opentls", - "pin-project-lite", - "pretty-hex", - "thiserror", - "tokio", - "tokio-util 0.7.8", - "tracing", - "uuid", - "winauth", -] - -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fdd63d58b18d663fbdf70e049f00a22c8e42be082203be7f26589213cd75ea" -dependencies = [ - "deranged", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" - -[[package]] -name = "time-macros" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eb71511c991639bb078fd5bf97757e03914361c48100d52878b8e52b46fb92cd" -dependencies = [ - "time-core", -] - -[[package]] -name = "tinyvec" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd" -dependencies = [ - "backtrace", - "bytes", - "libc", - "mio", - "num_cpus", - "pin-project-lite", - "socket2 0.5.3", - "tokio-macros", - "windows-sys", -] - -[[package]] -name = "tokio-macros" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-postgres" -version = "0.7.7" -source = "git+https://github.com/prisma/rust-postgres?branch=pgbouncer-mode#a1a2dc6d9584deaf70a14293c428e7b6ca614d98" -dependencies = [ - "async-trait", - "byteorder", - "bytes", - "fallible-iterator", - "futures-channel", - "futures-util", - "log", - "parking_lot", - "percent-encoding", - "phf", - "pin-project-lite", - "postgres-protocol", - "postgres-types", - "socket2 0.5.3", - "tokio", - "tokio-util 0.7.8", -] - -[[package]] -name = "tokio-util" -version = "0.6.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "log", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "pin-project-lite", - "tokio", - "tracing", -] - -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - -[[package]] -name = "tracing" -version = "0.1.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" -dependencies = [ - "cfg-if", - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "tracing-core" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" -dependencies = [ - "lazy_static", - "log", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" -dependencies = [ - "nu-ansi-term", - "sharded-slab", - "smallvec", - "thread_local", - "tracing-core", - "tracing-log", -] - -[[package]] -name = "twox-hash" -version = "1.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" -dependencies = [ - "cfg-if", - "rand 0.3.23", - "static_assertions", -] - -[[package]] -name = "typenum" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" - -[[package]] -name = "unicode-bidi" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" - -[[package]] -name = "unicode-ident" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" - -[[package]] -name = "unicode-normalization" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - -[[package]] -name = "unindent" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" - -[[package]] -name = "url" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", -] - -[[package]] -name = "uuid" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" -dependencies = [ - "getrandom 0.2.10", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.28", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "winauth" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f820cd208ce9c6b050812dc2d724ba98c6c1e9db5ce9b3f58d925ae5723a5e6" -dependencies = [ - "bitflags 1.3.2", - "byteorder", - "md5", - "rand 0.7.3", - "winapi", -] - -[[package]] -name = "windows" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.48.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] \ No newline at end of file diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index 90f0eacc1fd2..d79d0858ecfe 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -29,16 +29,7 @@ docs = [] # way to access database-specific methods when you need extra control. 
expose-drivers = [] -all = [ - "mssql", - "mysql", - "pooled", - "postgresql", - "serde-support", - "sqlite", - "uuid", - "bigdecimal", -] +all = ["mssql", "mysql", "pooled", "postgresql", "sqlite", "uuid", "bigdecimal"] vendored-openssl = [ "postgres-native-tls/vendored-openssl", @@ -60,7 +51,6 @@ postgresql = [ mssql = ["tiberius", "uuid", "tokio-util", "tokio/time", "tokio/net", "either"] mysql = ["mysql_async", "tokio/time", "lru-cache"] pooled = ["mobc"] -serde-support = ["serde"] sqlite = ["rusqlite", "tokio/sync"] bigdecimal = ["bigdecimal_"] fmt-sql = ["sqlformat"] @@ -96,8 +86,8 @@ indoc = "0.3" names = "0.11" paste = "1.0" serde = { version = "1.0", features = ["derive"] } -test-macros = { path = "test-macros" } -test-setup = { path = "test-setup" } +quaint-test-macros = { path = "quaint-test-macros" } +quaint-test-setup = { path = "quaint-test-setup" } uuid = { version = "1", features = ["v4"] } tokio = { version = "1.0", features = ["rt-multi-thread", "macros", "time"] } @@ -125,7 +115,14 @@ features = ["sql-browser-tokio", "chrono", "bigdecimal"] version = "0.11.2" optional = true default-features = false -features = ["sql-browser-tokio", "vendored-openssl", "chrono", "bigdecimal", "tds73", "winauth"] +features = [ + "sql-browser-tokio", + "vendored-openssl", + "chrono", + "bigdecimal", + "tds73", + "winauth", +] [dependencies.bigdecimal_] version = "0.3" @@ -137,13 +134,23 @@ version = "1" optional = true [dependencies.tokio-postgres] -features = ["with-uuid-1", "with-chrono-0_4", "with-serde_json-1", "with-bit-vec-0_6"] +features = [ + "with-uuid-1", + "with-chrono-0_4", + "with-serde_json-1", + "with-bit-vec-0_6", +] git = "https://github.com/prisma/rust-postgres" branch = "pgbouncer-mode" optional = true [dependencies.postgres-types] -features = ["with-uuid-1", "with-chrono-0_4", "with-serde_json-1", "with-bit-vec-0_6"] +features = [ + "with-uuid-1", + "with-chrono-0_4", + "with-serde_json-1", + "with-bit-vec-0_6", +] git = "https://github.com/prisma/rust-postgres" branch = "pgbouncer-mode" optional = true diff --git a/quaint/README.md b/quaint/README.md index b7abfaefaf6c..9ad45ee12182 100644 --- a/quaint/README.md +++ b/quaint/README.md @@ -13,11 +13,6 @@ Quaint is an abstraction over certain SQL databases. It provides: - Pooling with [mobc](https://crates.io/crates/mobc) - Async/await and Futures 0.3 -### Documentation - -- [Released](https://docs.rs/quaint) -- [Main](https://prisma.github.io/quaint/quaint/index.html) - ### Feature flags - `mysql`: Support for MySQL databases. @@ -28,7 +23,6 @@ Quaint is an abstraction over certain SQL databases. It provides: - `json`: JSON type support with `serde_json` crate. - `uuid`: UUID type support with `uuid` crate. - `chrono`: DateTime type support with `chrono` crate. -- `serde-support`: Deserialize support from result set with `serde` crate. - `bigdecimal`: Numeric values can be read as `BigDecimal`. - `vendored-openssl`: Statically links against a vendored OpenSSL library on non-Windows or non-Apple platforms. @@ -77,7 +71,7 @@ The `FMT_SQL` environment variable can be used to log _formatted_ SQL queries. 
This requires the rust nightly channel:

```sh
-> cargo +nightly rustdoc --all-features
+> cargo rustdoc --all-features
```

Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html`
diff --git a/quaint/test-macros/Cargo.toml b/quaint/quaint-test-macros/Cargo.toml
similarity index 73%
rename from quaint/test-macros/Cargo.toml
rename to quaint/quaint-test-macros/Cargo.toml
index da106947a9d7..1d7ff6e33536 100644
--- a/quaint/test-macros/Cargo.toml
+++ b/quaint/quaint-test-macros/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "test-macros"
+name = "quaint-test-macros"
 version = "0.1.0"
 authors = ["Julius de Bruijn "]
 edition = "2018"
@@ -13,4 +13,4 @@ syn = "1.0.5"
 quote = "1.0.2"
 proc-macro2 = "1.0.6"
 once_cell = "1.3.1"
-test-setup = { path = "../test-setup" }
+quaint-test-setup = { path = "../quaint-test-setup" }
diff --git a/quaint/test-macros/src/lib.rs b/quaint/quaint-test-macros/src/lib.rs
similarity index 100%
rename from quaint/test-macros/src/lib.rs
rename to quaint/quaint-test-macros/src/lib.rs
diff --git a/quaint/quaint-test-macros/src/test_each_connector.rs b/quaint/quaint-test-macros/src/test_each_connector.rs
index c01aa695d1ad..87cf6da1a7e6 100644
--- a/quaint/quaint-test-macros/src/test_each_connector.rs
+++ b/quaint/quaint-test-macros/src/test_each_connector.rs
@@ -2,6 +2,7 @@ use darling::FromMeta;
 use once_cell::sync::Lazy;
 use proc_macro::TokenStream;
 use proc_macro2::Span;
+
 use quaint_test_setup::{ConnectorDefinition, Tags, CONNECTORS};
 use quote::quote;
 use std::str::FromStr;
diff --git a/quaint/test-setup/Cargo.toml b/quaint/quaint-test-setup/Cargo.toml
similarity index 72%
rename from quaint/test-setup/Cargo.toml
rename to quaint/quaint-test-setup/Cargo.toml
index 959be70d4fd8..b7ad87fed8fc 100644
--- a/quaint/test-setup/Cargo.toml
+++ b/quaint/quaint-test-setup/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "test-setup"
+name = "quaint-test-setup"
 version = "0.1.0"
 authors = ["Julius de Bruijn "]
 edition = "2018"
@@ -9,5 +9,5 @@ once_cell = "1.3.1"
 bitflags = "1.2.1"
 async-trait = "0.1"
 names = "0.11"
-tokio = { version = "1.0", features = ["rt-multi-thread"]}
+tokio = { version = "1.0", features = ["rt-multi-thread"] }
 quaint = { path = "..", features = ["all"] }
diff --git a/quaint/test-setup/src/lib.rs b/quaint/quaint-test-setup/src/lib.rs
similarity index 100%
rename from quaint/test-setup/src/lib.rs
rename to quaint/quaint-test-setup/src/lib.rs
diff --git a/quaint/test-setup/src/tags.rs b/quaint/quaint-test-setup/src/tags.rs
similarity index 100%
rename from quaint/test-setup/src/tags.rs
rename to quaint/quaint-test-setup/src/tags.rs
diff --git a/quaint/rust-toolchain b/quaint/rust-toolchain
deleted file mode 100644
index 870bbe4e50e6..000000000000
--- a/quaint/rust-toolchain
+++ /dev/null
@@ -1 +0,0 @@
-stable
\ No newline at end of file
diff --git a/quaint/src/ast/function/row_to_json.rs b/quaint/src/ast/function/row_to_json.rs
index 40e2b0dec7fc..9ffeb6653484 100644
--- a/quaint/src/ast/function/row_to_json.rs
+++ b/quaint/src/ast/function/row_to_json.rs
@@ -2,7 +2,6 @@ use super::Function;
 use crate::ast::Table;

 #[derive(Debug, Clone, PartialEq)]
-#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))]
 #[cfg(feature = "postgresql")]
 /// A representation of the `ROW_TO_JSON` function in the database.
/// Only for `Postgresql` @@ -39,7 +38,6 @@ pub struct RowToJson<'a> { /// # Ok(()) /// # } /// ``` -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] #[cfg(feature = "postgresql")] pub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a> where diff --git a/quaint/src/ast/insert.rs b/quaint/src/ast/insert.rs index e984ae0b273e..cd38fff87043 100644 --- a/quaint/src/ast/insert.rs +++ b/quaint/src/ast/insert.rs @@ -256,10 +256,6 @@ impl<'a> Insert<'a> { /// # } /// ``` #[cfg(any(feature = "postgresql", feature = "mssql", feature = "sqlite"))] - #[cfg_attr( - feature = "docs", - doc(cfg(any(feature = "postgresql", feature = "mssql", feature = "sqlite"))) - )] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/ast/update.rs b/quaint/src/ast/update.rs index 5d35929eac60..751655bd82e1 100644 --- a/quaint/src/ast/update.rs +++ b/quaint/src/ast/update.rs @@ -150,7 +150,6 @@ impl<'a> Update<'a> { /// # } /// ``` #[cfg(any(feature = "postgresql", feature = "sqlite"))] - #[cfg_attr(feature = "docs", doc(cfg(any(feature = "postgresql", feature = "sqlite"))))] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 92719b982eb4..49560bbc695f 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -60,7 +60,6 @@ impl<'a> Value<'a> { /// Creates a new decimal value. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub fn numeric(value: BigDecimal) -> Self { ValueType::numeric(value).into_value() } @@ -152,7 +151,6 @@ impl<'a> Value<'a> { /// Creates a new uuid value. #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] pub fn uuid(value: Uuid) -> Self { ValueType::uuid(value).into_value() } @@ -273,7 +271,6 @@ impl<'a> Value<'a> { /// `true` if the `Value` is a numeric value or can be converted to one. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub fn is_numeric(&self) -> bool { self.typed.is_numeric() } @@ -281,7 +278,6 @@ impl<'a> Value<'a> { /// Returns a bigdecimal, if the value is a numeric, float or double value, /// otherwise `None`. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub fn into_numeric(self) -> Option { self.typed.into_numeric() } @@ -289,7 +285,6 @@ impl<'a> Value<'a> { /// Returns a reference to a bigdecimal, if the value is a numeric. /// Otherwise `None`. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub fn as_numeric(&self) -> Option<&BigDecimal> { self.typed.as_numeric() } @@ -311,14 +306,12 @@ impl<'a> Value<'a> { /// `true` if the `Value` is of UUID type. #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] pub fn is_uuid(&self) -> bool { self.typed.is_uuid() } /// Returns an UUID if the value is of UUID type, otherwise `None`. #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] pub fn as_uuid(&self) -> Option { self.typed.as_uuid() } @@ -513,14 +506,12 @@ pub enum ValueType<'a> { Array(Option>>), /// A numeric value. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] Numeric(Option), /// A JSON value. Json(Option), /// A XML value. Xml(Option>), #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] /// An UUID value. 
Uuid(Option), /// A datetime value. @@ -687,7 +678,6 @@ impl<'a> ValueType<'a> { /// Creates a new decimal value. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub(crate) fn numeric(value: BigDecimal) -> Self { Self::Numeric(Some(value)) } @@ -779,7 +769,6 @@ impl<'a> ValueType<'a> { /// Creates a new uuid value. #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] pub(crate) fn uuid(value: Uuid) -> Self { Self::Uuid(Some(value)) } @@ -960,7 +949,6 @@ impl<'a> ValueType<'a> { /// `true` if the `Value` is a numeric value or can be converted to one. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub(crate) fn is_numeric(&self) -> bool { matches!(self, Self::Numeric(_) | Self::Float(_) | Self::Double(_)) } @@ -968,7 +956,6 @@ impl<'a> ValueType<'a> { /// Returns a bigdecimal, if the value is a numeric, float or double value, /// otherwise `None`. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub(crate) fn into_numeric(self) -> Option { match self { Self::Numeric(d) => d, @@ -981,7 +968,6 @@ impl<'a> ValueType<'a> { /// Returns a reference to a bigdecimal, if the value is a numeric. /// Otherwise `None`. #[cfg(feature = "bigdecimal")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] pub(crate) fn as_numeric(&self) -> Option<&BigDecimal> { match self { Self::Numeric(d) => d.as_ref(), @@ -1018,14 +1004,12 @@ impl<'a> ValueType<'a> { /// `true` if the `Value` is of UUID type. #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] pub(crate) fn is_uuid(&self) -> bool { matches!(self, Self::Uuid(_)) } /// Returns an UUID if the value is of UUID type, otherwise `None`. #[cfg(feature = "uuid")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "uuid")))] pub(crate) fn as_uuid(&self) -> Option { match self { Self::Uuid(u) => *u, diff --git a/quaint/src/connector/connection_info.rs b/quaint/src/connector/connection_info.rs index 7b09472c3312..50029b16c15b 100644 --- a/quaint/src/connector/connection_info.rs +++ b/quaint/src/connector/connection_info.rs @@ -18,19 +18,15 @@ use std::convert::TryFrom; pub enum ConnectionInfo { /// A PostgreSQL connection URL. #[cfg(feature = "postgresql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] Postgres(PostgresUrl), /// A MySQL connection URL. #[cfg(feature = "mysql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] Mysql(MysqlUrl), /// A SQL Server connection URL. #[cfg(feature = "mssql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] Mssql(MssqlUrl), /// A SQLite connection URL. #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] Sqlite { /// The filesystem path of the SQLite database. 
file_path: String, @@ -38,7 +34,6 @@ pub enum ConnectionInfo { db_name: String, }, #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] InMemorySqlite { db_name: String }, } @@ -236,16 +231,12 @@ impl ConnectionInfo { #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum SqlFamily { #[cfg(feature = "postgresql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] Postgres, #[cfg(feature = "mysql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] Mysql, #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] Sqlite, #[cfg(feature = "mssql")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] Mssql, } diff --git a/quaint/src/connector/mssql.rs b/quaint/src/connector/mssql.rs index 848e708c7dbb..cef092edb9d7 100644 --- a/quaint/src/connector/mssql.rs +++ b/quaint/src/connector/mssql.rs @@ -30,7 +30,6 @@ pub use tiberius; /// Wraps a connection url and exposes the parsing logic used by Quaint, /// including default values. #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub struct MssqlUrl { connection_string: String, query_params: MssqlQueryParams, @@ -38,7 +37,6 @@ pub struct MssqlUrl { /// TLS mode when connecting to SQL Server. #[derive(Debug, Clone, Copy)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub enum EncryptMode { /// All traffic is encrypted. On, @@ -270,7 +268,6 @@ impl MssqlQueryParams { /// A connector interface for the SQL Server database. #[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))] pub struct Mssql { client: Mutex>>, url: MssqlUrl, diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index d0c28a9786fe..e5a1b794ab5b 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -33,7 +33,6 @@ use super::IsolationLevel; /// A connector interface for the MySQL database. #[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct Mysql { pub(crate) conn: Mutex, pub(crate) url: MysqlUrl, @@ -44,7 +43,6 @@ pub struct Mysql { /// Wraps a connection url and exposes the parsing logic used by quaint, including default values. #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct MysqlUrl { url: Url, query_params: MysqlUrlQueryParams, diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index dadc39faea2a..2c81144c812b 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -55,7 +55,6 @@ impl Debug for PostgresClient { /// A connector interface for the PostgreSQL database. #[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct PostgreSql { client: PostgresClient, pg_bouncer: bool, @@ -65,14 +64,12 @@ pub struct PostgreSql { } #[derive(Debug, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub enum SslAcceptMode { Strict, AcceptInvalidCerts, } #[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct SslParams { certificate_file: Option, identity_file: Option, @@ -180,7 +177,6 @@ impl PostgresFlavour { /// Wraps a connection url and exposes the parsing logic used by Quaint, /// including default values. 
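// (A hedged usage sketch for the URL type below, assuming the `postgresql`
// feature is enabled; the connection string and credentials are illustrative:
//
//     let url = url::Url::parse("postgresql://user:pass@localhost:5432/db")?;
//     let pg_url = PostgresUrl::new(url)?;
//     assert_eq!(pg_url.dbname(), "db");
//
// `PostgresUrl::new` and `dbname` exist on this type; everything else here is
// made up for the example.)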
#[derive(Debug, Clone)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))] pub struct PostgresUrl { url: Url, query_params: PostgresUrlQueryParams, diff --git a/quaint/src/connector/sqlite.rs b/quaint/src/connector/sqlite.rs index da85697a5936..6db49523c80a 100644 --- a/quaint/src/connector/sqlite.rs +++ b/quaint/src/connector/sqlite.rs @@ -21,7 +21,6 @@ pub(crate) const DEFAULT_SQLITE_SCHEMA_NAME: &str = "main"; pub use rusqlite; /// A connector interface for the SQLite database -#[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] pub struct Sqlite { pub(crate) client: Mutex, } @@ -29,7 +28,6 @@ pub struct Sqlite { /// Wraps a connection url and exposes the parsing logic used by Quaint, /// including default values. #[derive(Debug)] -#[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))] pub struct SqliteParams { pub connection_limit: Option, /// This is not a `PathBuf` because we need to `ATTACH` the database to the path, and this can diff --git a/quaint/src/error.rs b/quaint/src/error.rs index e9bdc890f279..30b6f8c5116f 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -247,11 +247,6 @@ pub enum ErrorKind { #[error("Value out of range error. {}", message)] ValueOutOfRange { message: String }, - #[cfg(feature = "serde-support")] - #[cfg_attr(feature = "docs", doc(cfg(feature = "serde-support")))] - #[error("Deserializing a ResultRow {:?}", _0)] - FromRowError(serde::de::value::Error), - #[error( "Incorrect number of parameters given to a statement. Expected {}: got: {}.", expected, @@ -320,7 +315,6 @@ impl From for ErrorKind { } #[cfg(feature = "bigdecimal")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "bigdecimal")))] impl From for Error { fn from(e: bigdecimal::ParseBigDecimalError) -> Self { let kind = ErrorKind::conversion(format!("{e}")); diff --git a/quaint/src/lib.rs b/quaint/src/lib.rs index 5472c12885a4..8e0d0f6bd178 100644 --- a/quaint/src/lib.rs +++ b/quaint/src/lib.rs @@ -104,8 +104,6 @@ //! # } //! ``` -#![cfg_attr(feature = "docs", feature(doc_cfg))] - #[cfg(not(any(feature = "sqlite", feature = "postgresql", feature = "mysql", feature = "mssql")))] compile_error!("one of 'sqlite', 'postgresql', 'mysql' or 'mssql' features must be enabled"); @@ -122,12 +120,8 @@ pub mod ast; pub mod connector; pub mod error; #[cfg(feature = "pooled")] -#[cfg_attr(feature = "docs", doc(cfg(pooled)))] pub mod pooled; pub mod prelude; -#[cfg(feature = "serde-support")] -#[cfg_attr(feature = "docs", doc(cfg(feature = "serde-support")))] -pub mod serde; pub mod single; #[cfg(test)] mod tests; diff --git a/quaint/src/serde.rs b/quaint/src/serde.rs deleted file mode 100644 index 092ab344633d..000000000000 --- a/quaint/src/serde.rs +++ /dev/null @@ -1,335 +0,0 @@ -//! Convert results from the database into any type implementing `serde::Deserialize`. - -use std::borrow::Cow; - -use crate::{ - ast::{EnumVariant, Value, ValueType}, - connector::{ResultRow, ResultSet}, - error::{Error, ErrorKind}, -}; -use serde::{de::Error as SerdeError, de::*}; - -impl ResultSet { - /// Takes the first row and deserializes it. - #[allow(clippy::wrong_self_convention)] - pub fn from_first(self) -> crate::Result { - from_row(self.into_single()?) - } -} - -/// Deserialize each row of a [`ResultSet`](../connector/struct.ResultSet.html). -/// -/// For an example, see the docs for [`from_row`](fn.from_row.html). 
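// (Context for the removal: with `serde-support` gone, a caller that used
// `from_rows` below would map rows by hand instead. A minimal hedged sketch;
// the `User` struct is illustrative, while iterating a `ResultSet` and the
// `get`/`as_i64`/`as_str` accessors are existing quaint APIs:
//
//     struct User { id: i64, name: String }
//
//     let users: Vec<User> = result_set
//         .into_iter()
//         .map(|row| User {
//             id: row.get("id").and_then(|v| v.as_i64()).unwrap_or_default(),
//             name: row.get("name").and_then(|v| v.as_str()).unwrap_or_default().to_owned(),
//         })
//         .collect();
// )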
-pub fn from_rows(result_set: ResultSet) -> crate::Result> { - let mut deserialized_rows = Vec::with_capacity(result_set.len()); - - for row in result_set { - deserialized_rows.push(from_row(row)?) - } - - Ok(deserialized_rows) -} - -/// Deserialize a row into any type implementing `Deserialize`. -/// -/// ``` -/// # use serde::Deserialize; -/// # use quaint::ast::Value; -/// # -/// # #[derive(Deserialize, Debug, PartialEq)] -/// # struct User { -/// # id: u64, -/// # name: String, -/// # } -/// # -/// # fn main() -> Result<(), Box> { -/// # -/// # let row = quaint::serde::make_row(vec![ -/// # ("id", Value::from(12)), -/// # ("name", "Georgina".into()), -/// # ]); -/// # -/// # -/// let user: User = quaint::serde::from_row(row)?; -/// -/// assert_eq!(user, User { name: "Georgina".to_string(), id: 12 }); -/// # Ok(()) -/// # } -/// ``` -pub fn from_row(row: ResultRow) -> crate::Result { - let deserializer = RowDeserializer(row); - - T::deserialize(deserializer).map_err(|e| Error::builder(ErrorKind::FromRowError(e)).build()) -} - -type DeserializeError = serde::de::value::Error; - -#[derive(Debug)] -struct RowDeserializer(ResultRow); - -impl<'de> Deserializer<'de> for RowDeserializer { - type Error = DeserializeError; - - fn deserialize_any>(self, visitor: V) -> Result { - let ResultRow { columns, mut values } = self.0; - - let kvs = columns.iter().enumerate().map(move |(v, k)| { - // The unwrap is safe if `columns` is correct. - let value = values.get_mut(v).unwrap(); - let taken_value = std::mem::replace(value, Value::from(ValueType::Int64(None))); - (k.as_str(), taken_value) - }); - - let deserializer = serde::de::value::MapDeserializer::new(kvs); - - visitor.visit_map(deserializer) - } - - serde::forward_to_deserialize_any! { - bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes byte_buf - option unit unit_struct newtype_struct seq tuple tuple_struct map - struct enum identifier ignored_any - } -} - -impl<'de> IntoDeserializer<'de, DeserializeError> for Value<'de> { - type Deserializer = ValueDeserializer<'de>; - - fn into_deserializer(self) -> Self::Deserializer { - ValueDeserializer(self) - } -} - -impl<'de> IntoDeserializer<'de, DeserializeError> for EnumVariant<'de> { - type Deserializer = ValueDeserializer<'de>; - - fn into_deserializer(self) -> Self::Deserializer { - ValueDeserializer(self.into_text()) - } -} - -#[derive(Debug)] -pub struct ValueDeserializer<'a>(Value<'a>); - -impl<'de> Deserializer<'de> for ValueDeserializer<'de> { - type Error = DeserializeError; - - fn deserialize_any>(self, visitor: V) -> Result { - match self.0.typed { - ValueType::Text(Some(s)) => visitor.visit_string(s.into_owned()), - ValueType::Text(None) => visitor.visit_none(), - ValueType::Bytes(Some(bytes)) => visitor.visit_bytes(bytes.as_ref()), - ValueType::Bytes(None) => visitor.visit_none(), - ValueType::Enum(Some(s), _) => visitor.visit_string(s.into_owned()), - ValueType::Enum(None, _) => visitor.visit_none(), - ValueType::EnumArray(Some(variants), _) => { - let deserializer = serde::de::value::SeqDeserializer::new(variants.into_iter()); - visitor.visit_seq(deserializer) - } - ValueType::EnumArray(None, _) => visitor.visit_none(), - ValueType::Int32(Some(i)) => visitor.visit_i32(i), - ValueType::Int32(None) => visitor.visit_none(), - ValueType::Int64(Some(i)) => visitor.visit_i64(i), - ValueType::Int64(None) => visitor.visit_none(), - ValueType::Boolean(Some(b)) => visitor.visit_bool(b), - ValueType::Boolean(None) => visitor.visit_none(), - ValueType::Char(Some(c)) => 
visitor.visit_char(c), - ValueType::Char(None) => visitor.visit_none(), - ValueType::Float(Some(num)) => visitor.visit_f64(num as f64), - ValueType::Float(None) => visitor.visit_none(), - ValueType::Double(Some(num)) => visitor.visit_f64(num), - ValueType::Double(None) => visitor.visit_none(), - - #[cfg(feature = "bigdecimal")] - ValueType::Numeric(Some(num)) => { - use crate::bigdecimal::ToPrimitive; - visitor.visit_f64(num.to_f64().unwrap()) - } - #[cfg(feature = "bigdecimal")] - ValueType::Numeric(None) => visitor.visit_none(), - - #[cfg(feature = "uuid")] - ValueType::Uuid(Some(uuid)) => visitor.visit_string(uuid.to_string()), - #[cfg(feature = "uuid")] - ValueType::Uuid(None) => visitor.visit_none(), - - ValueType::Json(Some(value)) => { - let de = value.into_deserializer(); - - de.deserialize_any(visitor) - .map_err(|err| serde::de::value::Error::custom(format!("Error deserializing JSON value: {err}"))) - } - ValueType::Json(None) => visitor.visit_none(), - - ValueType::Xml(Some(s)) => visitor.visit_string(s.into_owned()), - ValueType::Xml(None) => visitor.visit_none(), - - ValueType::DateTime(Some(dt)) => visitor.visit_string(dt.to_rfc3339()), - ValueType::DateTime(None) => visitor.visit_none(), - - ValueType::Date(Some(d)) => visitor.visit_string(format!("{d}")), - ValueType::Date(None) => visitor.visit_none(), - - ValueType::Time(Some(t)) => visitor.visit_string(format!("{t}")), - ValueType::Time(None) => visitor.visit_none(), - - ValueType::Array(Some(values)) => { - let deserializer = serde::de::value::SeqDeserializer::new(values.into_iter()); - visitor.visit_seq(deserializer) - } - ValueType::Array(None) => visitor.visit_none(), - } - } - - fn deserialize_option>(self, visitor: V) -> Result { - if self.0.is_null() { - visitor.visit_none() - } else { - visitor.visit_some(self) - } - } - - fn deserialize_bytes(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - if let ValueType::Bytes(Some(bytes)) = self.0.typed { - match bytes { - Cow::Borrowed(bytes) => visitor.visit_borrowed_bytes(bytes), - Cow::Owned(bytes) => visitor.visit_byte_buf(bytes), - } - } else { - Err(DeserializeError::invalid_type( - Unexpected::Other(&format!("{:?}", self.0)), - &visitor, - )) - } - } - - serde::forward_to_deserialize_any! 
{ - bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str byte_buf - string unit unit_struct newtype_struct seq tuple tuple_struct map - struct enum identifier ignored_any - } -} - -#[doc(hidden)] -pub fn make_row(cols: Vec<(&'static str, Value<'static>)>) -> ResultRow { - let mut columns = Vec::with_capacity(cols.len()); - let mut values = Vec::with_capacity(cols.len()); - - for (name, value) in cols.into_iter() { - columns.push(name.to_owned()); - values.push(value); - } - - ResultRow { - values, - columns: std::sync::Arc::new(columns), - } -} - -#[cfg(test)] -mod tests { - use super::*; - use chrono::{DateTime, Utc}; - use serde::Deserialize; - - #[derive(Deserialize, Debug, PartialEq)] - struct User { - id: u64, - name: String, - bio: Option, - } - - #[derive(Deserialize, PartialEq, Debug)] - struct Cat { - age: f32, - birthday: DateTime, - human: User, - } - - #[test] - fn deserialize_user() { - let row = make_row(vec![("id", Value::int32(12)), ("name", "Georgina".into())]); - let user: User = from_row(row).unwrap(); - - assert_eq!( - user, - User { - id: 12, - name: "Georgina".to_owned(), - bio: None, - } - ) - } - - #[test] - fn from_rows_works() { - let first_row = make_row(vec![ - ("id", Value::int32(12)), - ("name", "Georgina".into()), - ("bio", Value::null_text()), - ]); - let second_row = make_row(vec![ - ("id", 33.into()), - ("name", "Philbert".into()), - ( - "bio", - "Invented sliced bread on a meditation retreat in the Himalayas.".into(), - ), - ]); - - let result_set = ResultSet { - columns: std::sync::Arc::clone(&first_row.columns), - rows: vec![first_row.values, second_row.values], - last_insert_id: None, - }; - - let users: Vec = from_rows(result_set).unwrap(); - - assert_eq!( - users, - &[ - User { - id: 12, - name: "Georgina".to_owned(), - bio: None, - }, - User { - id: 33, - name: "Philbert".to_owned(), - bio: Some("Invented sliced bread on a meditation retreat in the Himalayas.".into()), - } - ] - ); - } - - #[test] - fn deserialize_cat() { - let row = make_row(vec![ - ("age", Value::numeric("18.800001".parse().unwrap())), - ("birthday", Value::datetime("2019-08-01T20:00:00Z".parse().unwrap())), - ( - "human", - Value::json(serde_json::json!({ - "id": 19, - "name": "Georgina" - })), - ), - ]); - let cat: Cat = from_row(row).unwrap(); - - let expected_cat = Cat { - age: 18.800001, - birthday: "2019-08-01T20:00:00Z".parse().unwrap(), - human: User { - name: "Georgina".into(), - id: 19, - bio: None, - }, - }; - - assert_eq!(cat, expected_cat); - } -} diff --git a/quaint/src/single.rs b/quaint/src/single.rs index 3dcb6eb86a33..82042f58010b 100644 --- a/quaint/src/single.rs +++ b/quaint/src/single.rs @@ -167,7 +167,6 @@ impl Quaint { } #[cfg(feature = "sqlite")] - #[cfg_attr(feature = "docs", doc(cfg(sqlite)))] /// Open a new SQLite database in memory. 
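// (A hedged usage sketch for the constructor below, assuming the `sqlite`
// feature is enabled; note that in this version it takes no arguments:
//
//     let conn = Quaint::new_in_memory()?;
//     conn.raw_cmd("CREATE TABLE t (id INTEGER PRIMARY KEY)").await?;
//
// `raw_cmd` comes from quaint's `Queryable` trait; the SQL is illustrative.)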
pub fn new_in_memory() -> crate::Result { Ok(Quaint { diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs index ff16c118a46a..a239275cdd4b 100644 --- a/quaint/src/tests/query.rs +++ b/quaint/src/tests/query.rs @@ -8,8 +8,8 @@ use crate::{ error::ErrorKind, prelude::*, }; -use test_macros::test_each_connector; -use test_setup::Tags; +use quaint_test_macros::test_each_connector; +use quaint_test_setup::Tags; #[test_each_connector] async fn single_value(api: &mut dyn TestApi) -> crate::Result<()> { diff --git a/quaint/src/tests/query/error.rs b/quaint/src/tests/query/error.rs index e1c8a74202f5..3cff5401ec0f 100644 --- a/quaint/src/tests/query/error.rs +++ b/quaint/src/tests/query/error.rs @@ -3,7 +3,7 @@ use crate::{ connector::Queryable, error::{DatabaseConstraint, ErrorKind, Name}, }; -use test_macros::test_each_connector; +use quaint_test_macros::test_each_connector; #[test_each_connector] async fn table_does_not_exist(api: &mut dyn TestApi) -> crate::Result<()> { diff --git a/quaint/src/tests/test_api.rs b/quaint/src/tests/test_api.rs index 301813a70ff9..cd612628d95c 100644 --- a/quaint/src/tests/test_api.rs +++ b/quaint/src/tests/test_api.rs @@ -1,4 +1,4 @@ -use test_setup::Tags; +use quaint_test_setup::Tags; #[cfg(feature = "mssql")] pub mod mssql; diff --git a/quaint/src/tests/test_api/mssql.rs b/quaint/src/tests/test_api/mssql.rs index 2df550709947..164b3fb7ddeb 100644 --- a/quaint/src/tests/test_api/mssql.rs +++ b/quaint/src/tests/test_api/mssql.rs @@ -2,8 +2,8 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; use once_cell::sync::Lazy; +use quaint_test_setup::Tags; use std::env; -use test_setup::Tags; pub static CONN_STR: Lazy = Lazy::new(|| env::var("TEST_MSSQL").expect("TEST_MSSQL env var")); @@ -109,7 +109,7 @@ impl<'a> TestApi for MsSql<'a> { self.names.next().unwrap().replace('-', "") } - fn connector_tag(&self) -> test_setup::Tags { + fn connector_tag(&self) -> quaint_test_setup::Tags { Tags::MSSQL } } diff --git a/quaint/src/tests/test_api/mysql.rs b/quaint/src/tests/test_api/mysql.rs index 376e15692713..764100564fdc 100644 --- a/quaint/src/tests/test_api/mysql.rs +++ b/quaint/src/tests/test_api/mysql.rs @@ -2,8 +2,8 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; use once_cell::sync::Lazy; +use quaint_test_setup::Tags; use std::env; -use test_setup::Tags; pub static CONN_STR: Lazy = Lazy::new(|| env::var("TEST_MYSQL").expect("TEST_MYSQL env var")); pub static CONN_STR8: Lazy = Lazy::new(|| env::var("TEST_MYSQL8").expect("TEST_MYSQL8 env var")); diff --git a/quaint/src/tests/test_api/postgres.rs b/quaint/src/tests/test_api/postgres.rs index 8ba29eaeaf64..791d8b07b041 100644 --- a/quaint/src/tests/test_api/postgres.rs +++ b/quaint/src/tests/test_api/postgres.rs @@ -2,8 +2,8 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; use once_cell::sync::Lazy; +use quaint_test_setup::Tags; use std::env; -use test_setup::Tags; pub static CONN_STR: Lazy = Lazy::new(|| env::var("TEST_PSQL").expect("TEST_PSQL env var")); pub static CRDB_CONN_STR: Lazy = Lazy::new(|| env::var("TEST_CRDB").expect("TEST_CRDB env var")); @@ -108,7 +108,7 @@ impl<'a> TestApi for PostgreSql<'a> { self.names.next().unwrap().replace('-', "") } - fn connector_tag(&self) -> test_setup::Tags { + fn connector_tag(&self) -> quaint_test_setup::Tags { Tags::POSTGRES } } diff --git a/quaint/src/tests/test_api/sqlite.rs b/quaint/src/tests/test_api/sqlite.rs index 
2dd7732f68f9..bde13715d587 100644 --- a/quaint/src/tests/test_api/sqlite.rs +++ b/quaint/src/tests/test_api/sqlite.rs @@ -1,7 +1,7 @@ use super::TestApi; use crate::{connector::Queryable, single::Quaint}; use names::Generator; -use test_setup::Tags; +use quaint_test_setup::Tags; pub(crate) async fn sqlite_test_api<'a>() -> crate::Result> { Sqlite::new().await @@ -99,7 +99,7 @@ impl<'a> TestApi for Sqlite<'a> { self.names.next().unwrap().replace('-', "") } - fn connector_tag(&self) -> test_setup::Tags { + fn connector_tag(&self) -> quaint_test_setup::Tags { Tags::SQLITE } } diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs index ac2c69131e50..e8e65c2ae722 100644 --- a/quaint/src/tests/types/sqlite.rs +++ b/quaint/src/tests/types/sqlite.rs @@ -90,7 +90,7 @@ test_type!(datetime( Value::datetime(chrono::DateTime::from_str("2020-07-29T09:23:44.458Z").unwrap()) )); -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; let dt = chrono::Utc::now(); @@ -110,7 +110,7 @@ async fn test_type_text_datetime_rfc3339(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; let dt = chrono::DateTime::parse_from_rfc2822("Tue, 1 Jul 2003 10:52:37 +0200") @@ -132,7 +132,7 @@ async fn test_type_text_datetime_rfc2822(api: &mut dyn TestApi) -> crate::Result Ok(()) } -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_type_text_datetime_custom(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("DATETIME").await?; @@ -154,7 +154,7 @@ async fn test_type_text_datetime_custom(api: &mut dyn TestApi) -> crate::Result< Ok(()) } -#[test_macros::test_each_connector(tags("sqlite"))] +#[quaint_test_macros::test_each_connector(tags("sqlite"))] async fn test_get_int64_from_int32_field_fails(api: &mut dyn TestApi) -> crate::Result<()> { let table = api.create_type_table("INT").await?; diff --git a/quaint/src/tests/upsert.rs b/quaint/src/tests/upsert.rs index c2808aca997b..7428963141f7 100644 --- a/quaint/src/tests/upsert.rs +++ b/quaint/src/tests/upsert.rs @@ -1,6 +1,6 @@ use super::test_api::*; use crate::{connector::Queryable, prelude::*}; -use test_macros::test_each_connector; +use quaint_test_macros::test_each_connector; #[test_each_connector(tags("postgresql", "sqlite"))] async fn upsert_on_primary_key(api: &mut dyn TestApi) -> crate::Result<()> { diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index 4344b307f197..ad647f4d1cab 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -17,7 +17,6 @@ static GENERATED_KEYS: &str = "@generated_keys"; /// A visitor to generate queries for the SQL Server database. /// /// The returned parameter values can be used directly with the tiberius crate. 
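// (A hedged sketch of how this visitor is driven -- the same pattern applies
// to the MySQL, Postgres and SQLite visitors in the hunks that follow; the
// table and column names are illustrative:
//
//     use quaint::{ast::*, visitor::{Mssql, Visitor}};
//
//     let query = Select::from_table("users").column("id");
//     let (sql, params) = Mssql::build(query)?;
//
// `Visitor::build` returns the SQL string plus the parameter values that the
// doc comment above refers to.)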
-#[cfg_attr(feature = "docs", doc(cfg(feature = "mssql")))]
 pub struct Mssql<'a> {
     query: String,
     parameters: Vec<Value<'a>>,
diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs
index 39b55568a338..25bd5ecd7c30 100644
--- a/quaint/src/visitor/mysql.rs
+++ b/quaint/src/visitor/mysql.rs
@@ -8,7 +8,6 @@ use std::fmt::{self, Write};
 /// A visitor to generate queries for the MySQL database.
 ///
 /// The returned parameter values can be used directly with the mysql crate.
-#[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))]
 pub struct Mysql<'a> {
     query: String,
     parameters: Vec<Value<'a>>,
diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs
index ec90eda8d6f5..65eb7278d482 100644
--- a/quaint/src/visitor/postgres.rs
+++ b/quaint/src/visitor/postgres.rs
@@ -11,7 +11,6 @@ use std::{
 ///
 /// The returned parameter values implement the `ToSql` trait from postgres and
 /// can be used directly with the database.
-#[cfg_attr(feature = "docs", doc(cfg(feature = "postgresql")))]
 pub struct Postgres<'a> {
     query: String,
     parameters: Vec<Value<'a>>,
diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs
index 45b9a82468ef..e2211e300421 100644
--- a/quaint/src/visitor/sqlite.rs
+++ b/quaint/src/visitor/sqlite.rs
@@ -10,7 +10,6 @@ use std::fmt::{self, Write};
 ///
 /// The returned parameter values implement the `ToSql` trait from rusqlite and
 /// can be used directly with the database.
-#[cfg_attr(feature = "docs", doc(cfg(feature = "sqlite")))]
 pub struct Sqlite<'a> {
     query: String,
     parameters: Vec<Value<'a>>,
diff --git a/quaint/test-macros/src/test_each_connector.rs b/quaint/test-macros/src/test_each_connector.rs
deleted file mode 100644
index 501c251413d4..000000000000
--- a/quaint/test-macros/src/test_each_connector.rs
+++ /dev/null
@@ -1,138 +0,0 @@
-use darling::FromMeta;
-use once_cell::sync::Lazy;
-use proc_macro::TokenStream;
-use proc_macro2::Span;
-use quote::quote;
-use std::str::FromStr;
-use syn::{parse_macro_input, spanned::Spanned, AttributeArgs, Ident, ItemFn};
-use test_setup::{ConnectorDefinition, Tags, CONNECTORS};
-
-static TAGS_FILTER: Lazy<Tags> = Lazy::new(|| {
-    let tags_str = std::env::var("TEST_EACH_CONNECTOR_TAGS").ok();
-    let mut tags = Tags::empty();
-
-    if let Some(tags_str) = tags_str {
-        for tag_str in tags_str.split(',') {
-            let tag = Tags::from_str(tag_str).unwrap();
-            tags |= tag;
-        }
-    }
-
-    tags
-});
-
-#[derive(Debug, FromMeta)]
-struct TestEachConnectorArgs {
-    /// If present, run only the tests for the connectors with any of the passed
-    /// in tags.
-    #[darling(default)]
-    tags: TagsWrapper,
-
-    /// Optional list of tags to ignore.
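// (The darling fields here define the attribute grammar; the test diffs above
// use it as `#[test_each_connector(tags("postgresql", "sqlite"))]`. Combining
// both fields would look like the following -- the `ignore("mysql")` value is
// illustrative:
//
//     #[test_each_connector(tags("postgresql"), ignore("mysql"))]
//     async fn my_test(api: &mut dyn TestApi) -> crate::Result<()> { Ok(()) }
// )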
- #[darling(default)] - ignore: TagsWrapper, -} - -impl TestEachConnectorArgs { - fn connectors_to_test(&self) -> impl Iterator { - CONNECTORS - .all() - .filter(move |connector| TAGS_FILTER.is_empty() || connector.tags.contains(*TAGS_FILTER)) - .filter(move |connector| self.tags.0.is_empty() || connector.tags.intersects(self.tags.0)) - .filter(move |connector| !connector.tags.intersects(self.ignore.0)) - } -} - -#[derive(Debug)] -struct TagsWrapper(Tags); - -impl Default for TagsWrapper { - fn default() -> Self { - TagsWrapper(Tags::empty()) - } -} - -impl darling::FromMeta for TagsWrapper { - fn from_list(items: &[syn::NestedMeta]) -> Result { - let mut tags = Tags::empty(); - - for item in items { - match item { - syn::NestedMeta::Lit(syn::Lit::Str(s)) => { - let s = s.value(); - let tag = Tags::from_str(&s) - .map_err(|err| darling::Error::unknown_value(&err.to_string()).with_span(&item.span()))?; - tags.insert(tag); - } - syn::NestedMeta::Lit(other) => { - return Err(darling::Error::unexpected_lit_type(other).with_span(&other.span())) - } - syn::NestedMeta::Meta(meta) => { - return Err(darling::Error::unsupported_shape("Expected string literal").with_span(&meta.span())) - } - } - } - - Ok(TagsWrapper(tags)) - } -} - -pub fn test_each_connector_impl(attr: TokenStream, input: TokenStream) -> TokenStream { - let attributes_meta: syn::AttributeArgs = parse_macro_input!(attr as AttributeArgs); - let args = TestEachConnectorArgs::from_list(&attributes_meta); - - let mut test_function = parse_macro_input!(input as ItemFn); - super::strip_test_attribute(&mut test_function); - - let tests = match args { - Ok(args) => test_each_connector_async_wrapper_functions(&args, &test_function), - Err(err) => return err.write_errors().into(), - }; - - let output = quote! { - #(#tests)* - - #test_function - }; - - output.into() -} - -fn test_each_connector_async_wrapper_functions( - args: &TestEachConnectorArgs, - test_function: &ItemFn, -) -> Vec { - let test_fn_name = &test_function.sig.ident; - let mut tests = Vec::with_capacity(CONNECTORS.len()); - - let optional_unwrap = if super::function_returns_result(&test_function) { - Some(quote!(.unwrap())) - } else { - None - }; - - for connector in args.connectors_to_test() { - let connector_name = connector.name(); - let feature_name = connector.feature_name(); - let connector_test_fn_name = Ident::new(&format!("{}_on_{}", test_fn_name, connector_name), Span::call_site()); - - let conn_api_factory = Ident::new(connector.test_api(), Span::call_site()); - - let test = quote! 
{ - #[test] - #[cfg(feature = #feature_name)] - fn #connector_test_fn_name() { - let fut = async { - let mut api = #conn_api_factory().await#optional_unwrap; - #test_fn_name(&mut api).await#optional_unwrap - }; - - test_setup::run_with_tokio(fut) - } - }; - - tests.push(test); - } - - tests -} From d70b58443ade9f14b1c0156924d8c5daba26f032 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 6 Oct 2023 10:11:59 +0200 Subject: [PATCH 072/128] driver-adapters: update prisma in smoke-tests (#4317) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .../driver-adapters/js/pnpm-lock.yaml | 30 +++++++++---------- .../js/smoke-test-js/package.json | 4 +-- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index bc36246d5c9d..efa3787712e4 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -131,8 +131,8 @@ importers: specifier: workspace:* version: link:../adapter-planetscale '@prisma/client': - specifier: 5.4.0-integration-dispose-tx.2 - version: 5.4.0-integration-dispose-tx.2(prisma@5.4.0-integration-dispose-tx.2) + specifier: 5.4.1 + version: 5.4.1(prisma@5.4.1) '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils @@ -156,8 +156,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.4.0-integration-dispose-tx.2 - version: 5.4.0-integration-dispose-tx.2 + specifier: 5.4.1 + version: 5.4.1 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -521,8 +521,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.4.0-integration-dispose-tx.2(prisma@5.4.0-integration-dispose-tx.2): - resolution: {integrity: sha512-MShiYnvIUS/5ThfLRjyGaKGrhtzj69f38EqEksph7KckbLzfPQ7VWAJ2ZwKoi5DGJXEPaeb3S0lQpXl2KyuGxA==} + /@prisma/client@5.4.1(prisma@5.4.1): + resolution: {integrity: sha512-xyD0DJ3gRNfLbPsC+YfMBBuLJtZKQfy1OD2qU/PZg+HKrr7SO+09174LMeTlWP0YF2wca9LxtVd4HnAiB5ketQ==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -531,16 +531,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.4.0-26.4bf3cce422a49f49c661da32d4016a5be81d28b4 - prisma: 5.4.0-integration-dispose-tx.2 + '@prisma/engines-version': 5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f + prisma: 5.4.1 dev: false - /@prisma/engines-version@5.4.0-26.4bf3cce422a49f49c661da32d4016a5be81d28b4: - resolution: {integrity: sha512-6yhw/P2lWJOljh3QIkqeBNgLPBLVca08YjKPTyOlQ771vnA3pH+EYpIi2VOb2+3NsIM9zlX1NvFadd4qSbtubA==} + /@prisma/engines-version@5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f: + resolution: {integrity: sha512-+nUQM/y8C+1GG5Ioeqcu6itFslCfxvQSAUVSMC9XM2G2Fcq0F4Afnp6m0pXF6X6iUBWen7jZBPmM9Qlq4Nr3/A==} dev: false - /@prisma/engines@5.4.0-integration-dispose-tx.2: - resolution: {integrity: sha512-3kYPptQRiyDARcJIZudak7naHlTo0qYB/8ObxlIyw9IjbKax2m4MiPZuVasVpdcspXYj+ayzomFmCDptjZrjzg==} + /@prisma/engines@5.4.1: + resolution: {integrity: sha512-vJTdY4la/5V3N7SFvWRmSMUh4mIQnyb/MNoDjzVbh9iLmEC+uEykj/1GPviVsorvfz7DbYSQC4RiwmlEpTEvGA==} requiresBuild: true /@types/debug@4.1.8: @@ -1253,13 +1253,13 @@ packages: /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - 
/prisma@5.4.0-integration-dispose-tx.2: - resolution: {integrity: sha512-FBI46emn8rBapyTN6cwM0KNtmK94D9mucnQh2g+VhjWqD1SpFwFTVLXiT25tOFwEK0M/UQQ+eBsXn65BNBoisQ==} + /prisma@5.4.1: + resolution: {integrity: sha512-op9PmU8Bcw5dNAas82wBYTG0yHnpq9/O3bhxbDBrNzwZTwBqsVCxxYRLf6wHNh9HVaDGhgjjHlu1+BcW8qdnBg==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.4.0-integration-dispose-tx.2 + '@prisma/engines': 5.4.1 /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 23c6ed2db129..76e8bee532e3 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -51,7 +51,7 @@ "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.0-integration-dispose-tx.2", + "@prisma/client": "5.4.1", "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", @@ -61,7 +61,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.4.0-integration-dispose-tx.2", + "prisma": "5.4.1", "tsx": "^3.12.7" } } From 07c8014ed85f2caccf1611211e2d9fe6381a8901 Mon Sep 17 00:00:00 2001 From: Sophie <29753584+Druue@users.noreply.github.com> Date: Fri, 6 Oct 2023 10:12:38 +0200 Subject: [PATCH 073/128] fix(fmt): add missing quotation mark for `add_schema_to_schemas` (#4319) --- prisma-fmt/src/code_actions/multi_schema.rs | 2 +- .../scenarios/multi_schema_add_to_existing_schemas/result.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs index c797245b87e9..0e47a008a910 100644 --- a/prisma-fmt/src/code_actions/multi_schema.rs +++ b/prisma-fmt/src/code_actions/multi_schema.rs @@ -136,7 +136,7 @@ pub(super) fn add_schema_to_schemas( let edit = match datasource.schemas_span { Some(span) => { - let formatted_attribute = format!(r#", "{}""#, model.schema_name().unwrap()); + let formatted_attribute = format!(r#"", "{}""#, model.schema_name().unwrap()); super::create_text_edit( schema, formatted_attribute, diff --git a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json index 26a243e4f105..0e3f2348b54a 100644 --- a/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json +++ b/prisma-fmt/tests/code_actions/scenarios/multi_schema_add_to_existing_schemas/result.json @@ -32,7 +32,7 @@ "character": 28 } }, - "newText": ", \"base\"" + "newText": "\", \"base\"" } ] } From ffd354e5118449d24ad11eb663dd292c2af62f1c Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Fri, 6 Oct 2023 10:16:51 +0200 Subject: [PATCH 074/128] ci: Driver Adapters versioning post 5.4.0 (#4302) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Joël Galeran --- .github/workflows/publish-driver-adapters.yml | 78 +++++++++++++++++++ .../js/adapter-libsql/package.json | 2 +- .../js/adapter-neon/package.json | 2 +- .../js/adapter-pg/package.json | 2 +- .../js/adapter-planetscale/package.json | 2 +- .../js/driver-adapter-utils/package.json | 2 +- 6 files changed, 83 
insertions(+), 5 deletions(-) create mode 100644 .github/workflows/publish-driver-adapters.yml diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml new file mode 100644 index 000000000000..93254be7ba80 --- /dev/null +++ b/.github/workflows/publish-driver-adapters.yml @@ -0,0 +1,78 @@ +name: Build and publish Prisma Driver Adapters + +concurrency: publish-prisma-driver-adapters + +on: + # usually triggered via GH Actions Workflow in prisma/prisma repo + workflow_dispatch: + inputs: + enginesHash: + description: Engine commit hash to checkout for publishing + required: true + prismaVersion: + description: Prisma version to use for publishing + required: true + npmDistTag: + description: npm dist-tag to use for publishing + required: true + default: "latest" + dryRun: + description: 'Check to do a dry run (does not publish packages)' + type: boolean + +jobs: + build: + name: Build and publish Prisma Driver Adapters + runs-on: ubuntu-latest + steps: + - name: Print input + env: + THE_INPUT: "${{ toJson(github.event.inputs) }}" + run: | + echo $THE_INPUT + + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.enginesHash }} + + # + # Build + # + + - uses: actions/setup-node@v3 + with: + node-version: "20.x" + registry-url: 'https://registry.npmjs.org/' + + - run: pnpm i + working-directory: query-engine/driver-adapters/js + - run: pnpm -r build + working-directory: query-engine/driver-adapters/js + + # + # Publish + # + + - run: | + # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result + find . -name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; + pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} --dry-run # TODO remove hardcoded dry-run and use input instead! 
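      # (For reference, the jq invocation in the run block above rewrites one
      # manifest at a time; applied to a single file it is equivalent to:
      #
      #   jq --arg version "5.4.1" '.version = $version' package.json > tmpfile \
      #     && mv tmpfile package.json
      #
      # The version string is illustrative. jq has no in-place edit flag, which
      # is why the output goes through a temporary file.)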
+        working-directory: query-engine/driver-adapters/js
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+      #
+      # Failure handlers
+      #
+
+      - name: Set current job url in SLACK_FOOTER env var
+        if: ${{ failure() }}
+        run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV
+
+      - name: Slack Notification on Failure
+        if: ${{ failure() }}
+        uses: rtCamp/action-slack-notify@v2.2.1
+        env:
+          SLACK_TITLE: "prisma driver adapters publishing failed :x:"
+          SLACK_COLOR: "#FF0000"
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }}
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json
index a455b6fc72b3..fbce33c98a29 100644
--- a/query-engine/driver-adapters/js/adapter-libsql/package.json
+++ b/query-engine/driver-adapters/js/adapter-libsql/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/adapter-libsql",
-  "version": "5.4.0",
+  "version": "0.0.0",
   "description": "Prisma's driver adapter for libSQL and Turso",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json
index a2a67d8d128f..03d19f6eeb0d 100644
--- a/query-engine/driver-adapters/js/adapter-neon/package.json
+++ b/query-engine/driver-adapters/js/adapter-neon/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/adapter-neon",
-  "version": "5.4.0",
+  "version": "0.0.0",
   "description": "Prisma's driver adapter for \"@neondatabase/serverless\"",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json
index 58262e63d0b0..3573d33bc161 100644
--- a/query-engine/driver-adapters/js/adapter-pg/package.json
+++ b/query-engine/driver-adapters/js/adapter-pg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/adapter-pg",
-  "version": "5.4.0",
+  "version": "0.0.0",
   "description": "Prisma's driver adapter for \"pg\"",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json
index ac2aac284565..59d59704ab50 100644
--- a/query-engine/driver-adapters/js/adapter-planetscale/package.json
+++ b/query-engine/driver-adapters/js/adapter-planetscale/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/adapter-planetscale",
-  "version": "5.4.0",
+  "version": "0.0.0",
   "description": "Prisma's driver adapter for \"@planetscale/database\"",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json
index 14385c963f16..64301a7a5533 100644
--- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json
+++ b/query-engine/driver-adapters/js/driver-adapter-utils/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@prisma/driver-adapter-utils",
-  "version": "5.4.0",
+  "version": "0.0.0",
   "description": "Internal set of utilities and types for Prisma's driver adapters.",
   "main": "dist/index.js",
   "module": "dist/index.mjs",

From 97f8a055c0b9b9b7240c32f1368615c82d4c9782 Mon Sep 17 00:00:00 2001
From: Alberto Schiabel
Date: Fri, 6 Oct 2023 11:36:22 +0200
Subject: [PATCH 075/128] feat(quaint): remove `bigdecimal` conditional flag
 (#4322)

* feat(quaint): remove "bigdecimal" conditional flag

* feat(quaint): remove references to serde-support
---
 .github/workflows/quaint.yml                |  8 +++---
 Cargo.toml                                  |  1 -
 quaint/.github/workflows/test.yml           |  8 +++---
 quaint/Cargo.toml                           |  7 ++----
 quaint/src/ast/values.rs                    | 27 +++++++++------------
 quaint/src/connector/mssql/conversion.rs    | 13 ++--------
 quaint/src/connector/mysql/conversion.rs    |  5 ++--
 quaint/src/connector/postgres/conversion.rs | 24 ++----------------
 quaint/src/connector/sqlite/conversion.rs   |  3 ---
 quaint/src/error.rs                         |  1 -
 quaint/src/lib.rs                           |  3 +--
 quaint/src/tests/query.rs                   |  4 +--
 quaint/src/tests/types/mssql.rs             |  1 -
 quaint/src/tests/types/mssql/bigdecimal.rs  |  1 -
 quaint/src/tests/types/mysql.rs             |  6 -----
 quaint/src/tests/types/postgres.rs          |  3 +--
 quaint/src/tests/types/sqlite.rs            |  4 +--
 quaint/src/visitor/mssql.rs                 |  2 +-
 quaint/src/visitor/mysql.rs                 |  2 +-
 quaint/src/visitor/postgres.rs              |  2 +-
 quaint/src/visitor/sqlite.rs                |  2 +-
 21 files changed, 35 insertions(+), 92 deletions(-)

diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml
index 3c1ae42f9a70..6d1a871487c0 100644
--- a/.github/workflows/quaint.yml
+++ b/.github/workflows/quaint.yml
@@ -17,13 +17,13 @@ jobs:
         features:
          - "--lib --features=all"
          - "--lib --no-default-features --features=sqlite"
-          - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled --features=bigdecimal"
+          - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled"
          - "--lib --no-default-features --features=postgresql"
-          - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled --features=bigdecimal"
+          - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled"
          - "--lib --no-default-features --features=mysql"
-          - "--lib --no-default-features --features=mysql --features=uuid --features=pooled --features=bigdecimal"
+          - "--lib --no-default-features --features=mysql --features=uuid --features=pooled"
          - "--lib --no-default-features --features=mssql"
-          - "--lib --no-default-features --features=mssql --features=uuid --features=pooled --features=bigdecimal"
+          - "--lib --no-default-features --features=mssql --features=uuid --features=pooled"
     env:
       TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma"
       TEST_MYSQL8: "mysql://root:prisma@localhost:3307/prisma"
diff --git a/Cargo.toml b/Cargo.toml
index e82019b1e5c5..cbec74fa92b6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -60,7 +60,6 @@ napi-derive = "2.12.4"
 [workspace.dependencies.quaint]
 path = "quaint"
 features = [
-    "bigdecimal",
     "expose-drivers",
     "fmt-sql",
     "mssql",
diff --git a/quaint/.github/workflows/test.yml b/quaint/.github/workflows/test.yml
index 998a1a71ca46..e3779dfdea2b 100644
--- a/quaint/.github/workflows/test.yml
+++ b/quaint/.github/workflows/test.yml
@@ -46,13 +46,13 @@ jobs:
         features:
          - "--lib --features=all"
          - "--lib --no-default-features --features=sqlite"
-          - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled --features=serde-support --features=bigdecimal"
+          - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled"
          - "--lib --no-default-features --features=postgresql"
-          - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled --features=serde-support --features=bigdecimal"
+          - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled"
          - "--lib --no-default-features --features=mysql"
-          - "--lib --no-default-features --features=mysql --features=uuid --features=pooled --features=serde-support --features=bigdecimal"
+          - "--lib --no-default-features --features=mysql --features=uuid --features=pooled"
          - "--lib --no-default-features --features=mssql"
-          - "--lib --no-default-features --features=mssql --features=uuid --features=pooled --features=serde-support --features=bigdecimal"
+          - "--lib --no-default-features --features=mssql --features=uuid --features=pooled"
          - "--doc --features=all"
     env:
       TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma"
diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml
index d79d0858ecfe..638a7749b407 100644
--- a/quaint/Cargo.toml
+++ b/quaint/Cargo.toml
@@ -29,7 +29,7 @@ docs = []
 # way to access database-specific methods when you need extra control.
 expose-drivers = []
 
-all = ["mssql", "mysql", "pooled", "postgresql", "sqlite", "uuid", "bigdecimal"]
+all = ["mssql", "mysql", "pooled", "postgresql", "sqlite", "uuid"]
 
 vendored-openssl = [
     "postgres-native-tls/vendored-openssl",
@@ -52,7 +52,6 @@ mssql = ["tiberius", "uuid", "tokio-util", "tokio/time", "tokio/net", "either"]
 mysql = ["mysql_async", "tokio/time", "lru-cache"]
 pooled = ["mobc"]
 sqlite = ["rusqlite", "tokio/sync"]
-bigdecimal = ["bigdecimal_"]
 fmt-sql = ["sqlformat"]
 
 [dependencies]
@@ -124,10 +123,8 @@ features = [
     "winauth",
 ]
 
-[dependencies.bigdecimal_]
+[dependencies.bigdecimal]
 version = "0.3"
-optional = true
-package = "bigdecimal"
 
 [dependencies.uuid]
 version = "1"
diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs
index 49560bbc695f..677c27fdfd93 100644
--- a/quaint/src/ast/values.rs
+++ b/quaint/src/ast/values.rs
@@ -1,7 +1,6 @@
 use crate::ast::*;
 use crate::error::{Error, ErrorKind};
 
-#[cfg(feature = "bigdecimal")]
 use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive};
 use chrono::{DateTime, NaiveDate, NaiveTime, Utc};
 use serde_json::{Number, Value as JsonValue};
@@ -59,7 +58,6 @@ impl<'a> Value<'a> {
     }
 
     /// Creates a new decimal value.
-    #[cfg(feature = "bigdecimal")]
     pub fn numeric(value: BigDecimal) -> Self {
         ValueType::numeric(value).into_value()
     }
@@ -270,21 +268,21 @@ impl<'a> Value<'a> {
     }
 
     /// `true` if the `Value` is a numeric value or can be converted to one.
-    #[cfg(feature = "bigdecimal")]
+
     pub fn is_numeric(&self) -> bool {
         self.typed.is_numeric()
     }
 
     /// Returns a bigdecimal, if the value is a numeric, float or double value,
     /// otherwise `None`.
-    #[cfg(feature = "bigdecimal")]
+
     pub fn into_numeric(self) -> Option<BigDecimal> {
         self.typed.into_numeric()
     }
 
     /// Returns a reference to a bigdecimal, if the value is a numeric.
     /// Otherwise `None`.
-    #[cfg(feature = "bigdecimal")]
+
     pub fn as_numeric(&self) -> Option<&BigDecimal> {
         self.typed.as_numeric()
     }
@@ -421,7 +419,6 @@ impl<'a> Value<'a> {
         ValueType::Array(None).into()
     }
 
-    #[cfg(feature = "bigdecimal")]
     pub fn null_numeric() -> Self {
         ValueType::Numeric(None).into()
     }
@@ -505,7 +502,6 @@ pub enum ValueType<'a> {
     /// An array value (PostgreSQL).
     Array(Option<Vec<Value<'a>>>),
     /// A numeric value.
-    #[cfg(feature = "bigdecimal")]
     Numeric(Option<BigDecimal>),
     /// A JSON value.
     Json(Option<serde_json::Value>),
@@ -579,7 +575,7 @@ impl<'a> fmt::Display for ValueType<'a> {
                 write!(f, "]")
             }),
             ValueType::Xml(val) => val.as_ref().map(|v| write!(f, "{v}")),
-            #[cfg(feature = "bigdecimal")]
+
             ValueType::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")),
             ValueType::Json(val) => val.as_ref().map(|v| write!(f, "{v}")),
             #[cfg(feature = "uuid")]
@@ -638,7 +634,7 @@ impl<'a> From<Value<'a>> for serde_json::Value {
             ValueType::Array(v) => {
                 v.map(|v| serde_json::Value::Array(v.into_iter().map(serde_json::Value::from).collect()))
             }
-            #[cfg(feature = "bigdecimal")]
+
             ValueType::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()),
             ValueType::Json(v) => v,
             #[cfg(feature = "uuid")]
@@ -677,7 +673,7 @@ impl<'a> ValueType<'a> {
     }
 
     /// Creates a new decimal value.
-    #[cfg(feature = "bigdecimal")]
+
     pub(crate) fn numeric(value: BigDecimal) -> Self {
         Self::Numeric(Some(value))
     }
@@ -816,7 +812,7 @@ impl<'a> ValueType<'a> {
             Self::Char(c) => c.is_none(),
             Self::Array(v) => v.is_none(),
             Self::Xml(s) => s.is_none(),
-            #[cfg(feature = "bigdecimal")]
+
             Self::Numeric(r) => r.is_none(),
             #[cfg(feature = "uuid")]
             Self::Uuid(u) => u.is_none(),
@@ -948,14 +944,14 @@ impl<'a> ValueType<'a> {
     }
 
     /// `true` if the `Value` is a numeric value or can be converted to one.
-    #[cfg(feature = "bigdecimal")]
+
     pub(crate) fn is_numeric(&self) -> bool {
         matches!(self, Self::Numeric(_) | Self::Float(_) | Self::Double(_))
     }
 
     /// Returns a bigdecimal, if the value is a numeric, float or double value,
     /// otherwise `None`.
-    #[cfg(feature = "bigdecimal")]
+
     pub(crate) fn into_numeric(self) -> Option<BigDecimal> {
         match self {
             Self::Numeric(d) => d,
@@ -967,7 +963,7 @@ impl<'a> ValueType<'a> {
 
     /// Returns a reference to a bigdecimal, if the value is a numeric.
     /// Otherwise `None`.
-    #[cfg(feature = "bigdecimal")]
+
     pub(crate) fn as_numeric(&self) -> Option<&BigDecimal> {
         match self {
             Self::Numeric(d) => d.as_ref(),
@@ -1128,7 +1124,7 @@ value!(val: f32, Float, val);
 value!(val: DateTime<Utc>, DateTime, val);
 value!(val: chrono::NaiveTime, Time, val);
 value!(val: chrono::NaiveDate, Date, val);
-#[cfg(feature = "bigdecimal")]
+
 value!(val: BigDecimal, Numeric, val);
 value!(val: JsonValue, Json, val);
 #[cfg(feature = "uuid")]
@@ -1155,7 +1151,6 @@ impl<'a> TryFrom<Value<'a>> for i32 {
     }
 }
 
-#[cfg(feature = "bigdecimal")]
 impl<'a> TryFrom<Value<'a>> for BigDecimal {
     type Error = Error;
 
diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs
index 246d1a30cdde..789e1463601b 100644
--- a/quaint/src/connector/mssql/conversion.rs
+++ b/quaint/src/connector/mssql/conversion.rs
@@ -1,10 +1,8 @@
 use crate::ast::{Value, ValueType};
-#[cfg(not(feature = "bigdecimal"))]
-use crate::error::*;
-#[cfg(feature = "bigdecimal")]
+
 use bigdecimal::BigDecimal;
 use std::{borrow::Cow, convert::TryFrom};
-#[cfg(feature = "bigdecimal")]
+
 use tiberius::ToSql;
 use tiberius::{ColumnData, FromSql, IntoSql};
 
@@ -22,7 +20,6 @@ impl<'a> IntoSql<'a> for &'a Value<'a> {
             ValueType::Char(val) => val.as_ref().map(|val| format!("{val}")).into_sql(),
             ValueType::Xml(val) => val.as_deref().into_sql(),
             ValueType::Array(_) | ValueType::EnumArray(_, _) => panic!("Arrays are not supported on SQL Server."),
-            #[cfg(feature = "bigdecimal")]
             ValueType::Numeric(val) => (*val).to_sql(),
             ValueType::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(),
             #[cfg(feature = "uuid")]
@@ -49,13 +46,7 @@ impl TryFrom<ColumnData<'static>> for Value<'static> {
             ColumnData::String(s) => ValueType::Text(s),
             ColumnData::Guid(uuid) => ValueType::Uuid(uuid),
             ColumnData::Binary(bytes) => ValueType::Bytes(bytes),
-            #[cfg(feature = "bigdecimal")]
             numeric @ ColumnData::Numeric(_) => ValueType::Numeric(BigDecimal::from_sql(&numeric)?),
-            #[cfg(not(feature = "bigdecimal"))]
-            _numeric @ ColumnData::Numeric(_) => {
-                let kind = ErrorKind::conversion("Please enable `bigdecimal` feature to read numeric values");
-                return Err(Error::builder(kind).build());
-            }
             dt @ ColumnData::DateTime(_) => {
                 use tiberius::time::chrono::{DateTime, NaiveDateTime, Utc};
 
diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs
index 9230199eaf40..672cca03cdb5 100644
--- a/quaint/src/connector/mysql/conversion.rs
+++ b/quaint/src/connector/mysql/conversion.rs
@@ -39,7 +39,7 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result<my::Params> {
                     return Err(builder.build());
                 }
 
-                #[cfg(feature = "bigdecimal")]
+
                 ValueType::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())),
                 ValueType::Json(s) => match s {
                     Some(ref s) => {
@@ -235,7 +235,6 @@ impl TakeRow for my::Row {
                     Value::enum_variant(s)
                 }
                 // NEWDECIMAL returned as bytes. See https://mariadb.com/kb/en/resultset-row/#decimal-binary-encoding
-                #[cfg(feature = "bigdecimal")]
                 my::Value::Bytes(b) if column.is_real() => {
                     let s = String::from_utf8(b).map_err(|_| {
                         let msg = "Could not convert NEWDECIMAL from bytes to String.";
@@ -312,7 +311,7 @@ impl TakeRow for my::Row {
                 t if t.is_double() => Value::null_double(),
                 t if t.is_text() => Value::null_text(),
                 t if t.is_bytes() => Value::null_bytes(),
-                #[cfg(feature = "bigdecimal")]
+
                 t if t.is_real() => Value::null_numeric(),
                 t if t.is_datetime() => Value::null_datetime(),
                 t if t.is_time() => Value::null_time(),
diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs
index 0ab4413f792c..31be6dd68682 100644
--- a/quaint/src/connector/postgres/conversion.rs
+++ b/quaint/src/connector/postgres/conversion.rs
@@ -1,4 +1,3 @@
-#[cfg(feature = "bigdecimal")]
 mod decimal;
 
 use crate::{
@@ -6,12 +5,12 @@ use crate::{
     connector::queryable::{GetRow, ToColumnNames},
     error::{Error, ErrorKind},
 };
-#[cfg(feature = "bigdecimal")]
+
 use bigdecimal::{num_bigint::BigInt, BigDecimal, FromPrimitive, ToPrimitive};
 use bit_vec::BitVec;
 use bytes::BytesMut;
 use chrono::{DateTime, NaiveDateTime, Utc};
-#[cfg(feature = "bigdecimal")]
+
 pub(crate) use decimal::DecimalWrapper;
 use postgres_types::{FromSql, ToSql, WrongType};
 use std::{convert::TryFrom, error::Error as StdError};
@@ -49,7 +48,6 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
             ValueType::Bytes(_) => PostgresType::BYTEA,
             ValueType::Boolean(_) => PostgresType::BOOL,
             ValueType::Char(_) => PostgresType::CHAR,
-            #[cfg(feature = "bigdecimal")]
             ValueType::Numeric(_) => PostgresType::NUMERIC,
             ValueType::Json(_) => PostgresType::JSONB,
             ValueType::Xml(_) => PostgresType::XML,
@@ -87,7 +85,6 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
                     ValueType::Bytes(_) => PostgresType::BYTEA_ARRAY,
                     ValueType::Boolean(_) => PostgresType::BOOL_ARRAY,
                     ValueType::Char(_) => PostgresType::CHAR_ARRAY,
-                    #[cfg(feature = "bigdecimal")]
                     ValueType::Numeric(_) => PostgresType::NUMERIC_ARRAY,
                     ValueType::Json(_) => PostgresType::JSONB_ARRAY,
                     ValueType::Xml(_) => PostgresType::XML_ARRAY,
@@ -151,10 +148,8 @@ impl<'a> FromSql<'a> for TimeTz {
 /// of 2 decimals.
 ///
 /// Postgres docs: https://www.postgresql.org/docs/current/datatype-money.html
-#[cfg(feature = "bigdecimal")]
 struct NaiveMoney(BigDecimal);
 
-#[cfg(feature = "bigdecimal")]
 impl<'a> FromSql<'a> for NaiveMoney {
     fn from_sql(_ty: &PostgresType, raw: &'a [u8]) -> Result<Self, Box<dyn StdError + Sync + Send>> {
         let cents = i64::from_sql(&PostgresType::INT8, raw)?;
@@ -223,13 +218,11 @@ impl GetRow for PostgresRow {
                 }
                 None => Value::null_array(),
             },
-            #[cfg(feature = "bigdecimal")]
             PostgresType::NUMERIC => {
                 let dw: Option<DecimalWrapper> = row.try_get(i)?;
 
                 ValueType::Numeric(dw.map(|dw| dw.0)).into_value()
             }
-            #[cfg(feature = "bigdecimal")]
             PostgresType::MONEY => match row.try_get(i)? {
                 Some(val) => {
                     let val: NaiveMoney = val;
@@ -352,7 +345,6 @@ impl GetRow for PostgresRow {
                 }
                 None => Value::null_array(),
             },
-            #[cfg(feature = "bigdecimal")]
             PostgresType::NUMERIC_ARRAY => match row.try_get(i)? {
                 Some(val) => {
                     let val: Vec<Option<DecimalWrapper>> = val;
@@ -375,7 +367,6 @@ impl GetRow for PostgresRow {
                     None => Value::null_array(),
                 }
             }
-            #[cfg(feature = "bigdecimal")]
             PostgresType::MONEY_ARRAY => match row.try_get(i)? {
                 Some(val) => {
                     let val: Vec<Option<NaiveMoney>> = val;
@@ -652,12 +643,10 @@ impl<'a> ToSql for Value<'a> {
                 _ => None,
             },
             (ValueType::Int64(integer), &PostgresType::INT8) => integer.map(|integer| integer.to_sql(ty, out)),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Int32(integer), &PostgresType::NUMERIC) => integer
                 .map(|integer| BigDecimal::from_i32(integer).unwrap())
                 .map(DecimalWrapper)
                 .map(|dw| dw.to_sql(ty, out)),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Int64(integer), &PostgresType::NUMERIC) => integer
                 .map(|integer| BigDecimal::from_i64(integer).unwrap())
                 .map(DecimalWrapper)
@@ -699,30 +688,25 @@ impl<'a> ToSql for Value<'a> {
             (ValueType::Int32(integer), _) => integer.map(|integer| integer.to_sql(ty, out)),
             (ValueType::Int64(integer), _) => integer.map(|integer| integer.to_sql(ty, out)),
             (ValueType::Float(float), &PostgresType::FLOAT8) => float.map(|float| (float as f64).to_sql(ty, out)),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Float(float), &PostgresType::NUMERIC) => float
                 .map(|float| BigDecimal::from_f32(float).unwrap())
                 .map(DecimalWrapper)
                 .map(|dw| dw.to_sql(ty, out)),
             (ValueType::Float(float), _) => float.map(|float| float.to_sql(ty, out)),
             (ValueType::Double(double), &PostgresType::FLOAT4) => double.map(|double| (double as f32).to_sql(ty, out)),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Double(double), &PostgresType::NUMERIC) => double
                 .map(|double| BigDecimal::from_f64(double).unwrap())
                 .map(DecimalWrapper)
                 .map(|dw| dw.to_sql(ty, out)),
             (ValueType::Double(double), _) => double.map(|double| double.to_sql(ty, out)),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Numeric(decimal), &PostgresType::FLOAT4) => decimal.as_ref().map(|decimal| {
                 let f = decimal.to_string().parse::<f32>().expect("decimal to f32 conversion");
                 f.to_sql(ty, out)
             }),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Numeric(decimal), &PostgresType::FLOAT8) => decimal.as_ref().map(|decimal| {
                 let f = decimal.to_string().parse::<f64>().expect("decimal to f64 conversion");
                 f.to_sql(ty, out)
             }),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Array(values), &PostgresType::FLOAT4_ARRAY) => values.as_ref().map(|values| {
                 let mut floats = Vec::with_capacity(values.len());
 
                 floats.to_sql(ty, out)
             }),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Array(values), &PostgresType::FLOAT8_ARRAY) => values.as_ref().map(|values| {
                 let mut floats = Vec::with_capacity(values.len());
 
                 floats.to_sql(ty, out)
             }),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Numeric(decimal), &PostgresType::MONEY) => decimal.as_ref().map(|decimal| {
                 let decimal = (decimal * BigInt::from_i32(100).unwrap()).round(0);
 
                 i.to_sql(ty, out)
             }),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Numeric(decimal), &PostgresType::NUMERIC) => decimal
                 .as_ref()
                 .map(|decimal| DecimalWrapper(decimal.clone()).to_sql(ty, out)),
-            #[cfg(feature = "bigdecimal")]
             (ValueType::Numeric(float), _) => float
                 .as_ref()
                 .map(|float| DecimalWrapper(float.clone()).to_sql(ty, out)),
diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs
index 4f6dea515621..e7566be81140 100644
--- a/quaint/src/connector/sqlite/conversion.rs
+++ b/quaint/src/connector/sqlite/conversion.rs
@@ -144,7 +144,6 @@ impl<'a> GetRow for SqliteRow<'a> {
                     c if c.is_bytes() => Value::null_bytes(),
                     c if c.is_float() => Value::null_float(),
                     c if c.is_double() => Value::null_double(),
-                    #[cfg(feature = "bigdecimal")]
                     c if c.is_real() => Value::null_numeric(),
                     c if c.is_datetime() => Value::null_datetime(),
                     c if c.is_date() => Value::null_date(),
@@ -191,7 +190,6 @@ impl<'a> GetRow for SqliteRow<'a> {
                         _ => Value::int64(i),
                     }
                 }
-                #[cfg(feature = "bigdecimal")]
                 ValueRef::Real(f) if column.is_real() => {
                     use bigdecimal::{BigDecimal, FromPrimitive};
 
@@ -264,7 +262,6 @@ impl<'a> ToSql for Value<'a> {
                     return Err(RusqlError::ToSqlConversionFailure(Box::new(builder.build())));
                 }
 
-            #[cfg(feature = "bigdecimal")]
             ValueType::Numeric(d) => d
                 .as_ref()
                 .map(|d| ToSqlOutput::from(d.to_string().parse::<f64>().expect("BigDecimal is not a f64."))),
diff --git a/quaint/src/error.rs b/quaint/src/error.rs
index 30b6f8c5116f..47b48c8dc8f9 100644
--- a/quaint/src/error.rs
+++ b/quaint/src/error.rs
@@ -314,7 +314,6 @@ impl From for ErrorKind {
     }
 }
 
-#[cfg(feature = "bigdecimal")]
 impl From<bigdecimal::ParseBigDecimalError> for Error {
     fn from(e: bigdecimal::ParseBigDecimalError) -> Self {
         let kind = ErrorKind::conversion(format!("{e}"));
diff --git a/quaint/src/lib.rs b/quaint/src/lib.rs
index 8e0d0f6bd178..1458a6ae1615 100644
--- a/quaint/src/lib.rs
+++ b/quaint/src/lib.rs
@@ -113,8 +113,7 @@ mod macros;
 #[macro_use]
 extern crate metrics;
 
-#[cfg(feature = "bigdecimal")]
-extern crate bigdecimal_ as bigdecimal;
+extern crate bigdecimal;
 
 pub mod ast;
 pub mod connector;
diff --git a/quaint/src/tests/query.rs b/quaint/src/tests/query.rs
index a239275cdd4b..06bebe1a9601 100644
--- a/quaint/src/tests/query.rs
+++ b/quaint/src/tests/query.rs
@@ -1372,7 +1372,7 @@ async fn float_columns_cast_to_f32(api: &mut dyn TestApi) -> crate::Result<()> {
 //   left: `Numeric(Some(BigDecimal("1.0")))`,
 //  right: `Double(Some(1.0))`'
 #[test_each_connector(tags("mysql"), ignore("mysql8"))]
-#[cfg(feature = "bigdecimal")]
+
 async fn newdecimal_conversion_is_handled_correctly(api: &mut dyn TestApi) -> crate::Result<()> {
     let select = Select::default().value(sum(Value::int32(1)).alias("theone"));
     let result = api.conn().select(select).await?;
@@ -2009,7 +2009,6 @@ async fn insert_default_keyword(api: &mut dyn TestApi) -> crate::Result<()> {
     Ok(())
 }
 
-#[cfg(feature = "bigdecimal")]
 #[test_each_connector(tags("postgresql"))]
 async fn ints_read_write_to_numeric(api: &mut dyn TestApi) -> crate::Result<()> {
     use bigdecimal::BigDecimal;
@@ -2038,7 +2037,6 @@ async fn ints_read_write_to_numeric(api: &mut dyn TestApi) -> crate::Result<()>
     Ok(())
 }
 
-#[cfg(feature = "bigdecimal")]
 #[test_each_connector(tags("postgresql"))]
 async fn bigdecimal_read_write_to_floating(api: &mut dyn TestApi) -> crate::Result<()> {
     use bigdecimal::BigDecimal;
 
diff --git a/quaint/src/tests/types/mssql.rs b/quaint/src/tests/types/mssql.rs
index 9d5d51317707..ac404dd8af38 100644
--- a/quaint/src/tests/types/mssql.rs
+++ b/quaint/src/tests/types/mssql.rs
@@ -1,6 +1,5 @@
 #![allow(clippy::approx_constant)]
 
-#[cfg(feature = "bigdecimal")]
 mod bigdecimal;
 
 use crate::tests::test_api::*;
diff --git a/quaint/src/tests/types/mssql/bigdecimal.rs b/quaint/src/tests/types/mssql/bigdecimal.rs
index 4dbd101ff456..8fe3761624d2 100644
--- a/quaint/src/tests/types/mssql/bigdecimal.rs
+++ b/quaint/src/tests/types/mssql/bigdecimal.rs
@@ -2,7 +2,6 @@ use super::*;
 use crate::bigdecimal::BigDecimal;
 use std::str::FromStr;
 
-#[cfg(feature = "bigdecimal")]
 test_type!(numeric(
     mssql,
     "numeric(10,2)",
diff --git a/quaint/src/tests/types/mysql.rs b/quaint/src/tests/types/mysql.rs
index cebfbef41033..ade4e5d2a1f2 100644
--- a/quaint/src/tests/types/mysql.rs
+++ b/quaint/src/tests/types/mysql.rs
@@ -2,10 +2,8 @@
 
 use crate::tests::test_api::*;
 
-#[cfg(feature = "bigdecimal")]
 use std::str::FromStr;
 
-#[cfg(feature = "bigdecimal")]
 use crate::bigdecimal::BigDecimal;
 
 test_type!(tinyint(
@@ -105,7 +103,6 @@ test_type!(bigint(
     Value::int64(i64::MAX)
 ));
 
-#[cfg(feature = "bigdecimal")]
 test_type!(decimal(
     mysql,
     "decimal(10,2)",
@@ -114,7 +111,6 @@ test_type!(decimal(
 ));
 
 // Highest mantissa on MySQL
-#[cfg(feature = "bigdecimal")]
 test_type!(decimal_65_6(
     mysql,
     "decimal(65, 6)",
@@ -123,7 +119,6 @@ test_type!(decimal_65_6(
     )?),
 ));
 
-#[cfg(feature = "bigdecimal")]
 test_type!(float_decimal(
     mysql,
     "float",
@@ -134,7 +129,6 @@ test_type!(float_decimal(
     )
 ));
 
-#[cfg(feature = "bigdecimal")]
 test_type!(double_decimal(
     mysql,
     "double",
diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs
index bcbe30702431..ba9b02095722 100644
--- a/quaint/src/tests/types/postgres.rs
+++ b/quaint/src/tests/types/postgres.rs
@@ -1,8 +1,7 @@
-#[cfg(feature = "bigdecimal")]
 mod bigdecimal;
 
 use crate::tests::test_api::*;
-#[cfg(any(feature = "bigdecimal", feature = "uuid"))]
+#[cfg(feature = "uuid")]
 use std::str::FromStr;
 
 test_type!(boolean(
diff --git a/quaint/src/tests/types/sqlite.rs b/quaint/src/tests/types/sqlite.rs
index e8e65c2ae722..e16e77c55526 100644
--- a/quaint/src/tests/types/sqlite.rs
+++ b/quaint/src/tests/types/sqlite.rs
@@ -3,7 +3,7 @@ use crate::tests::test_api::sqlite_test_api;
 use crate::tests::test_api::TestApi;
 use crate::{ast::*, connector::Queryable};
 
-#[cfg(feature = "bigdecimal")]
+
 use std::str::FromStr;
 
 test_type!(integer(
@@ -28,7 +28,6 @@ test_type!(big_int(
 
 test_type!(real(sqlite, "REAL", Value::null_double(), Value::double(1.12345)));
 
-#[cfg(feature = "bigdecimal")]
 test_type!(float_decimal(
     sqlite,
     "FLOAT",
@@ -39,7 +38,6 @@ test_type!(float_decimal(
     )
 ));
 
-#[cfg(feature = "bigdecimal")]
 test_type!(double_decimal(
     sqlite,
     "DOUBLE",
diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs
index ad647f4d1cab..1e7852a92b46 100644
--- a/quaint/src/visitor/mssql.rs
+++ b/quaint/src/visitor/mssql.rs
@@ -340,7 +340,7 @@ impl<'a> Visitor<'a> for Mssql<'a> {
             }
 
             ValueType::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))),
-            #[cfg(feature = "bigdecimal")]
+
             ValueType::Numeric(r) => r.map(|r| self.write(r)),
             #[cfg(feature = "uuid")]
             ValueType::Uuid(uuid) => uuid.map(|uuid| {
diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs
index 25bd5ecd7c30..b54399a6c69b 100644
--- a/quaint/src/visitor/mysql.rs
+++ b/quaint/src/visitor/mysql.rs
@@ -148,7 +148,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
                 return Err(builder.build());
             }
 
-            #[cfg(feature = "bigdecimal")]
+
             ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)),
 
             ValueType::Json(j) => match j {
diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs
index 65eb7278d482..ba157563cfce 100644
--- a/quaint/src/visitor/postgres.rs
+++ b/quaint/src/visitor/postgres.rs
@@ -228,7 +228,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
             ValueType::Json(j) => j
                 .as_ref()
                 .map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))),
-            #[cfg(feature = "bigdecimal")]
+
             ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)),
             #[cfg(feature = "uuid")]
             ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))),
diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs
index e2211e300421..3feca377990b 100644
--- a/quaint/src/visitor/sqlite.rs
+++ b/quaint/src/visitor/sqlite.rs
@@ -110,7 +110,7 @@ impl<'a> Visitor<'a> for Sqlite<'a> {
                 }
                 None => None,
             },
-            #[cfg(feature = "bigdecimal")]
+
             ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)),
             #[cfg(feature = "uuid")]
             ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))),

From 3aa8ec056e486fce4aa65473b3743cae4101a33f Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Fri, 6 Oct 2023 15:05:39 +0200
Subject: [PATCH 076/128] nix: update flake (#4326)

* Rust 1.73.0
* Node.js 20.8.0
* etc
---
 flake.lock | 30 +++++++++++++++---------------
 1 file changed, 15 insertions(+), 15 deletions(-)

diff --git a/flake.lock b/flake.lock
index 48c36ddc3bde..c2750d0435ed 100644
--- a/flake.lock
+++ b/flake.lock
@@ -14,11 +14,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1695511445,
-      "narHash": "sha256-mnE14re43v3/Jc50Jv0BKPMtEk7FEtDSligP6B5HwlI=",
+      "lastModified": 1696384830,
+      "narHash": "sha256-j8ZsVqzmj5sOm5MW9cqwQJUZELFFwOislDmqDDEMl6k=",
       "owner": "ipetkov",
       "repo": "crane",
-      "rev": "3de322e06fc88ada5e3589dc8a375b73e749f512",
+      "rev": "f2143cd27f8bd09ee4f0121336c65015a2a0a19c",
       "type": "github"
     },
     "original": {
@@ -30,11 +30,11 @@
   "flake-compat": {
     "flake": false,
     "locked": {
-      "lastModified": 1673956053,
-      "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
+      "lastModified": 1696267196,
+      "narHash": "sha256-AAQ/2sD+0D18bb8hKuEEVpHUYD1GmO2Uh/taFamn6XQ=",
       "owner": "edolstra",
       "repo": "flake-compat",
-      "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
+      "rev": "4f910c9827911b1ec2bf26b5a062cd09f8d89f85",
       "type": "github"
     },
     "original": {
@@ -50,11 +50,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1693611461,
-      "narHash": "sha256-aPODl8vAgGQ0ZYFIRisxYG5MOGSkIczvu2Cd8Gb9+1Y=",
+      "lastModified": 1696343447,
+      "narHash": "sha256-B2xAZKLkkeRFG5XcHHSXXcP7To9Xzr59KXeZiRf4vdQ=",
       "owner": "hercules-ci",
       "repo": "flake-parts",
-      "rev": "7f53fdb7bdc5bb237da7fefef12d099e4fd611ca",
+      "rev": "c9afaba3dfa4085dbd2ccb38dfade5141e33d9d4",
       "type": "github"
     },
     "original": {
@@ -105,11 +105,11 @@
   },
   "nixpkgs": {
     "locked": {
-      "lastModified": 1695644571,
-      "narHash": "sha256-asS9dCCdlt1lPq0DLwkVBbVoEKuEuz+Zi3DG7pR/RxA=",
+      "lastModified": 1696193975,
+      "narHash": "sha256-mnQjUcYgp9Guu3RNVAB2Srr1TqKcPpRXmJf4LJk6KRY=",
       "owner": "NixOS",
       "repo": "nixpkgs",
-      "rev": "6500b4580c2a1f3d0f980d32d285739d8e156d92",
+      "rev": "fdd898f8f79e8d2f99ed2ab6b3751811ef683242",
       "type": "github"
     },
     "original": {
@@ -139,11 +139,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1695780708,
-      "narHash": "sha256-+0difm874E5ra98MeLxW8SfoxfL+Wzn3cLzKGGd2I4M=",
+      "lastModified": 1696558324,
+      "narHash": "sha256-TnnP4LGwDB8ZGE7h2n4nA9Faee8xPkMdNcyrzJ57cbw=",
       "owner": "oxalica",
       "repo": "rust-overlay",
-      "rev": "e04538a3e155ebe4d15a281559119f63d33116bb",
+      "rev": "fdb37574a04df04aaa8cf7708f94a9309caebe2b",
       "type": "github"
     },
     "original": {

From 2737bdea73dc278332d6b612e10f11be0a39ce89 Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Fri, 6 Oct 2023 15:07:18 +0200
Subject: [PATCH 077/128] adapter-planetscale: handle NULL column type (#4320)

When a column type is NULL, mark it as `ColumnType.Int32` in JS to
eventually convert it to `quaint::ValueType::Int32(None)`, just like
quaint does in its own MySQL connector.

Fixes: https://github.com/prisma/prisma/issues/21369
---
 .../query-engine-tests/src/utils/raw.rs          |  6 +++---
 .../tests/new/regressions/mod.rs                 |  1 +
 .../tests/new/regressions/prisma_21369.rs        | 17 +++++++++++++++++
 .../js/adapter-planetscale/src/conversion.ts     |  5 ++++-
 4 files changed, 25 insertions(+), 4 deletions(-)
 create mode 100644 query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs

diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs
index 0db7983e3d83..54f2cd040ca9 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/raw.rs
@@ -2,7 +2,7 @@ use chrono::{DateTime, FixedOffset};
 use prisma_value::encode_bytes;
 use query_tests_setup::{TestError, TestResult};
 
-pub fn fmt_query_raw(query: &str, params: Vec<RawParam>) -> String {
+pub fn fmt_query_raw(query: &str, params: impl IntoIterator<Item = RawParam>) -> String {
     let params = params_to_json(params);
     let params = serde_json::to_string(&params).unwrap();
 
@@ -13,7 +13,7 @@
     )
 }
 
-pub fn fmt_execute_raw(query: &str, params: Vec<RawParam>) -> String {
+pub fn fmt_execute_raw(query: &str, params: impl IntoIterator<Item = RawParam>) -> String {
     let params = params_to_json(params);
     let params = serde_json::to_string(&params).unwrap();
 
@@ -66,7 +66,7 @@ impl RawParam {
     }
 }
 
-fn params_to_json(params: Vec<RawParam>) -> Vec<serde_json::Value> {
+fn params_to_json(params: impl IntoIterator<Item = RawParam>) -> Vec<serde_json::Value> {
     params.into_iter().map(serde_json::Value::from).collect::<Vec<_>>()
 }
 
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs
index f3f05163eeb2..8a2cbc7f24a2 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs
@@ -19,6 +19,7 @@ mod prisma_16760;
 mod prisma_17103;
 mod prisma_18517;
 mod prisma_20799;
+mod prisma_21369;
 mod prisma_5952;
 mod prisma_6173;
 mod prisma_7010;
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs
new file mode 100644
index 000000000000..f25a83629dac
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_21369.rs
@@ -0,0 +1,17 @@
+use query_engine_tests::*;
+
+#[test_suite(schema(generic), exclude(MongoDb))]
+mod prisma_21369 {
+    #[connector_test]
+    async fn select_null_works(runner: Runner) -> TestResult<()> {
+        let query = fmt_query_raw("SELECT NULL AS result", []);
+        let result = run_query!(runner, query);
+
+        assert_eq!(
+            result,
r#"{"data":{"queryRaw":[{"result":{"prisma__type":"null","prisma__value":null}}]}}"# + ); + + Ok(()) + } +} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts index 1c46538806b2..f6cf8563dc24 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts @@ -2,7 +2,7 @@ import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' // See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 export type PlanetScaleColumnType - = 'NULL_TYPE' // unsupported + = 'NULL' | 'INT8' | 'UINT8' | 'INT16' @@ -89,6 +89,9 @@ export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType { case 'HEXVAL': case 'GEOMETRY': return ColumnTypeEnum.Bytes + case 'NULL': + // Fall back to Int32 for consistency with quaint. + return ColumnTypeEnum.Int32 default: throw new Error(`Unsupported column type: ${field}`) } From 01b5c9534be6bec5b51250eb70749e65ca7c69bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 6 Oct 2023 15:48:41 +0200 Subject: [PATCH 078/128] ci: cleanup names and add dry-run (#4327) --- .github/workflows/publish-driver-adapters.yml | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml index 93254be7ba80..d69f4d76ce8c 100644 --- a/.github/workflows/publish-driver-adapters.yml +++ b/.github/workflows/publish-driver-adapters.yml @@ -15,7 +15,7 @@ on: npmDistTag: description: npm dist-tag to use for publishing required: true - default: "latest" + default: 'latest' dryRun: description: 'Check to do a dry run (does not publish packages)' type: boolean @@ -27,7 +27,7 @@ jobs: steps: - name: Print input env: - THE_INPUT: "${{ toJson(github.event.inputs) }}" + THE_INPUT: '${{ toJson(github.event.inputs) }}' run: | echo $THE_INPUT @@ -35,31 +35,32 @@ jobs: with: ref: ${{ github.event.inputs.enginesHash }} - # - # Build - # - - uses: actions/setup-node@v3 with: - node-version: "20.x" + node-version: '20.x' registry-url: 'https://registry.npmjs.org/' - - run: pnpm i - working-directory: query-engine/driver-adapters/js - - run: pnpm -r build + - name: Install dependencies + run: pnpm i working-directory: query-engine/driver-adapters/js - # - # Publish - # + - name: Build + run: pnpm -r build + working-directory: query-engine/driver-adapters/js - - run: | + - name: Update version in package.json + run: | # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result find . -name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} --dry-run # TODO remove hardcoded dry-run and use input instead! 
+        working-directory: query-engine/driver-adapters/js
+
+      - name: Publish Prisma Driver Adapters packages
+        run: |
+          pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }}
         working-directory: query-engine/driver-adapters/js
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+          DRY_RUN: ${{ github.event.inputs.dryRun && '--dry-run' || '' }}
 
       #
       # Failure handlers
       #
 
       - name: Set current job url in SLACK_FOOTER env var
         if: ${{ failure() }}
         run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV
-
       - name: Slack Notification on Failure
         if: ${{ failure() }}
         uses: rtCamp/action-slack-notify@v2.2.1
         env:
-          SLACK_TITLE: "prisma driver adapters publishing failed :x:"
-          SLACK_COLOR: "#FF0000"
+          SLACK_TITLE: 'prisma driver adapters publishing failed :x:'
+          SLACK_COLOR: '#FF0000'
           SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }}

From 0eead1d27772faf1df2bae3a06f1edb61302f339 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Fri, 6 Oct 2023 15:51:34 +0200
Subject: [PATCH 079/128] ci: better paths-ignore for workflows (#4328)

---
 .github/workflows/benchmark.yml               |  4 +-
 .github/workflows/build-wasm.yml              | 14 ++--
 .github/workflows/compilation.yml             | 26 +++----
 .github/workflows/formatting.yml              |  4 +-
 .github/workflows/query-engine-black-box.yml  | 34 ++++-----
 .../query-engine-driver-adapters.yml          | 50 ++++++-------
 .github/workflows/query-engine.yml            | 72 ++++++++++---------
 .github/workflows/schema-engine.yml           | 68 +++++++++---------
 8 files changed, 143 insertions(+), 129 deletions(-)

diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index bc2feb1b7b8d..4dbfa4855fc9 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/benchmark.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -17,7 +19,7 @@ concurrency:
 
 jobs:
   benchmark:
-    name: "Run benchmarks on Linux"
+    name: 'Run benchmarks on Linux'
     runs-on: ubuntu-latest
 
     steps:
diff --git a/.github/workflows/build-wasm.yml b/.github/workflows/build-wasm.yml
index 4d715d57afd7..7969cd2dd462 100644
--- a/.github/workflows/build-wasm.yml
+++ b/.github/workflows/build-wasm.yml
@@ -5,15 +5,17 @@ on:
       - main
   pull_request:
     paths-ignore:
-      - ".buildkite/**"
-      - "*.md"
-      - "LICENSE"
-      - "CODEOWNERS"
-      - "renovate.json"
+      - '.github/**'
+      - '!.github/workflows/build-wasm.yml'
+      - '.buildkite/**'
+      - '*.md'
+      - 'LICENSE'
+      - 'CODEOWNERS'
+      - 'renovate.json'
 
 jobs:
   build:
-    name: "prisma-schema-wasm build ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}"
+    name: 'prisma-schema-wasm build ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml
index 713fb1a86505..d9f81f47772b 100644
--- a/.github/workflows/compilation.yml
+++ b/.github/workflows/compilation.yml
@@ -1,7 +1,9 @@
-name: "Release binary compilation test"
+name: 'Release binary compilation test'
 on:
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/compilation.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -14,7 +16,7 @@ concurrency:
 
 jobs:
   test-crate-compilation:
-    name: "Compile top level crates on Linux"
+    name: 'Compile top level crates on Linux'
     strategy:
       fail-fast: false
     runs-on: ubuntu-latest
@@ -22,17 +24,17 @@ jobs:
     - uses: actions/checkout@v4
     - uses: dtolnay/rust-toolchain@stable
 
-    - run: "cargo clean && cargo build --release -p schema-engine-cli"
-      name: "Compile Migration Engine"
+    - run: 'cargo clean && cargo build --release -p schema-engine-cli'
+      name: 'Compile Migration Engine'
 
-    - run: "cargo clean && cargo build --release -p prisma-fmt"
-      name: "Compile prisma-fmt"
+    - run: 'cargo clean && cargo build --release -p prisma-fmt'
+      name: 'Compile prisma-fmt'
 
-    - run: "cargo clean && cargo build --release -p query-engine"
-      name: "Compile Query Engine Binary"
+    - run: 'cargo clean && cargo build --release -p query-engine'
+      name: 'Compile Query Engine Binary'
 
-    - run: "cargo clean && cargo build --release -p query-engine-node-api"
-      name: "Compile Query Engine Library"
+    - run: 'cargo clean && cargo build --release -p query-engine-node-api'
+      name: 'Compile Query Engine Library'
 
-    - name: "Check that Cargo.lock did not change"
-      run: "git diff --exit-code"
+    - name: 'Check that Cargo.lock did not change'
+      run: 'git diff --exit-code'
diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml
index 107d842eef6b..50b635544b91 100644
--- a/.github/workflows/formatting.yml
+++ b/.github/workflows/formatting.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/formatting.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -19,7 +21,7 @@ jobs:
   clippy:
     runs-on: ubuntu-latest
     env:
-      RUSTFLAGS: "-Dwarnings"
+      RUSTFLAGS: '-Dwarnings'
     steps:
       - uses: actions/checkout@v4
       - uses: dtolnay/rust-toolchain@stable
diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml
index 163ee3ad0141..05e487c779bd 100644
--- a/.github/workflows/query-engine-black-box.yml
+++ b/.github/workflows/query-engine-black-box.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/query-engine-black-box.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -17,27 +19,27 @@ concurrency:
 
 jobs:
   rust-tests:
-    name: "Test query-engine as a black-box"
+    name: 'Test query-engine as a black-box'
 
     strategy:
       fail-fast: false
       matrix:
         database:
-          - name: "postgres15"
+          - name: 'postgres15'
             single_threaded: false
-            connector: "postgres"
-            version: "15"
+            connector: 'postgres'
+            version: '15'
 
     env:
-      LOG_LEVEL: "info"
-      LOG_QUERIES: "y"
-      RUST_LOG_FORMAT: "devel"
-      RUST_BACKTRACE: "1"
-      CLICOLOR_FORCE: "1"
-      CLOSED_TX_CLEANUP: "2"
-      SIMPLE_TEST_MODE: "1"
-      QUERY_BATCH_SIZE: "10"
-      TEST_RUNNER: "direct"
+      LOG_LEVEL: 'info'
+      LOG_QUERIES: 'y'
+      RUST_LOG_FORMAT: 'devel'
+      RUST_BACKTRACE: '1'
+      CLICOLOR_FORCE: '1'
+      CLOSED_TX_CLEANUP: '2'
+      SIMPLE_TEST_MODE: '1'
+      QUERY_BATCH_SIZE: '10'
+      TEST_RUNNER: 'direct'
       TEST_CONNECTOR: ${{ matrix.database.connector }}
       TEST_CONNECTOR_VERSION: ${{ matrix.database.version }}
 
@@ -52,15 +54,15 @@ jobs:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}
 
-      - name: "Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})"
+      - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})'
         run: make start-${{ matrix.database.name }}
 
       - uses: dtolnay/rust-toolchain@stable
 
       - run: export WORKSPACE_ROOT=$(pwd) && cargo build --package query-engine
         env:
           CLICOLOR_FORCE: 1
 
       - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package black-box-tests -- --test-threads=1
         env:
           CLICOLOR_FORCE: 1
diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml
index f4207e2a6d51..d3ec9acb4e58 100644
--- a/.github/workflows/query-engine-driver-adapters.yml
+++ b/.github/workflows/query-engine-driver-adapters.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/query-engine-driver-adapters.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -17,51 +19,51 @@ concurrency:
 
 jobs:
   rust-query-engine-tests:
-    name: "Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}"
+    name: 'Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}'
 
     strategy:
       fail-fast: false
       matrix:
         adapter:
-          - name: "pg"
-            setup_task: "dev-pg-postgres13"
-          - name: "neon:ws"
-            setup_task: "dev-neon-ws-postgres13"
-          - name: "libsql"
-            setup_task: "dev-libsql-sqlite"
-        node_version: ["18"]
+          - name: 'pg'
+            setup_task: 'dev-pg-postgres13'
+          - name: 'neon:ws'
+            setup_task: 'dev-neon-ws-postgres13'
+          - name: 'libsql'
+            setup_task: 'dev-libsql-sqlite'
+        node_version: ['18']
     env:
-      LOG_LEVEL: "info" # Set to "debug" to trace the query engine and node process running the driver adapter
-      LOG_QUERIES: "y"
-      RUST_LOG: "info"
-      RUST_LOG_FORMAT: "devel"
-      RUST_BACKTRACE: "1"
-      CLICOLOR_FORCE: "1"
-      CLOSED_TX_CLEANUP: "2"
-      SIMPLE_TEST_MODE: "1"
-      QUERY_BATCH_SIZE: "10"
+      LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter
+      LOG_QUERIES: 'y'
+      RUST_LOG: 'info'
+      RUST_LOG_FORMAT: 'devel'
+      RUST_BACKTRACE: '1'
+      CLICOLOR_FORCE: '1'
+      CLOSED_TX_CLEANUP: '2'
+      SIMPLE_TEST_MODE: '1'
+      QUERY_BATCH_SIZE: '10'
       WORKSPACE_ROOT: ${{ github.workspace }}
 
     runs-on: buildjet-16vcpu-ubuntu-2004
     steps:
       - uses: actions/checkout@v4
 
-      - name: "Setup Node.js"
+      - name: 'Setup Node.js'
         uses: actions/setup-node@v3
         with:
           node-version: ${{ matrix.node_version }}
 
-      - name: "Setup pnpm"
+      - name: 'Setup pnpm'
         uses: pnpm/action-setup@v2
         with:
           version: 8
 
-      - name: "Get pnpm store directory"
+      - name: 'Get pnpm store directory'
         shell: bash
         run: |
           echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
 
-      - name: "Login to Docker Hub"
+      - name: 'Login to Docker Hub'
         uses: docker/login-action@v2
         continue-on-error: true
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -72,7 +74,5 @@ jobs:
 
       - uses: dtolnay/rust-toolchain@stable
 
-      - name: "Run tests"
-        run: cargo test --package query-engine-tests -- --test-threads=1
-
-
+      - name: 'Run tests'
+        run: cargo test --package query-engine-tests -- --test-threads=1
diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml
index 6ed522c94f13..3f5ad954d24e 100644
--- a/.github/workflows/query-engine.yml
+++ b/.github/workflows/query-engine.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/query-engine.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -17,56 +19,56 @@ concurrency:
 
 jobs:
   rust-query-engine-tests:
-    name: "Test ${{ matrix.database.name }} (${{ matrix.engine_protocol }}) on Linux"
+    name: 'Test ${{ matrix.database.name }} (${{ matrix.engine_protocol }}) on Linux'
 
     strategy:
       fail-fast: false
       matrix:
         database:
-          - name: "vitess_5_7"
+          - name: 'vitess_5_7'
             single_threaded: true
-            connector: "vitess"
-            version: "5.7"
+            connector: 'vitess'
+            version: '5.7'
-          - name: "vitess_8_0"
+          - name: 'vitess_8_0'
             single_threaded: true
-            connector: "vitess"
-            version: "8.0"
+            connector: 'vitess'
+            version: '8.0'
-          - name: "postgres15"
+          - name: 'postgres15'
             single_threaded: true
-            connector: "postgres"
-            version: "15"
+            connector: 'postgres'
+            version: '15'
-          - name: "mssql_2022"
+          - name: 'mssql_2022'
             single_threaded: false
-            connector: "sqlserver"
-            version: "2022"
+            connector: 'sqlserver'
+            version: '2022'
-          - name: "mongodb_4_2"
+          - name: 'mongodb_4_2'
             single_threaded: true
-            connector: "mongodb"
-            version: "4.2"
+            connector: 'mongodb'
+            version: '4.2'
-          - name: "cockroach_23_1"
+          - name: 'cockroach_23_1'
             single_threaded: false
-            connector: "cockroachdb"
-            version: "23.1"
+            connector: 'cockroachdb'
+            version: '23.1'
-          - name: "cockroach_22_2"
+          - name: 'cockroach_22_2'
             single_threaded: false
-            connector: "cockroachdb"
-            version: "22.2"
+            connector: 'cockroachdb'
+            version: '22.2'
-          - name: "cockroach_22_1_0"
+          - name: 'cockroach_22_1_0'
             single_threaded: false
-            connector: "cockroachdb"
-            version: "22.1"
+            connector: 'cockroachdb'
+            version: '22.1'
         engine_protocol: [graphql, json]
 
     env:
-      LOG_LEVEL: "info"
-      LOG_QUERIES: "y"
-      RUST_LOG_FORMAT: "devel"
-      RUST_BACKTRACE: "1"
-      CLICOLOR_FORCE: "1"
-      CLOSED_TX_CLEANUP: "2"
-      SIMPLE_TEST_MODE: "1"
-      QUERY_BATCH_SIZE: "10"
-      TEST_RUNNER: "direct"
+      LOG_LEVEL: 'info'
+      LOG_QUERIES: 'y'
+      RUST_LOG_FORMAT: 'devel'
+      RUST_BACKTRACE: '1'
+      CLICOLOR_FORCE: '1'
+      CLOSED_TX_CLEANUP: '2'
+      SIMPLE_TEST_MODE: '1'
+      QUERY_BATCH_SIZE: '10'
+      TEST_RUNNER: 'direct'
       TEST_CONNECTOR: ${{ matrix.database.connector }}
       TEST_CONNECTOR_VERSION: ${{ matrix.database.version }}
       PRISMA_ENGINE_PROTOCOL: ${{ matrix.engine_protocol }}
@@ -82,7 +84,7 @@ jobs:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}
 
-      - name: "Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})"
+      - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})'
         run: make start-${{ matrix.database.name }}
 
       - uses: dtolnay/rust-toolchain@stable
diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml
index 20c263dc17d0..e9b69c748246 100644
--- a/.github/workflows/schema-engine.yml
+++ b/.github/workflows/schema-engine.yml
@@ -6,6 +6,8 @@ on:
   pull_request:
     paths-ignore:
       # Generic
+      - '.github/**'
+      - '!.github/workflows/schema-engine.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -20,17 +22,17 @@ concurrency:
 
 jobs:
   test-mongodb-schema-connector:
-    name: "Test ${{ matrix.database.name }} on Linux"
+    name: 'Test ${{ matrix.database.name }} on Linux'
     strategy:
       fail-fast: false
       matrix:
         database:
-          - name: "mongodb42"
-            url: "mongodb://prisma:prisma@localhost:27016/?authSource=admin&retryWrites=true"
-          - name: "mongodb44"
-            url: "mongodb://prisma:prisma@localhost:27017/?authSource=admin&retryWrites=true"
-          - name: "mongodb5"
-            url: "mongodb://prisma:prisma@localhost:27018/?authSource=admin&retryWrites=true"
+          - name: 'mongodb42'
+            url: 'mongodb://prisma:prisma@localhost:27016/?authSource=admin&retryWrites=true'
+          - name: 'mongodb44'
+            url: 'mongodb://prisma:prisma@localhost:27017/?authSource=admin&retryWrites=true'
+          - name: 'mongodb5'
+            url: 'mongodb://prisma:prisma@localhost:27018/?authSource=admin&retryWrites=true'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -43,7 +45,7 @@ jobs:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}
 
-      - name: "Start ${{ matrix.database.name }}"
+      - name: 'Start ${{ matrix.database.name }}'
         run: make start-${{ matrix.database.name }}-single
 
       - run: cargo test -p mongodb-schema-connector
@@ -52,54 +54,54 @@ jobs:
         TEST_DATABASE_URL: ${{ matrix.database.url }}
 
   test-linux:
-    name: "Test ${{ matrix.database.name }} on Linux"
+    name: 'Test ${{ matrix.database.name }} on Linux'
     strategy:
       fail-fast: false
       matrix:
         database:
           - name: mssql_2017
-            url: "sqlserver://localhost:1434;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED"
+            url: 'sqlserver://localhost:1434;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED'
           - name: mssql_2019
-            url: "sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED"
+            url: 'sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED'
           - name: mysql_5_6
-            url: "mysql://root:prisma@localhost:3309"
+            url: 'mysql://root:prisma@localhost:3309'
           - name: mysql_5_7
-            url: "mysql://root:prisma@localhost:3306"
+            url: 'mysql://root:prisma@localhost:3306'
           - name: mysql_8
-            url: "mysql://root:prisma@localhost:3307"
+            url: 'mysql://root:prisma@localhost:3307'
          - name: mysql_mariadb
-            url: "mysql://root:prisma@localhost:3308"
+            url: 'mysql://root:prisma@localhost:3308'
          - name: postgres9
-            url: "postgresql://postgres:prisma@localhost:5431"
+            url: 'postgresql://postgres:prisma@localhost:5431'
          - name: postgres10
-            url: "postgresql://postgres:prisma@localhost:5432"
+            url: 'postgresql://postgres:prisma@localhost:5432'
          - name: postgres11
-            url: "postgresql://postgres:prisma@localhost:5433"
+            url: 'postgresql://postgres:prisma@localhost:5433'
          - name: postgres12
-            url: "postgresql://postgres:prisma@localhost:5434"
+            url: 'postgresql://postgres:prisma@localhost:5434'
          - name: postgres13
-            url: "postgresql://postgres:prisma@localhost:5435"
+            url: 'postgresql://postgres:prisma@localhost:5435'
          - name: postgres14
-            url: "postgresql://postgres:prisma@localhost:5437"
+            url: 'postgresql://postgres:prisma@localhost:5437'
          - name: postgres15
-            url: "postgresql://postgres:prisma@localhost:5438"
+            url: 'postgresql://postgres:prisma@localhost:5438'
          - name: cockroach_23_1
-            url: "postgresql://prisma@localhost:26260"
+            url: 'postgresql://prisma@localhost:26260'
          - name: cockroach_22_2
-            url: "postgresql://prisma@localhost:26259"
+            url: 'postgresql://prisma@localhost:26259'
          - name: cockroach_22_1_0
-            url: "postgresql://prisma@localhost:26257"
+            url: 'postgresql://prisma@localhost:26257'
          - name: sqlite
            url: sqlite
          - name: vitess_5_7
-            url: "mysql://root:prisma@localhost:33577/test"
-            shadow_database_url: "mysql://root:prisma@localhost:33578/shadow"
+            url: 'mysql://root:prisma@localhost:33577/test'
+            shadow_database_url: 'mysql://root:prisma@localhost:33578/shadow'
             is_vitess: true
             single_threaded: true
          - name: vitess_8_0
-            url: "mysql://root:prisma@localhost:33807/test"
-            shadow_database_url: "mysql://root:prisma@localhost:33808/shadow"
+            url: 'mysql://root:prisma@localhost:33807/test'
+            shadow_database_url: 'mysql://root:prisma@localhost:33808/shadow'
             is_vitess: true
             single_threaded: true
 
@@ -115,7 +117,7 @@ jobs:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}
 
-      - name: "Start ${{ matrix.database.name }}"
+      - name: 'Start ${{ matrix.database.name }}'
         run: make start-${{ matrix.database.name }}
 
       - run: cargo test -p sql-introspection-tests
@@ -196,9 +198,9 @@
     strategy:
       fail-fast: false
       matrix:
        db:
          - name: mysql
-            url: "mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60"
+            url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60'
          - name: mariadb
-            url: "mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60"
+            url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60'
        rust:
          - stable
        os:
          - windows-latest
 
     runs-on: ${{ matrix.os }}
 
-    name: "Test ${{ matrix.db.name }} on Windows"
+    name: 'Test ${{ matrix.db.name }} on Windows'
 
     steps:
       - uses: actions/checkout@v4

From 21eaab7b04986b53bcceea76168a966e15b0771f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Fri, 6 Oct 2023 15:56:59 +0200
Subject: [PATCH 080/128] ci: better paths-ignore for workflows (#4330)

---
 .../workflows/driver-adapter-smoke-tests.yml |  6 +++--
 .github/workflows/unit-tests.yml             | 24 ++++++++++---------
 2 files changed, 17 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml
index eb80a9ef9ce8..aa653ea7a57d 100644
--- a/.github/workflows/driver-adapter-smoke-tests.yml
+++ b/.github/workflows/driver-adapter-smoke-tests.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/driver-adapter-smoke-tests.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -18,8 +20,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        adapter: ["neon:ws", "neon:http", planetscale, pg, libsql]
-
+        adapter: ['neon:ws', 'neon:http', planetscale, pg, libsql]
+
     runs-on: ubuntu-latest
 
     services:
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
index 073d7211e8cf..b852499205e9 100644
--- a/.github/workflows/unit-tests.yml
+++ b/.github/workflows/unit-tests.yml
@@ -5,6 +5,8 @@ on:
       - main
   pull_request:
     paths-ignore:
+      - '.github/**'
+      - '!.github/workflows/unit-tests.yml'
       - '.buildkite/**'
       - '*.md'
       - 'LICENSE'
@@ -28,16 +30,16 @@ jobs:
       - uses: dtolnay/rust-toolchain@stable
 
       - run: |
-          cargo test --workspace \
-          --exclude=quaint \
-          --exclude=query-engine \
-          --exclude=query-engine-node-api \
-          --exclude=black-box-tests \
-          --exclude=query-engine-tests \
-          --exclude=sql-migration-tests \
-          --exclude=schema-engine-cli \
-          --exclude=sql-schema-describer \
-          --exclude=sql-introspection-tests \
-          --exclude=mongodb-schema-connector
+          cargo test --workspace \
+            --exclude=quaint \
+            --exclude=query-engine \
+            --exclude=query-engine-node-api \
+            --exclude=black-box-tests \
+            --exclude=query-engine-tests \
+            --exclude=sql-migration-tests \
+            --exclude=schema-engine-cli \
+            --exclude=sql-schema-describer \
+            --exclude=sql-introspection-tests \
+            --exclude=mongodb-schema-connector
         env:
           CLICOLOR_FORCE: 1

From 87d803456ea941b461b6e028afd62238d435a456 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Fri, 6 Oct 2023 15:59:18 +0200
Subject: [PATCH 081/128] chore(deps): update docker/login-action action to v3
 (#4230)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 .github/workflows/query-engine-black-box.yml       | 2 +-
 .github/workflows/query-engine-driver-adapters.yml | 2 +-
 .github/workflows/query-engine.yml                 | 2 +-
 .github/workflows/schema-engine.yml                | 4 ++--
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml
index 05e487c779bd..78e60178d7f7 100644
--- a/.github/workflows/query-engine-black-box.yml
+++ b/.github/workflows/query-engine-black-box.yml
@@ -48,7 +48,7 @@ jobs:
       - uses: actions/checkout@v4
 
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         continue-on-error: true
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml
index d3ec9acb4e58..dea1726c56c9 100644
--- a/.github/workflows/query-engine-driver-adapters.yml
+++ b/.github/workflows/query-engine-driver-adapters.yml
@@ -64,7 +64,7 @@ jobs:
           echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
 
       - name: 'Login to Docker Hub'
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         continue-on-error: true
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml
index 3f5ad954d24e..9c242217662d 100644
--- a/.github/workflows/query-engine.yml
+++ b/.github/workflows/query-engine.yml
@@ -78,7 +78,7 @@ jobs:
       - uses: actions/checkout@v4
 
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         continue-on-error: true
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml
index e9b69c748246..5bdf25a2bd35 100644
--- a/.github/workflows/schema-engine.yml
+++ b/.github/workflows/schema-engine.yml
@@ -39,7 +39,7 @@ jobs:
       - uses: dtolnay/rust-toolchain@stable
 
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         continue-on-error: true
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -111,7 +111,7 @@ jobs:
       - uses: dtolnay/rust-toolchain@stable
 
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         continue-on-error: true
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}

From 9cb683430ffb380f18693f48ca64cadb75c06e46 Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Fri, 6 Oct 2023 17:51:44 +0200
Subject: [PATCH 082/128] qe: add json serialization span and expand
 `prisma:engine` duration (#4154)

* qe: add json serialization span and expand `prisma:engine` duration

* Add separate JSON serialization span (`prisma:engine:json`).
* Expand `prisma:engine` span to cover the whole request duration
  including serializing the response and not only `RequestHandler::handle`.

Discovered when reproducing https://github.com/prisma/prisma/issues/15345.

Currently the `prisma:engine:serialize` is a possible source of confusion for
users as it's easy to assume that it includes serializing response to JSON
while it is actually a different kind of serialization. Additionally, the
`prisma:engine` span did not cover the time spent serializing the response to
JSON at all, which made it look like that time was spent in the client rather
than in the engine.

* Rename the span according to review suggestion
---
 .../query-engine-node-api/src/engine.rs      | 29 +++++++++----------
 1 file changed, 14 insertions(+), 15 deletions(-)

diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs
index e9e7ad681cd4..23782af1776a 100644
--- a/query-engine/query-engine-node-api/src/engine.rs
+++ b/query-engine/query-engine-node-api/src/engine.rs
@@ -377,34 +377,33 @@ impl QueryEngine {
     /// If connected, sends a query to the core and returns the response.
#[napi] pub async fn query(&self, body: String, trace: String, tx_id: Option) -> napi::Result { + let dispatcher = self.logger.dispatcher(); + async_panic_to_js_error(async { let inner = self.inner.read().await; let engine = inner.as_engine()?; let query = RequestBody::try_from_str(&body, engine.engine_protocol())?; - let dispatcher = self.logger.dispatcher(); - - async move { - let span = if tx_id.is_none() { - tracing::info_span!("prisma:engine", user_facing = true) - } else { - Span::none() - }; + let span = if tx_id.is_none() { + tracing::info_span!("prisma:engine", user_facing = true) + } else { + Span::none() + }; - let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + async move { let handler = RequestHandler::new(engine.executor(), engine.query_schema(), engine.engine_protocol()); - let response = handler - .handle(query, tx_id.map(TxId::from), trace_id) - .instrument(span) - .await; + let response = handler.handle(query, tx_id.map(TxId::from), trace_id).await; - Ok(serde_json::to_string(&response)?) + let serde_span = tracing::info_span!("prisma:engine:response_json_serialization", user_facing = true); + Ok(serde_span.in_scope(|| serde_json::to_string(&response))?) } - .with_subscriber(dispatcher) + .instrument(span) .await }) + .with_subscriber(dispatcher) .await } From 4bddbde41e360e03422bc5811062329162747c3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 6 Oct 2023 17:57:06 +0200 Subject: [PATCH 083/128] ci: publish driver adapters wip (#4334) --- .github/workflows/publish-driver-adapters.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml index d69f4d76ce8c..bd6aeceea613 100644 --- a/.github/workflows/publish-driver-adapters.yml +++ b/.github/workflows/publish-driver-adapters.yml @@ -1,4 +1,5 @@ name: Build and publish Prisma Driver Adapters +run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} concurrency: publish-prisma-driver-adapters @@ -35,6 +36,10 @@ jobs: with: ref: ${{ github.event.inputs.enginesHash }} + - uses: pnpm/action-setup@v2.4.0 + with: + version: 8 + - uses: actions/setup-node@v3 with: node-version: '20.x' From 7071fe329a9a6c87ef37a4f1d040dca2e082638e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 6 Oct 2023 18:04:19 +0200 Subject: [PATCH 084/128] ci: publish driver adapters fix dry-run (#4335) --- .github/workflows/publish-driver-adapters.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml index bd6aeceea613..7da972c35e1b 100644 --- a/.github/workflows/publish-driver-adapters.yml +++ b/.github/workflows/publish-driver-adapters.yml @@ -65,7 +65,7 @@ jobs: working-directory: query-engine/driver-adapters/js env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun && '--dry-run' || '' }} + DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} # # Failure handlers From 4b97085e77f295623af1655c47a4047557914eed Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 9 Oct 2023 11:03:15 +0200 Subject: [PATCH 085/128] feat(quaint): remove `uuid` conditional flag (#4323) * feat(quaint): remove "bigdecimal" conditional flag * 
feat(quaint): remove references to serde-support * feat(quaint): remove "uuid" conditional flag * chore(quaint): remove deprecated feature flags from the README --------- Co-authored-by: Jan Piotrowski --- .github/workflows/quaint.yml | 8 ++++---- Cargo.toml | 1 - quaint/.github/workflows/test.yml | 8 ++++---- quaint/Cargo.toml | 10 +++------- quaint/README.md | 4 ---- quaint/src/ast/values.rs | 18 ------------------ quaint/src/connector/mssql/conversion.rs | 1 - quaint/src/connector/mysql/conversion.rs | 1 - quaint/src/connector/postgres/conversion.rs | 8 -------- quaint/src/connector/postgres/error.rs | 2 -- quaint/src/connector/sqlite/conversion.rs | 1 - quaint/src/error.rs | 1 - quaint/src/tests/query/error.rs | 1 - quaint/src/tests/types/postgres.rs | 3 --- quaint/src/visitor/mssql.rs | 2 -- quaint/src/visitor/mysql.rs | 2 -- quaint/src/visitor/postgres.rs | 2 -- quaint/src/visitor/sqlite.rs | 2 -- 18 files changed, 11 insertions(+), 64 deletions(-) diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml index 6d1a871487c0..7b49e80a7bd0 100644 --- a/.github/workflows/quaint.yml +++ b/.github/workflows/quaint.yml @@ -17,13 +17,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled" + - "--lib --no-default-features --features=sqlite --features=pooled" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled" + - "--lib --no-default-features --features=postgresql --features=pooled" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=uuid --features=pooled" + - "--lib --no-default-features --features=mysql --features=pooled" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=uuid --features=pooled" + - "--lib --no-default-features --features=mssql --features=pooled" env: TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" TEST_MYSQL8: "mysql://root:prisma@localhost:3307/prisma" diff --git a/Cargo.toml b/Cargo.toml index cbec74fa92b6..4499033a624b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -67,7 +67,6 @@ features = [ "pooled", "postgresql", "sqlite", - "uuid", ] [profile.dev.package.backtrace] diff --git a/quaint/.github/workflows/test.yml b/quaint/.github/workflows/test.yml index e3779dfdea2b..85d480919c41 100644 --- a/quaint/.github/workflows/test.yml +++ b/quaint/.github/workflows/test.yml @@ -46,13 +46,13 @@ jobs: features: - "--lib --features=all" - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=uuid --features=pooled" + - "--lib --no-default-features --features=sqlite --features=pooled" - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=uuid --features=pooled" + - "--lib --no-default-features --features=postgresql --features=pooled" - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=uuid --features=pooled" + - "--lib --no-default-features --features=mysql --features=pooled" - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=uuid --features=pooled" + - "--lib --no-default-features --features=mssql --features=pooled" - "--doc --features=all" env: TEST_MYSQL: 
"mysql://root:prisma@localhost:3306/prisma" diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index 638a7749b407..b699518d0910 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -29,7 +29,7 @@ docs = [] # way to access database-specific methods when you need extra control. expose-drivers = [] -all = ["mssql", "mysql", "pooled", "postgresql", "sqlite", "uuid"] +all = ["mssql", "mysql", "pooled", "postgresql", "sqlite"] vendored-openssl = [ "postgres-native-tls/vendored-openssl", @@ -48,7 +48,7 @@ postgresql = [ "byteorder", ] -mssql = ["tiberius", "uuid", "tokio-util", "tokio/time", "tokio/net", "either"] +mssql = ["tiberius", "tokio-util", "tokio/time", "tokio/net", "either"] mysql = ["mysql_async", "tokio/time", "lru-cache"] pooled = ["mobc"] sqlite = ["rusqlite", "tokio/sync"] @@ -78,6 +78,7 @@ bytes = { version = "1.0", optional = true } mobc = { version = "0.8", optional = true } serde = { version = "1.0", optional = true } sqlformat = { version = "0.2.0", optional = true } +uuid = { version = "1", features = ["v4"] } [dev-dependencies] once_cell = "1.3" @@ -87,7 +88,6 @@ paste = "1.0" serde = { version = "1.0", features = ["derive"] } quaint-test-macros = { path = "quaint-test-macros" } quaint-test-setup = { path = "quaint-test-setup" } -uuid = { version = "1", features = ["v4"] } tokio = { version = "1.0", features = ["rt-multi-thread", "macros", "time"] } [dependencies.byteorder] @@ -126,10 +126,6 @@ features = [ [dependencies.bigdecimal] version = "0.3" -[dependencies.uuid] -version = "1" -optional = true - [dependencies.tokio-postgres] features = [ "with-uuid-1", diff --git a/quaint/README.md b/quaint/README.md index 9ad45ee12182..e27c56972390 100644 --- a/quaint/README.md +++ b/quaint/README.md @@ -20,10 +20,6 @@ Quaint is an abstraction over certain SQL databases. It provides: - `sqlite`: Support for SQLite databases. - `mssql`: Support for Microsoft SQL Server databases. - `pooled`: A connection pool in `pooled::Quaint`. -- `json`: JSON type support with `serde_json` crate. -- `uuid`: UUID type support with `uuid` crate. -- `chrono`: DateTime type support with `chrono` crate. -- `bigdecimal`: Numeric values can be read as `BigDecimal`. - `vendored-openssl`: Statically links against a vendored OpenSSL library on non-Windows or non-Apple platforms. - `fmt-sql`: Enables logging SQL queries _formatted_. The `FMT_SQL` env var must be present for the formatting to be enabled. diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs index 677c27fdfd93..081405374340 100644 --- a/quaint/src/ast/values.rs +++ b/quaint/src/ast/values.rs @@ -11,7 +11,6 @@ use std::{ fmt, str::FromStr, }; -#[cfg(feature = "uuid")] use uuid::Uuid; /// A value written to the query as-is without parameterization. @@ -148,7 +147,6 @@ impl<'a> Value<'a> { } /// Creates a new uuid value. - #[cfg(feature = "uuid")] pub fn uuid(value: Uuid) -> Self { ValueType::uuid(value).into_value() } @@ -303,13 +301,11 @@ impl<'a> Value<'a> { } /// `true` if the `Value` is of UUID type. - #[cfg(feature = "uuid")] pub fn is_uuid(&self) -> bool { self.typed.is_uuid() } /// Returns an UUID if the value is of UUID type, otherwise `None`. - #[cfg(feature = "uuid")] pub fn as_uuid(&self) -> Option { self.typed.as_uuid() } @@ -431,7 +427,6 @@ impl<'a> Value<'a> { ValueType::Xml(None).into() } - #[cfg(feature = "uuid")] pub fn null_uuid() -> Self { ValueType::Uuid(None).into() } @@ -507,7 +502,6 @@ pub enum ValueType<'a> { Json(Option), /// A XML value. Xml(Option>), - #[cfg(feature = "uuid")] /// An UUID value. 
     Uuid(Option<Uuid>),
     /// A datetime value.
@@ -578,7 +572,6 @@ impl<'a> fmt::Display for ValueType<'a> {
             ValueType::Numeric(val) => val.as_ref().map(|v| write!(f, "{v}")),
             ValueType::Json(val) => val.as_ref().map(|v| write!(f, "{v}")),
-            #[cfg(feature = "uuid")]
             ValueType::Uuid(val) => val.map(|v| write!(f, "\"{v}\"")),
             ValueType::DateTime(val) => val.map(|v| write!(f, "\"{v}\"")),
             ValueType::Date(val) => val.map(|v| write!(f, "\"{v}\"")),
@@ -637,7 +630,6 @@ impl<'a> From<Value<'a>> for serde_json::Value {
             ValueType::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()),
             ValueType::Json(v) => v,
-            #[cfg(feature = "uuid")]
             ValueType::Uuid(u) => u.map(|u| serde_json::Value::String(u.hyphenated().to_string())),
             ValueType::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())),
             ValueType::Date(date) => date.map(|date| serde_json::Value::String(format!("{date}"))),
@@ -764,7 +756,6 @@ impl<'a> ValueType<'a> {
     }
 
     /// Creates a new uuid value.
-    #[cfg(feature = "uuid")]
     pub(crate) fn uuid(value: Uuid) -> Self {
         Self::Uuid(Some(value))
     }
@@ -812,9 +803,7 @@ impl<'a> ValueType<'a> {
             Self::Char(c) => c.is_none(),
             Self::Array(v) => v.is_none(),
             Self::Xml(s) => s.is_none(),
-
             Self::Numeric(r) => r.is_none(),
-            #[cfg(feature = "uuid")]
             Self::Uuid(u) => u.is_none(),
             Self::DateTime(dt) => dt.is_none(),
             Self::Date(d) => d.is_none(),
@@ -999,13 +988,11 @@ impl<'a> ValueType<'a> {
     }
 
     /// `true` if the `Value` is of UUID type.
-    #[cfg(feature = "uuid")]
     pub(crate) fn is_uuid(&self) -> bool {
         matches!(self, Self::Uuid(_))
     }
 
     /// Returns an UUID if the value is of UUID type, otherwise `None`.
-    #[cfg(feature = "uuid")]
     pub(crate) fn as_uuid(&self) -> Option<Uuid> {
         match self {
             Self::Uuid(u) => *u,
@@ -1120,14 +1107,11 @@ value!(val: usize, Int64, i64::try_from(val).unwrap());
 value!(val: &'a [u8], Bytes, val.into());
 value!(val: f64, Double, val);
 value!(val: f32, Float, val);
-
 value!(val: DateTime<Utc>, DateTime, val);
 value!(val: chrono::NaiveTime, Time, val);
 value!(val: chrono::NaiveDate, Date, val);
-
 value!(val: BigDecimal, Numeric, val);
 value!(val: JsonValue, Json, val);
-#[cfg(feature = "uuid")]
 value!(val: Uuid, Uuid, val);
 
 impl<'a> TryFrom<Value<'a>> for i64 {
@@ -1233,7 +1217,6 @@
     }
 }
 
-#[cfg(feature = "uuid")]
 impl<'a> TryFrom<&Value<'a>> for Option<Uuid> {
     type Error = Error;
 
@@ -1431,7 +1414,6 @@ mod tests {
     }
 
     #[test]
-    #[cfg(feature = "uuid")]
     fn display_format_for_uuid() {
         let id = Uuid::from_str("67e5504410b1426f9247bb680e5fe0c8").unwrap();
         let pv = Value::uuid(id);
diff --git a/quaint/src/connector/mssql/conversion.rs b/quaint/src/connector/mssql/conversion.rs
index 789e1463601b..870654ad5de3 100644
--- a/quaint/src/connector/mssql/conversion.rs
+++ b/quaint/src/connector/mssql/conversion.rs
@@ -22,7 +22,6 @@ impl<'a> IntoSql<'a> for &'a Value<'a> {
             ValueType::Array(_) | ValueType::EnumArray(_, _) => panic!("Arrays are not supported on SQL Server."),
             ValueType::Numeric(val) => (*val).to_sql(),
             ValueType::Json(val) => val.as_ref().map(|val| serde_json::to_string(&val).unwrap()).into_sql(),
-            #[cfg(feature = "uuid")]
             ValueType::Uuid(val) => val.into_sql(),
             ValueType::DateTime(val) => val.into_sql(),
             ValueType::Date(val) => val.into_sql(),
diff --git a/quaint/src/connector/mysql/conversion.rs b/quaint/src/connector/mysql/conversion.rs
index 672cca03cdb5..659cc0790c07 100644
--- a/quaint/src/connector/mysql/conversion.rs
+++ b/quaint/src/connector/mysql/conversion.rs
@@ -50,7 +50,6 @@ pub fn conv_params(params: &[Value<'_>]) -> crate::Result<Params> {
                 }
                 None => None,
             },
-            #[cfg(feature = "uuid")]
             ValueType::Uuid(u) => u.map(|u| my::Value::Bytes(u.hyphenated().to_string().into_bytes())),
             ValueType::Date(d) => {
                 d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0))
diff --git a/quaint/src/connector/postgres/conversion.rs b/quaint/src/connector/postgres/conversion.rs
index 31be6dd68682..efe4debd9b94 100644
--- a/quaint/src/connector/postgres/conversion.rs
+++ b/quaint/src/connector/postgres/conversion.rs
@@ -19,7 +19,6 @@ use tokio_postgres::{
     Row as PostgresRow, Statement as PostgresStatement,
 };
 
-#[cfg(feature = "uuid")]
 use uuid::Uuid;
 
 pub(crate) fn conv_params<'a>(params: &'a [Value<'a>]) -> Vec<&'a (dyn types::ToSql + Sync)> {
@@ -51,7 +50,6 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
             ValueType::Numeric(_) => PostgresType::NUMERIC,
             ValueType::Json(_) => PostgresType::JSONB,
             ValueType::Xml(_) => PostgresType::XML,
-            #[cfg(feature = "uuid")]
             ValueType::Uuid(_) => PostgresType::UUID,
             ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ,
             ValueType::Date(_) => PostgresType::TIMESTAMP,
@@ -88,7 +86,6 @@ pub(crate) fn params_to_types(params: &[Value<'_>]) -> Vec<PostgresType> {
                 ValueType::Numeric(_) => PostgresType::NUMERIC_ARRAY,
                 ValueType::Json(_) => PostgresType::JSONB_ARRAY,
                 ValueType::Xml(_) => PostgresType::XML_ARRAY,
-                #[cfg(feature = "uuid")]
                 ValueType::Uuid(_) => PostgresType::UUID_ARRAY,
                 ValueType::DateTime(_) => PostgresType::TIMESTAMPTZ_ARRAY,
                 ValueType::Date(_) => PostgresType::TIMESTAMP_ARRAY,
@@ -260,7 +257,6 @@ impl GetRow for PostgresRow {
                 }
                 None => Value::null_time(),
             },
-            #[cfg(feature = "uuid")]
             PostgresType::UUID => match row.try_get(i)? {
                 Some(val) => {
                     let val: Uuid = val;
                     Value::uuid(val)
                 }
                 None => ValueType::Uuid(None).into_value(),
             },
-            #[cfg(feature = "uuid")]
             PostgresType::UUID_ARRAY => match row.try_get(i)? {
                 Some(val) => {
                     let val: Vec<Option<Uuid>> = val;
@@ -771,12 +766,10 @@ impl<'a> ToSql for Value<'a> {
             (ValueType::Numeric(float), _) => float
                 .as_ref()
                 .map(|float| DecimalWrapper(float.clone()).to_sql(ty, out)),
-            #[cfg(feature = "uuid")]
             (ValueType::Text(string), &PostgresType::UUID) => string.as_ref().map(|string| {
                 let parsed_uuid: Uuid = string.parse()?;
                 parsed_uuid.to_sql(ty, out)
             }),
-            #[cfg(feature = "uuid")]
             (ValueType::Array(values), &PostgresType::UUID_ARRAY) => values.as_ref().map(|values| {
                 let parsed_uuid: Vec<Option<Uuid>> = values
                     .iter()
@@ -849,7 +842,6 @@ impl<'a> ToSql for Value<'a> {
             }
             (ValueType::Json(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)),
             (ValueType::Xml(value), _) => value.as_ref().map(|value| value.to_sql(ty, out)),
-            #[cfg(feature = "uuid")]
             (ValueType::Uuid(value), _) => value.map(|value| value.to_sql(ty, out)),
             (ValueType::DateTime(value), &PostgresType::DATE) => value.map(|value| value.date_naive().to_sql(ty, out)),
             (ValueType::Date(value), _) => value.map(|value| value.to_sql(ty, out)),
diff --git a/quaint/src/connector/postgres/error.rs b/quaint/src/connector/postgres/error.rs
index dc8699875ea8..d4e5ec7837fe 100644
--- a/quaint/src/connector/postgres/error.rs
+++ b/quaint/src/connector/postgres/error.rs
@@ -265,7 +265,6 @@ impl From<tokio_postgres::error::Error> for Error {
             return io_error;
         }
 
-        #[cfg(feature = "uuid")]
         if let Some(uuid_error) = try_extracting_uuid_error(&e) {
             return uuid_error;
         }
@@ -312,7 +311,6 @@ impl From<tokio_postgres::error::Error> for Error {
     }
 }
 
-#[cfg(feature = "uuid")]
 fn try_extracting_uuid_error(err: &tokio_postgres::error::Error) -> Option<Error> {
     use std::error::Error as _;
 
diff --git a/quaint/src/connector/sqlite/conversion.rs b/quaint/src/connector/sqlite/conversion.rs
index e7566be81140..fced37abca4c 100644
--- a/quaint/src/connector/sqlite/conversion.rs
+++ b/quaint/src/connector/sqlite/conversion.rs
@@ -273,7 +273,6 @@ impl<'a> ToSql for Value<'a> {
                 ToSqlOutput::from(stringified)
             }),
             ValueType::Xml(cow) => cow.as_ref().map(|cow| ToSqlOutput::from(cow.as_ref())),
-            #[cfg(feature = "uuid")]
             ValueType::Uuid(value) => value.map(|value| ToSqlOutput::from(value.hyphenated().to_string())),
             ValueType::DateTime(value) => value.map(|value| ToSqlOutput::from(value.timestamp_millis())),
             ValueType::Date(date) => date
diff --git a/quaint/src/error.rs b/quaint/src/error.rs
index 47b48c8dc8f9..22037d443c35 100644
--- a/quaint/src/error.rs
+++ b/quaint/src/error.rs
@@ -388,7 +388,6 @@
     }
 }
 
-#[cfg(feature = "uuid")]
 impl From<uuid::Error> for Error {
     fn from(e: uuid::Error) -> Self {
         Error::builder(ErrorKind::UUIDError(format!("{e}"))).build()
diff --git a/quaint/src/tests/query/error.rs b/quaint/src/tests/query/error.rs
index 3cff5401ec0f..69c57332b6d3 100644
--- a/quaint/src/tests/query/error.rs
+++ b/quaint/src/tests/query/error.rs
@@ -353,7 +353,6 @@ async fn should_execute_multi_statement_queries_with_raw_cmd(api: &mut dyn TestA
     Ok(())
 }
 
-#[cfg(feature = "uuid")]
 #[test_each_connector(tags("postgresql"))]
 async fn uuid_length_error(api: &mut dyn TestApi) -> crate::Result<()> {
     let table = api.create_temp_table("value uuid").await?;
diff --git a/quaint/src/tests/types/postgres.rs b/quaint/src/tests/types/postgres.rs
index ba9b02095722..d69a8dbb3424 100644
--- a/quaint/src/tests/types/postgres.rs
+++ b/quaint/src/tests/types/postgres.rs
@@ -1,7 +1,6 @@
 mod bigdecimal;
 
 use crate::tests::test_api::*;
-#[cfg(feature = "uuid")]
 use std::str::FromStr;
 
 test_type!(boolean(
@@ -328,7 +327,6 @@ test_type!(xml_array(
     ])
 ));
 
-#[cfg(feature = "uuid")]
 test_type!(uuid(
     postgresql,
     "uuid",
@@ -336,7
+334,6 @@ test_type!(uuid( Value::uuid(uuid::Uuid::from_str("936DA01F-9ABD-4D9D-80C7-02AF85C822A8").unwrap()) )); -#[cfg(feature = "uuid")] test_type!(uuid_array( postgresql, "uuid[]", diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index 1e7852a92b46..bf1550b96c31 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -342,7 +342,6 @@ impl<'a> Visitor<'a> for Mssql<'a> { ValueType::Json(j) => j.map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), ValueType::Numeric(r) => r.map(|r| self.write(r)), - #[cfg(feature = "uuid")] ValueType::Uuid(uuid) => uuid.map(|uuid| { let s = format!("CONVERT(uniqueidentifier, N'{}')", uuid.hyphenated()); self.write(s) @@ -1252,7 +1251,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Mssql::build(Select::default().value(uuid.raw())).unwrap(); diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index b54399a6c69b..26d0f0d5fd65 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -158,7 +158,6 @@ impl<'a> Visitor<'a> for Mysql<'a> { } None => None, }, - #[cfg(feature = "uuid")] ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), @@ -859,7 +858,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Mysql::build(Select::default().value(uuid.raw())).unwrap(); diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index ba157563cfce..fda8a6132037 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -230,7 +230,6 @@ impl<'a> Visitor<'a> for Postgres<'a> { .map(|j| self.write(format!("'{}'", serde_json::to_string(&j).unwrap()))), ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), - #[cfg(feature = "uuid")] ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), @@ -1020,7 +1019,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Postgres::build(Select::default().value(uuid.raw())).unwrap(); diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 3feca377990b..9c15ef651694 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -112,7 +112,6 @@ impl<'a> Visitor<'a> for Sqlite<'a> { }, ValueType::Numeric(r) => r.as_ref().map(|r| self.write(r)), - #[cfg(feature = "uuid")] ValueType::Uuid(uuid) => uuid.map(|uuid| self.write(format!("'{}'", uuid.hyphenated()))), ValueType::DateTime(dt) => dt.map(|dt| self.write(format!("'{}'", dt.to_rfc3339(),))), ValueType::Date(date) => date.map(|date| self.write(format!("'{date}'"))), @@ -920,7 +919,6 @@ mod tests { } #[test] - #[cfg(feature = "uuid")] fn test_raw_uuid() { let uuid = uuid::Uuid::new_v4(); let (sql, params) = Sqlite::build(Select::default().value(uuid.raw())).unwrap(); From ce7f7054e54eaa5695095d0ce08ebcade3e79fea Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 9 Oct 2023 11:29:53 +0200 Subject: [PATCH 086/128] fix(smoke-test): add assertion for expected error (#4337) --- 
 .../js/smoke-test-js/README.md                | 20 ++++++++
 .../js/smoke-test-js/package.json             |  4 +-
 .../20230915202554_init/migration.sql         |  5 ++
 .../smoke-test-js/prisma/sqlite/schema.prisma |  4 ++
 .../js/smoke-test-js/src/libquery/libquery.ts | 48 ++++++++++++++-----
 5 files changed, 66 insertions(+), 15 deletions(-)

diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md
index f719a7189282..204be94670b9 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/README.md
+++ b/query-engine/driver-adapters/js/smoke-test-js/README.md
@@ -55,3 +55,23 @@ In the current directory:
   For more fine-grained control:
   - Run `pnpm neon:ws:libquery` to test using `libquery`
   - Run `pnpm neon:ws:client` to test using `@prisma/client`
+
+### Pg
+
+Start database via `docker compose up postgres15` in `/docker`.
+
+In the current directory:
+- Run `pnpm prisma:pg` to push the Prisma schema and insert the test data.
+- Run `pnpm pg` to run smoke tests using `libquery` against the PostgreSQL database, using `pg`
+  For more fine-grained control:
+  - Run `pnpm pg:libquery` to test using `libquery`
+  - Run `pnpm pg:client` to test using `@prisma/client`
+
+### Libsql
+
+In the current directory:
+- Run `pnpm prisma:libsql` to push the Prisma schema and insert the test data.
+- Run `pnpm libsql` to run smoke tests using `libquery` against the SQLite database, using `libSQL`
+  For more fine-grained control:
+  - Run `pnpm libsql:libquery` to test using `libquery`
+  - Run `pnpm libsql:client` to test using `@prisma/client`
\ No newline at end of file
diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json
index 76e8bee532e3..90f25234be1e 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/package.json
+++ b/query-engine/driver-adapters/js/smoke-test-js/package.json
@@ -35,8 +35,8 @@
     "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts",
     "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client",
     "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"",
-    "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --loader=tsx ./src/libquery/libsql.test.ts",
-    "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --loader=tsx ./src/client/libsql.test.ts",
+    "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/libsql.test.ts",
+    "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/libsql.test.ts",
     "libsql": "pnpm libsql:libquery && pnpm libsql:client"
   },
   "keywords": [],
diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql
index 77e333ceeb53..31c63d423e22 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql
+++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql
@@ -64,6 +64,11 @@ CREATE TABLE "Product" (
     "properties_null" TEXT
 );
 
+-- CreateTable
+CREATE TABLE "Unique" (
"email" TEXT NOT NULL PRIMARY KEY, +); + -- CreateIndex CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma index e1432d2f316a..bde23dee66ac 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma @@ -73,3 +73,7 @@ model Product { properties String properties_null String? } + +model Unique { + email String @id +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index c1cd760f5f8d..bdf50eab5669 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -283,20 +283,42 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) it('expected error', async () => { - const result = await doQuery({ - modelName: 'Unique', - action: 'createMany', - query: { - arguments: { - data: [{ email: 'duplicate@example.com' }, { email: 'duplicate@example.com' }], + + + await assert.rejects( + async () => { + const result = await doQuery({ + modelName: 'Unique', + action: 'createOne', + query: { + arguments: { + data: { email: 'duplicate@example.com' }, + }, + selection: { + $scalars: true, + }, + }, + }) + const result2 = await doQuery({ + modelName: 'Unique', + action: 'createOne', + query: { + arguments: { + data: { email: 'duplicate@example.com' } + }, + selection: { + $scalars: true, + }, + }, + }) + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) }, - selection: { - $scalars: true, + (err) => { + assert.match(err.message, /unique/i); + return true; }, - }, - }) - - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + ); + }) describe('read scalar and non scalar types', () => { @@ -396,7 +418,7 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc selection: { bytes: true, }, - arguments: { + arguments: { data: { bytes: { $type: 'Bytes', From 3155122414c0ed0609bc8f98c8754160022d4d52 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Mon, 9 Oct 2023 11:30:06 +0200 Subject: [PATCH 087/128] fix(smoke-tests): Fix case where no adapter should be used (#4338) --- .../driver-adapters/js/smoke-test-js/src/client/client.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index 7074320f4351..dcae3c46437d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -18,8 +18,9 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { const dirname = path.dirname(new URL(import.meta.url).pathname) process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) - for (const adapter of [driverAdapter, undefined]) { - const isUsingDriverAdapters = adapter !== undefined + // Run twice, once with adapter and once fully without + for (const adapter of [driverAdapter, null]) { + const isUsingDriverAdapters = adapter !== null describe(isUsingDriverAdapters ? 
       it('batch queries', async () => {
         const prisma = new PrismaClient({

From a4e8771bb54236bc08613381ffa1443755b683ad Mon Sep 17 00:00:00 2001
From: Serhii Tatarintsev
Date: Mon, 9 Oct 2023 11:33:39 +0200
Subject: [PATCH 088/128] driver-adapters: Fix implicit transaction error test (#4343)

The query it used got optimized and no longer starts an implicit
transaction.
---
 .../smoke-test-js/prisma/postgres/schema.prisma  |  7 +++++--
 .../js/smoke-test-js/src/libquery/errors.test.ts | 15 ++++++++++++---
 2 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma
index 74ffd428c728..7319f07d8a60 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma
+++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma
@@ -97,11 +97,14 @@ model Product {
   id              String @id @default(cuid())
   properties      Json
   properties_null Json?
+  users           User[]
 }
 
 model User {
-  id    String @id @default(uuid())
-  email String
+  id              String   @id @default(uuid())
+  email           String
+  favoriteProduct Product? @relation(fields: [productId], references: [id])
+  productId       String?
 }
 
 model Unique {
diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts
index c917f35fd7b5..13ac5cd9ec81 100644
--- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts
+++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts
@@ -77,10 +77,19 @@ describe('errors propagation', () => {
   test('works with implicit transaction', async () => {
     await assert.rejects(
       doQuery({
-        modelName: 'Product',
-        action: 'deleteMany',
+        modelName: 'User',
+        action: 'createOne',
         query: {
-          arguments: {},
+          arguments: {
+            data: {
+              email: 'user@example.com',
+              favoriteProduct: {
+                create: {
+                  properties: {},
+                },
+              },
+            },
+          },
           selection: {
             $scalars: true,
           },

From 9cc3db6a9d43fdaf3572d95fff2f8015c04688f5 Mon Sep 17 00:00:00 2001
From: Flavian Desverne
Date: Mon, 9 Oct 2023 12:09:22 +0200
Subject: [PATCH 089/128] fix: relation filters should use linking fields on
 both sides of IN clause (#4318)

---
 .../src/cockroach_datamodel_connector.rs      |   3 +-
 .../src/mysql_datamodel_connector.rs          |   3 +-
 .../src/postgres_datamodel_connector.rs       |   3 +-
 .../src/sqlite_datamodel_connector.rs         |   3 +-
 .../src/datamodel_connector/capabilities.rs   |   1 +
 .../tests/queries/filters/one_relation.rs     |  83 ++++++++
 .../sql-query-connector/src/filter/visitor.rs | 201 ++++++++++++------
 7 files changed, 226 insertions(+), 71 deletions(-)

diff --git a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs
index 1c698a644b58..5456deb59df6 100644
--- a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs
+++ b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs
@@ -57,7 +57,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector
     MultiSchema |
     FilteredInlineChildNestedToOneDisconnect |
     InsertReturning |
-    UpdateReturning
+    UpdateReturning |
+    RowIn
 });
 
 const SCALAR_TYPE_DEFAULTS: &[(ScalarType, CockroachType)] = &[
diff --git a/psl/builtin-connectors/src/mysql_datamodel_connector.rs b/psl/builtin-connectors/src/mysql_datamodel_connector.rs
index d4688438d299..39995fb5d48d 100644
---
a/psl/builtin-connectors/src/mysql_datamodel_connector.rs +++ b/psl/builtin-connectors/src/mysql_datamodel_connector.rs @@ -58,7 +58,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector SupportsTxIsolationReadUncommitted | SupportsTxIsolationReadCommitted | SupportsTxIsolationRepeatableRead | - SupportsTxIsolationSerializable + SupportsTxIsolationSerializable | + RowIn }); const CONSTRAINT_SCOPES: &[ConstraintScope] = &[ConstraintScope::GlobalForeignKey, ConstraintScope::ModelKeyIndex]; diff --git a/psl/builtin-connectors/src/postgres_datamodel_connector.rs b/psl/builtin-connectors/src/postgres_datamodel_connector.rs index 6cd160c40670..8fac79165c58 100644 --- a/psl/builtin-connectors/src/postgres_datamodel_connector.rs +++ b/psl/builtin-connectors/src/postgres_datamodel_connector.rs @@ -64,7 +64,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector SupportsTxIsolationSerializable | NativeUpsert | InsertReturning | - UpdateReturning + UpdateReturning | + RowIn }); pub struct PostgresDatamodelConnector; diff --git a/psl/builtin-connectors/src/sqlite_datamodel_connector.rs b/psl/builtin-connectors/src/sqlite_datamodel_connector.rs index 6b66a6c524ca..d5e6041f9b43 100644 --- a/psl/builtin-connectors/src/sqlite_datamodel_connector.rs +++ b/psl/builtin-connectors/src/sqlite_datamodel_connector.rs @@ -24,7 +24,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector OrderByNullsFirstLast | SupportsTxIsolationSerializable | NativeUpsert | - FilteredInlineChildNestedToOneDisconnect + FilteredInlineChildNestedToOneDisconnect | + RowIn // InsertReturning - While SQLite does support RETURNING, it does not return column information on the way back from the database. // This column type information is necessary in order to preserve consistency for some data types such as int, where values could overflow. // Since we care to stay consistent with reads, it is not enabled. diff --git a/psl/psl-core/src/datamodel_connector/capabilities.rs b/psl/psl-core/src/datamodel_connector/capabilities.rs index 7bfee8c02916..1b3f557e6285 100644 --- a/psl/psl-core/src/datamodel_connector/capabilities.rs +++ b/psl/psl-core/src/datamodel_connector/capabilities.rs @@ -103,6 +103,7 @@ capabilities!( NativeUpsert, InsertReturning, UpdateReturning, + RowIn, // Connector supports (a, b) IN (c, d) expression. ); /// Contains all capabilities that the connector is able to serve. diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs index 3a56dd18abab..cca380f81138 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/one_relation.rs @@ -323,6 +323,89 @@ mod one_relation { Ok(()) } + // https://github.com/prisma/prisma/issues/21356 + fn schema_21356() -> String { + let schema = indoc! { + r#"model User { + #id(id, Int, @id) + name String? + + posts Post[] + + userId Int + userId2 Int + @@unique([userId, userId2]) + } + + model Post { + #id(id, Int, @id) + title String? + + userId Int? + userId_2 Int? + author User? 
@relation(fields: [userId, userId_2], references: [userId, userId2]) + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_21356))] + async fn repro_21356(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { createOneUser(data: { id: 1, userId: 1, userId2: 1, name: "Bob", posts: { create: { id: 1, title: "Hello" } } }) { id } }"# + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManyUser(where: { posts: { some: { author: { name: "Bob" } } } }) { id } }"#), + @r###"{"data":{"findManyUser":[{"id":1}]}}"### + ); + + Ok(()) + } + + // https://github.com/prisma/prisma/issues/21366 + fn schema_21366() -> String { + let schema = indoc! { + r#"model device { + #id(id, Int, @id) + + device_id String @unique + current_state device_state? @relation(fields: [device_id], references: [device_id], onDelete: NoAction) + } + + model device_state { + #id(id, Int, @id) + + device_id String @unique + device device[] + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_21366))] + async fn repro_21366(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOnedevice(data: { id: 1, current_state: { create: { id: 1, device_id: "1" } } }) { + id + } + } + "# + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManydevice_state(where: { device: { some: { device_id: "1" } } }) { id } }"#), + @r###"{"data":{"findManydevice_state":[{"id":1}]}}"### + ); + + Ok(()) + } + async fn test_data(runner: &Runner) -> TestResult<()> { runner .query(indoc! { r#" diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index 3b42457950f7..6ab32f89735f 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -4,6 +4,7 @@ use crate::{model_extensions::*, Context}; use connector_interface::filter::*; use prisma_models::prelude::*; +use psl::datamodel_connector::ConnectorCapability; use quaint::ast::concat; use quaint::ast::*; use std::convert::TryInto; @@ -110,84 +111,149 @@ impl FilterVisitor { res } - fn visit_relation_filter_select(&mut self, filter: RelationFilter, ctx: &Context<'_>) -> Select<'static> { + fn visit_relation_filter_select( + &mut self, + filter: RelationFilter, + ctx: &Context<'_>, + ) -> (ModelProjection, Select<'static>) { + let is_many_to_many = filter.field.relation().is_many_to_many(); + // HACK: This is temporary. A fix should be done in Quaint instead of branching out here. + // See https://www.notion.so/prismaio/Spec-Faulty-Tuple-Join-on-SQL-Server-55b8232fb44f4a6cb4d3f36428f17bac + // for more info + let support_row_in = filter + .field + .dm + .schema + .connector + .capabilities() + .contains(ConnectorCapability::RowIn); + let has_compound_fields = filter.field.linking_fields().into_inner().len() > 1; + + // If the relation is an M2M relation we don't have a choice but to join + // If the connector does not support (a, b) IN (SELECT c, d) and there are several linking fields, then we must use a join. + // Hint: SQL Server does not support `ROW() IN ()`. 
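+        //
+        // Illustrative sketch only (hypothetical `parent` and `child` tables linked on the
+        // compound key `(a, b)`; the real tables and columns come from the Prisma schema):
+        //
+        //   -- With the `RowIn` capability (e.g. PostgreSQL, MySQL):
+        //   ... WHERE (parent.a, parent.b) IN (SELECT child.a, child.b FROM child WHERE <nested filter>)
+        //
+        //   -- Without it (e.g. SQL Server), we must go through a join instead:
+        //   ... WHERE parent.id IN (
+        //       SELECT parent.id FROM parent
+        //       INNER JOIN child ON child.a = parent.a AND child.b = parent.b
+        //       WHERE <nested filter>
+        //   )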
+        if is_many_to_many || (!support_row_in && has_compound_fields) {
+            self.visit_relation_filter_select_no_row(filter, ctx)
+        } else {
+            self.visit_relation_filter_select_with_row(filter, ctx)
+        }
+    }
+
+    /// Traverses a relation filter using this rough SQL structure:
+    ///
+    /// ```sql
+    /// (parent.id) IN (
+    ///     SELECT id FROM parent
+    ///     INNER JOIN child ON (child.parent_id = parent.id)
+    ///     WHERE <nested filter>
+    /// )
+    /// ```
+    /// We need this in two cases:
+    /// - For M2M relations, as we need to traverse the join table so the join is not superfluous
+    /// - SQL Server because it does not support (a, b) IN (subselect)
+    fn visit_relation_filter_select_no_row(
+        &mut self,
+        filter: RelationFilter,
+        ctx: &Context<'_>,
+    ) -> (ModelProjection, Select<'static>) {
         let alias = self.next_alias(AliasMode::Table);
         let condition = filter.condition;
+        let table = filter.field.as_table(ctx);
+        let ids = ModelProjection::from(filter.field.model().primary_identifier());
 
-        // Perf: We can skip a join if the relation is inlined on the related model.
-        // In this case, we can select the related table's foreign key instead of joining.
-        // This is not possible in the case of M2M implicit relations.
-        if filter.field.related_field().is_inlined_on_enclosing_model() {
-            let related_table = filter.field.related_model().as_table(ctx);
-            let related_columns: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields())
-                .as_columns(ctx)
-                .map(|col| col.aliased_col(Some(alias), ctx))
-                .collect();
+        let selected_identifier: Vec<Column> = filter
+            .field
+            .identifier_columns(ctx)
+            .map(|col| col.aliased_col(Some(alias), ctx))
+            .collect();
 
-            let (nested_conditions, nested_joins) =
-                self.visit_nested_filter(alias, |this| this.visit_filter(*filter.nested_filter, ctx));
-            let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect());
+        let join_columns: Vec<Column> = filter
+            .field
+            .join_columns(ctx)
+            .map(|c| c.aliased_col(Some(alias), ctx))
+            .collect();
 
-            let conditions = related_columns
-                .clone()
-                .into_iter()
-                .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
+        let related_table = filter.field.related_model().as_table(ctx);
+        let related_join_columns: Vec<_> = ModelProjection::from(filter.field.related_field().linking_fields())
+            .as_columns(ctx)
+            .map(|col| col.aliased_col(Some(alias.flip(AliasMode::Join)), ctx))
+            .collect();
 
-            let select = Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table))))
-                .columns(related_columns)
-                .so_that(conditions);
+        let (nested_conditions, nested_joins) = self
+            .visit_nested_filter(alias.flip(AliasMode::Join), |nested_visitor| {
+                nested_visitor.visit_filter(*filter.nested_filter, ctx)
+            });
 
-            if let Some(nested_joins) = nested_joins {
-                nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data))
-            } else {
-                select
-            }
+        let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect());
+        let nested_conditons = selected_identifier
+            .clone()
+            .into_iter()
+            .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
+
+        let join = related_table
+            .alias(alias.to_string(Some(AliasMode::Join)))
+            .on(Row::from(related_join_columns).equals(Row::from(join_columns)));
+
+        let select = Select::from_table(table.alias(alias.to_string(Some(AliasMode::Table))))
+            .columns(selected_identifier)
+            .inner_join(join)
+            .so_that(nested_conditons);
+
+        let select = if let Some(nested_joins) = nested_joins {
+            nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data))
+        } else {
+            select
+        };
+
+        (ids, select)
+    }
+
+    /// Traverses a relation filter using this rough SQL structure:
+    ///
+    /// ```sql
+    /// (parent.id1, parent.id2) IN (
+    ///     SELECT id1, id2 FROM child
+    ///     WHERE <nested filter>
+    /// )
+    /// ```
+    fn visit_relation_filter_select_with_row(
+        &mut self,
+        filter: RelationFilter,
+        ctx: &Context<'_>,
+    ) -> (ModelProjection, Select<'static>) {
+        let alias = self.next_alias(AliasMode::Table);
+        let condition = filter.condition;
+        let linking_fields = ModelProjection::from(filter.field.linking_fields());
+
+        let related_table = filter.field.related_model().as_table(ctx);
+        // Select linking fields to match the linking fields of the parent record
+        let related_columns: Vec<_> = filter
+            .field
+            .related_field()
+            .join_columns(ctx)
+            .map(|col| col.aliased_col(Some(alias), ctx))
+            .collect();
+
+        let (nested_conditions, nested_joins) =
+            self.visit_nested_filter(alias, |this| this.visit_filter(*filter.nested_filter, ctx));
+        let nested_conditions = nested_conditions.invert_if(condition.invert_of_subselect());
+
+        let conditions = related_columns
+            .clone()
+            .into_iter()
+            .fold(nested_conditions, |acc, column| acc.and(column.is_not_null()));
+
+        let select = Select::from_table(related_table.alias(alias.to_string(Some(AliasMode::Table))))
+            .columns(related_columns)
+            .so_that(conditions);
+
+        let select = if let Some(nested_joins) = nested_joins {
+            nested_joins.into_iter().fold(select, |acc, join| acc.join(join.data))
+        } else {
+            select
+        };
+
+        (linking_fields, select)
+    }
 }
 
@@ -392,11 +458,12 @@ impl FilterVisitorExt for FilterVisitor {
         }
 
             _ => {
-                let ids = ModelProjection::from(filter.field.model().primary_identifier()).as_columns(ctx);
-                let columns: Vec<Column<'static>> = ids.map(|col| col.aliased_col(self.parent_alias(), ctx)).collect();
-
                 let condition = filter.condition;
-                let sub_select = self.visit_relation_filter_select(filter, ctx);
+                let (ids, sub_select) = self.visit_relation_filter_select(filter, ctx);
+                let columns: Vec<Column<'static>> = ids
+                    .as_columns(ctx)
+                    .map(|col| col.aliased_col(self.parent_alias(), ctx))
+                    .collect();
 
                let
comparison = match condition { RelationCondition::AtLeastOneRelatedRecord => Row::from(columns).in_selection(sub_select), From 76cf002c0dda5c1db33fb47ebe5d85ed52af1ecb Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 9 Oct 2023 17:19:41 +0200 Subject: [PATCH 090/128] feat(query-engine): add skeleton for Wasm Query Engine (#4333) Co-authored-by: Sergey Tatarintsev --- Cargo.lock | 99 +++++++ Cargo.toml | 1 + query-engine/query-engine-wasm/.gitignore | 7 + query-engine/query-engine-wasm/.nvmrc | 1 + query-engine/query-engine-wasm/Cargo.toml | 36 +++ query-engine/query-engine-wasm/README.md | 40 +++ query-engine/query-engine-wasm/build.rs | 11 + query-engine/query-engine-wasm/build.sh | 51 ++++ query-engine/query-engine-wasm/example.js | 54 ++++ .../query-engine-wasm/package-lock.json | 148 ++++++++++ query-engine/query-engine-wasm/package.json | 9 + query-engine/query-engine-wasm/src/engine.rs | 265 ++++++++++++++++++ query-engine/query-engine-wasm/src/error.rs | 93 ++++++ .../query-engine-wasm/src/functions.rs | 47 ++++ query-engine/query-engine-wasm/src/lib.rs | 19 ++ query-engine/query-engine-wasm/src/logger.rs | 132 +++++++++ query-engine/query-engine-wasm/src/proxy.rs | 107 +++++++ 17 files changed, 1120 insertions(+) create mode 100644 query-engine/query-engine-wasm/.gitignore create mode 100644 query-engine/query-engine-wasm/.nvmrc create mode 100644 query-engine/query-engine-wasm/Cargo.toml create mode 100644 query-engine/query-engine-wasm/README.md create mode 100644 query-engine/query-engine-wasm/build.rs create mode 100755 query-engine/query-engine-wasm/build.sh create mode 100644 query-engine/query-engine-wasm/example.js create mode 100644 query-engine/query-engine-wasm/package-lock.json create mode 100644 query-engine/query-engine-wasm/package.json create mode 100644 query-engine/query-engine-wasm/src/engine.rs create mode 100644 query-engine/query-engine-wasm/src/error.rs create mode 100644 query-engine/query-engine-wasm/src/functions.rs create mode 100644 query-engine/query-engine-wasm/src/lib.rs create mode 100644 query-engine/query-engine-wasm/src/logger.rs create mode 100644 query-engine/query-engine-wasm/src/proxy.rs diff --git a/Cargo.lock b/Cargo.lock index 3002a1404210..8166394f8c89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -672,6 +672,16 @@ dependencies = [ "windows-sys 0.45.0", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -1563,6 +1573,19 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "graphql-parser" version = "0.3.0" @@ -3784,6 +3807,35 @@ dependencies = [ "uuid", ] +[[package]] +name = "query-engine-wasm" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "connection-string", + "console_error_panic_hook", + "futures", + "js-sys", + "log", + "prisma-models", + "psl", + "serde", + "serde-wasm-bindgen", + "serde_json", + "thiserror", + "tokio", + 
"tracing", + "tracing-futures", + "tracing-subscriber", + "tsify", + "url", + "user-facing-errors", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-logger", +] + [[package]] name = "query-test-macros" version = "0.1.0" @@ -4533,6 +4585,17 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3b143e2833c57ab9ad3ea280d21fd34e285a42837aeb0ee301f4f41890fa00e" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + [[package]] name = "serde_bytes" version = "0.11.12" @@ -4553,6 +4616,17 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "serde_derive_internals" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + [[package]] name = "serde_json" version = "1.0.104" @@ -5663,6 +5737,31 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +[[package]] +name = "tsify" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b26cf145f2f3b9ff84e182c448eaf05468e247f148cf3d2a7d67d78ff023a0" +dependencies = [ + "gloo-utils", + "serde", + "serde_json", + "tsify-macros", + "wasm-bindgen", +] + +[[package]] +name = "tsify-macros" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a94b0f0954b3e59bfc2c246b4c8574390d94a4ad4ad246aaf2fb07d7dfd3b47" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.28", +] + [[package]] name = "twox-hash" version = "1.6.3" diff --git a/Cargo.toml b/Cargo.toml index 4499033a624b..4a3cd1450caf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ members = [ "query-engine/prisma-models", "query-engine/query-engine", "query-engine/query-engine-node-api", + "query-engine/query-engine-wasm", "query-engine/request-handlers", "query-engine/schema", "libs/*", diff --git a/query-engine/query-engine-wasm/.gitignore b/query-engine/query-engine-wasm/.gitignore new file mode 100644 index 000000000000..a6f0e4dca125 --- /dev/null +++ b/query-engine/query-engine-wasm/.gitignore @@ -0,0 +1,7 @@ +/target +**/*.rs.bk +Cargo.lock +bin/ +pkg/ +wasm-pack.log +node_modules/ \ No newline at end of file diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc new file mode 100644 index 000000000000..8c60e1e54f37 --- /dev/null +++ b/query-engine/query-engine-wasm/.nvmrc @@ -0,0 +1 @@ +v20.5.1 diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml new file mode 100644 index 000000000000..a8bc393aee3f --- /dev/null +++ b/query-engine/query-engine-wasm/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "query-engine-wasm" +version = "0.1.0" +edition = "2021" + +[lib] +doc = false +crate-type = ["cdylib"] +name = "query_engine" + +[dependencies] +anyhow = "1" +async-trait = "0.1" +user-facing-errors = { path = "../../libs/user-facing-errors" } +psl.workspace = true +prisma-models = { path = "../prisma-models" } + +thiserror = "1" +connection-string.workspace = true +url = "2" +serde_json.workspace = true +serde.workspace = true +tokio = { version = "1.25", features = ["macros", "sync", "io-util", "time"] } +futures = "0.3" +wasm-bindgen = "=0.2.87" +wasm-bindgen-futures = "0.4" 
+serde-wasm-bindgen = "0.5" +js-sys = "0.3" +log = "0.4.6" +wasm-logger = "0.2.0" + +tracing = "0.1" +tracing-subscriber = { version = "0.3" } +tracing-futures = "0.2" +tsify = "0.4.5" +console_error_panic_hook = "0.1.7" diff --git a/query-engine/query-engine-wasm/README.md b/query-engine/query-engine-wasm/README.md new file mode 100644 index 000000000000..f5adc7eb2894 --- /dev/null +++ b/query-engine/query-engine-wasm/README.md @@ -0,0 +1,40 @@ +# @prisma/query-engine-wasm + +**INTERNAL PACKAGE, DO NOT USE** + +This is a Wasm-compatible version of the Query Engine library (libquery). +Currently, it just contains a skeleton of the public API, as some internal crates are still not Wasm-compatible. + +The published npm package is internal to Prisma. Its API will break without prior warning. + +## Setup + +``` +# Install the latest Rust version with `rustup` +# or update the latest Rust version with `rustup` +rustup update +rustup target add wasm32-unknown-unknown +cargo install wasm-bindgen +cargo install wasm-pack +``` + +## How to Build + +From the current folder: + +- `./build.sh $OUT_NPM_VERSION` + +where e.g. `OUT_NPM_VERSION="0.0.1"` is the version you want to publish this package on npm with. + +## How to Publish + +From the current folder: + +- `wasm-pack publish --access public` + +## How to Test + +To try importing the , you can run: + +- `nvm use` +- `node --experimental-wasm-modules ./example.js` diff --git a/query-engine/query-engine-wasm/build.rs b/query-engine/query-engine-wasm/build.rs new file mode 100644 index 000000000000..2e8fe20c0503 --- /dev/null +++ b/query-engine/query-engine-wasm/build.rs @@ -0,0 +1,11 @@ +use std::process::Command; + +fn store_git_commit_hash() { + let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); + let git_hash = String::from_utf8(output.stdout).unwrap(); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); +} + +fn main() { + store_git_commit_hash(); +} diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh new file mode 100755 index 000000000000..12d8328305ff --- /dev/null +++ b/query-engine/query-engine-wasm/build.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# Call this script as `./build.sh ` + +OUT_VERSION="$1" +OUT_FOLDER="pkg" +OUT_JSON="${OUT_FOLDER}/package.json" +OUT_TARGET="bundler" # Note(jkomyno): I wasn't able to make it work with `web` target +OUT_NPM_NAME="@prisma/query-engine-wasm" + +wasm-pack build --release --target $OUT_TARGET + +sleep 1 + +# Mark the package as a ES module, set the entry point to the query_engine.js file, mark the package as public +printf '%s\n' "$(jq '. + {"type": "module"} + {"main": "./query_engine.js"} + {"private": false}' $OUT_JSON)" > $OUT_JSON + +# Add the version +printf '%s\n' "$(jq --arg version "$OUT_VERSION" '. + {"version": $version}' $OUT_JSON)" > $OUT_JSON + +# Add the package name +printf '%s\n' "$(jq --arg name "$OUT_NPM_NAME" '. + {"name": $name}' $OUT_JSON)" > $OUT_JSON + +enable_cf_in_bindings() { + # Enable Cloudflare Workers in the generated JS bindings. 
+    # The generated bindings are compatible with:
+    # - Node.js
+    # - Cloudflare Workers / Miniflare
+
+    local FILE="$1" # e.g., `query_engine.js`
+    local BG_FILE="${FILE%.js}_bg.js"
+    local OUTPUT_FILE="${OUT_FOLDER}/${FILE}"
+
+    cat <<EOF > "$OUTPUT_FILE"
+import * as imports from "./${BG_FILE}";
+
+// switch between both syntax for Node.js and for workers (Cloudflare Workers)
+import * as wkmod from "./${BG_FILE%.js}.wasm";
+import * as nodemod from "./${BG_FILE%.js}.wasm";
+if ((typeof process !== 'undefined') && (process.release.name === 'node')) {
+    imports.__wbg_set_wasm(nodemod);
+} else {
+    const instance = new WebAssembly.Instance(wkmod.default, { "./${BG_FILE}": imports });
+    imports.__wbg_set_wasm(instance.exports);
+}
+
+export * from "./${BG_FILE}";
+EOF
+}
+
+enable_cf_in_bindings "query_engine.js"
diff --git a/query-engine/query-engine-wasm/example.js b/query-engine/query-engine-wasm/example.js
new file mode 100644
index 000000000000..bca6d5ba95d7
--- /dev/null
+++ b/query-engine/query-engine-wasm/example.js
@@ -0,0 +1,54 @@
+/**
+ * Run with: `node --experimental-wasm-modules ./example.js`
+ * on Node.js 18+.
+ */
+
+import { Pool } from '@neondatabase/serverless'
+import { PrismaNeon } from '@prisma/adapter-neon'
+import { bindAdapter } from '@prisma/driver-adapter-utils'
+import { init, QueryEngine, getBuildTimeInfo } from './pkg/query_engine.js'
+
+async function main() {
+  // Always initialize the Wasm library before using it.
+  // This sets up the logging and panic hooks.
+  init()
+
+  const connectionString = undefined
+
+  const pool = new Pool({ connectionString })
+  const adapter = new PrismaNeon(pool)
+  const driverAdapter = bindAdapter(adapter)
+
+  console.log('buildTimeInfo', getBuildTimeInfo())
+
+  const options = {
+    datamodel: /* prisma */`
+      datasource db {
+        provider = "postgres"
+        url      = env("DATABASE_URL")
+      }
+
+      generator client {
+        provider = "prisma-client-js"
+      }
+
+      model User {
+        id Int @id @default(autoincrement())
+      }
+    `,
+    logLevel: 'info',
+    logQueries: true,
+    datasourceOverrides: {},
+    env: process.env,
+    configDir: '/tmp',
+    ignoreEnvVarErrors: true,
+  }
+  const callback = () => { console.log('log-callback') }
+
+  const queryEngine = new QueryEngine(options, callback, driverAdapter)
+
+  await queryEngine.connect('trace')
+  await queryEngine.disconnect('trace')
+}
+
+main()
diff --git a/query-engine/query-engine-wasm/package-lock.json b/query-engine/query-engine-wasm/package-lock.json
new file mode 100644
index 000000000000..bc854644f6dd
--- /dev/null
+++ b/query-engine/query-engine-wasm/package-lock.json
@@ -0,0 +1,148 @@
+{
+  "name": "query-engine-wasm",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "dependencies": {
+        "@neondatabase/serverless": "^0.6.0",
+        "@prisma/adapter-neon": "^5.4.1",
+        "@prisma/driver-adapter-utils": "^5.4.1"
+      }
+    },
+    "node_modules/@neondatabase/serverless": {
+      "version": "0.6.0",
+      "resolved": "https://registry.npmjs.org/@neondatabase/serverless/-/serverless-0.6.0.tgz",
+      "integrity": "sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==",
+      "dependencies": {
+        "@types/pg": "8.6.6"
+      }
+    },
+    "node_modules/@prisma/adapter-neon": {
+      "version": "5.4.1",
+      "resolved": "https://registry.npmjs.org/@prisma/adapter-neon/-/adapter-neon-5.4.1.tgz",
+      "integrity": "sha512-mIwLmwyAwDV9HXar9lSyM2uVm9H+X8noG4reKLnC3NjFsBxBfSUgW9vS8dPGqGW/rJWX3hg4pIffjEjmX4TDqg==",
+      "dependencies": {
+        "@prisma/driver-adapter-utils": "5.4.1"
+      },
+      "peerDependencies": {
"@neondatabase/serverless": "^0.6.0" + } + }, + "node_modules/@prisma/driver-adapter-utils": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/@prisma/driver-adapter-utils/-/driver-adapter-utils-5.4.1.tgz", + "integrity": "sha512-muYjkzf6qdxz4uGBi7nKyPaGRGLnSgiRautqAhZiMwbTOr9hMgyNI+aCJTCaKfYfNWjYCx2r5J6R1mJtPhzFhQ==", + "dependencies": { + "debug": "^4.3.4" + } + }, + "node_modules/@types/node": { + "version": "20.8.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz", + "integrity": "sha512-Vvycsc9FQdwhxE3y3DzeIxuEJbWGDsnrxvMADzTDF/lcdR9/K+AQIeAghTQsHtotg/q0j3WEOYS/jQgSdWue3w==" + }, + "node_modules/@types/pg": { + "version": "8.6.6", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.6.tgz", + "integrity": "sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", + "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/query-engine/query-engine-wasm/package.json b/query-engine/query-engine-wasm/package.json new file mode 100644 index 000000000000..538080ec1b8c --- /dev/null +++ b/query-engine/query-engine-wasm/package.json @@ -0,0 +1,9 @@ +{ + "type": "module", + "main": "./example.js", + "dependencies": { + "@neondatabase/serverless": "^0.6.0", + "@prisma/adapter-neon": "^5.4.1", + "@prisma/driver-adapter-utils": "^5.4.1" + } +} diff --git a/query-engine/query-engine-wasm/src/engine.rs b/query-engine/query-engine-wasm/src/engine.rs new file mode 100644 index 000000000000..f9a06fabcf4b --- /dev/null +++ b/query-engine/query-engine-wasm/src/engine.rs @@ -0,0 +1,265 @@ +#![allow(dead_code)] +#![allow(unused_variables)] + +use crate::proxy; +use crate::{ + error::ApiError, + logger::{LogCallback, Logger}, +}; +use js_sys::{Function as JsFunction, Object as JsObject}; +use serde::{Deserialize, Serialize}; +use std::{ + collections::{BTreeMap, HashMap}, + path::PathBuf, + sync::Arc, +}; +use tokio::sync::RwLock; +use tracing_subscriber::filter::LevelFilter; +use tsify::Tsify; +use wasm_bindgen::prelude::wasm_bindgen; + +/// The main query engine used by JS +#[wasm_bindgen] +pub struct QueryEngine { + inner: RwLock, + logger: Logger, +} + +/// The state of the engine. +enum Inner { + /// Not connected, holding all data to form a connection. + Builder(EngineBuilder), + /// A connected engine, holding all data to disconnect and form a new + /// connection. Allows querying when on this state. + Connected(ConnectedEngine), +} + +/// Everything needed to connect to the database and have the core running. +struct EngineBuilder { + schema: Arc, + config_dir: PathBuf, + env: HashMap, +} + +/// Internal structure for querying and reconnecting with the engine. +struct ConnectedEngine { + schema: Arc, + config_dir: PathBuf, + env: HashMap, +} + +/// Returned from the `serverInfo` method in javascript. +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct ServerInfo { + commit: String, + version: String, + primary_connector: Option, +} + +/// Parameters defining the construction of an engine. 
+#[derive(Debug, Deserialize, Tsify)]
+#[tsify(from_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct ConstructorOptions {
+    datamodel: String,
+    log_level: String,
+    #[serde(default)]
+    log_queries: bool,
+    #[serde(default)]
+    datasource_overrides: BTreeMap<String, String>,
+    #[serde(default)]
+    env: serde_json::Value,
+    config_dir: PathBuf,
+    #[serde(default)]
+    ignore_env_var_errors: bool,
+    #[serde(default)]
+    engine_protocol: Option<String>,
+}
+
+impl Inner {
+    /// Returns a builder if the engine is not connected
+    fn as_builder(&self) -> crate::Result<&EngineBuilder> {
+        match self {
+            Inner::Builder(ref builder) => Ok(builder),
+            Inner::Connected(_) => Err(ApiError::AlreadyConnected),
+        }
+    }
+
+    /// Returns the engine if connected
+    fn as_engine(&self) -> crate::Result<&ConnectedEngine> {
+        match self {
+            Inner::Builder(_) => Err(ApiError::NotConnected),
+            Inner::Connected(ref engine) => Ok(engine),
+        }
+    }
+}
+
+#[wasm_bindgen]
+impl QueryEngine {
+    /// Parse a validated datamodel and configuration to allow connecting later on.
+    #[wasm_bindgen(constructor)]
+    pub fn new(
+        options: ConstructorOptions,
+        callback: JsFunction,
+        maybe_adapter: Option<JsObject>,
+    ) -> Result<QueryEngine, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::new()`");
+
+        let log_callback = LogCallback(callback);
+        log::info!("Parsed `log_callback`");
+
+        let ConstructorOptions {
+            datamodel,
+            log_level,
+            log_queries,
+            datasource_overrides,
+            env,
+            config_dir,
+            ignore_env_var_errors,
+            engine_protocol,
+        } = options;
+
+        let env = stringify_env_values(env)?; // we cannot trust anything JS sends us from process.env
+        let overrides: Vec<(_, _)> = datasource_overrides.into_iter().collect();
+
+        let mut schema = psl::validate(datamodel.into());
+        let config = &mut schema.configuration;
+
+        if let Some(adapter) = maybe_adapter {
+            let js_queryable =
+                proxy::from_wasm(adapter).map_err(|e| ApiError::configuration(e.as_string().unwrap_or_default()))?;
+
+            let provider_name = schema.connector.provider_name();
+            log::info!("Received driver adapter for {provider_name}.");
+        }
+
+        schema
+            .diagnostics
+            .to_result()
+            .map_err(|err| ApiError::conversion(err, schema.db.source()))?;
+
+        config
+            .resolve_datasource_urls_query_engine(
+                &overrides,
+                |key| env.get(key).map(ToString::to_string),
+                ignore_env_var_errors,
+            )
+            .map_err(|err| ApiError::conversion(err, schema.db.source()))?;
+
+        config
+            .validate_that_one_datasource_is_provided()
+            .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?;
+
+        let builder = EngineBuilder {
+            schema: Arc::new(schema),
+            config_dir,
+            env,
+        };
+
+        let log_level = log_level.parse::<LevelFilter>().unwrap();
+        let logger = Logger::new(log_queries, log_level, log_callback);
+
+        Ok(Self {
+            inner: RwLock::new(Inner::Builder(builder)),
+            logger,
+        })
+    }
+
+    /// Connect to the database, allow queries to be run.
+    #[wasm_bindgen]
+    pub async fn connect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::connect()`");
+        Ok(())
+    }
+
+    /// Disconnect and drop the core. Can be reconnected later with `#connect`.
+    #[wasm_bindgen]
+    pub async fn disconnect(&self, trace: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::disconnect()`");
+        Ok(())
+    }
+
+    /// If connected, sends a query to the core and returns the response.
+    #[wasm_bindgen]
+    pub async fn query(
+        &self,
+        body: String,
+        trace: String,
+        tx_id: Option<String>,
+    ) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::query()`");
+        Err(ApiError::configuration("Can't use `query` until `request_handlers` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to start a transaction in the core and returns its ID.
+    #[wasm_bindgen(js_name = startTransaction)]
+    pub async fn start_transaction(&self, input: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::start_transaction()`");
+        Err(ApiError::configuration("Can't use `start_transaction` until `query_core` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to commit a transaction with id `tx_id` in the core.
+    #[wasm_bindgen(js_name = commitTransaction)]
+    pub async fn commit_transaction(&self, tx_id: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::commit_transaction()`");
+        Err(ApiError::configuration("Can't use `commit_transaction` until `query_core` is Wasm-compatible.").into())
+    }
+
+    #[wasm_bindgen]
+    pub async fn dmmf(&self, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::dmmf()`");
+        Err(ApiError::configuration("Can't use `dmmf` until `request_handlers` is Wasm-compatible.").into())
+    }
+
+    /// If connected, attempts to roll back a transaction with id `tx_id` in the core.
+    #[wasm_bindgen(js_name = rollbackTransaction)]
+    pub async fn rollback_transaction(&self, tx_id: String, trace: String) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::rollback_transaction()`");
+        Ok("{}".to_owned())
+    }
+
+    /// Loads the query schema. Only available when connected.
+    #[wasm_bindgen(js_name = sdlSchema)]
+    pub async fn sdl_schema(&self) -> Result<String, wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::sdl_schema()`");
+        Ok("{}".to_owned())
+    }
+
+    #[wasm_bindgen]
+    pub async fn metrics(&self, json_options: String) -> Result<(), wasm_bindgen::JsError> {
+        log::info!("Called `QueryEngine::metrics()`");
+        Err(ApiError::configuration("Metrics is not enabled in Wasm.").into())
+    }
+}
+
+fn stringify_env_values(origin: serde_json::Value) -> crate::Result<HashMap<String, String>> {
+    use serde_json::Value;
+
+    let msg = match origin {
+        Value::Object(map) => {
+            let mut result: HashMap<String, String> = HashMap::new();
+
+            for (key, val) in map.into_iter() {
+                match val {
+                    Value::Null => continue,
+                    Value::String(val) => {
+                        result.insert(key, val);
+                    }
+                    val => {
+                        result.insert(key, val.to_string());
+                    }
+                }
+            }
+
+            return Ok(result);
+        }
+        Value::Null => return Ok(Default::default()),
+        Value::Bool(_) => "Expected an object for the env constructor parameter, got a boolean.",
+        Value::Number(_) => "Expected an object for the env constructor parameter, got a number.",
+        Value::String(_) => "Expected an object for the env constructor parameter, got a string.",
+        Value::Array(_) => "Expected an object for the env constructor parameter, got an array.",
+    };
+
+    Err(ApiError::JsonDecode(msg.to_string()))
+}
diff --git a/query-engine/query-engine-wasm/src/error.rs b/query-engine/query-engine-wasm/src/error.rs
new file mode 100644
index 000000000000..619e96564f6a
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/error.rs
@@ -0,0 +1,93 @@
+use psl::diagnostics::Diagnostics;
+// use query_connector::error::ConnectorError;
+// use query_core::CoreError;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum ApiError {
+    #[error("{:?}", _0)]
+    Conversion(Diagnostics, String),
+
+    #[error("{}", _0)]
+    Configuration(String),
+
+    // #[error("{}", _0)]
+    // Core(CoreError),
+
+    // #[error("{}", _0)]
+    // 
Connector(ConnectorError),
+    #[error("Can't modify an already connected engine.")]
+    AlreadyConnected,
+
+    #[error("Engine is not yet connected.")]
+    NotConnected,
+
+    #[error("{}", _0)]
+    JsonDecode(String),
+}
+
+impl From<ApiError> for user_facing_errors::Error {
+    fn from(err: ApiError) -> Self {
+        use std::fmt::Write as _;
+
+        match err {
+            // ApiError::Connector(ConnectorError {
+            //     user_facing_error: Some(err),
+            //     ..
+            // }) => err.into(),
+            ApiError::Conversion(errors, dml_string) => {
+                let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string);
+                write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap();
+
+                user_facing_errors::Error::from(user_facing_errors::KnownError::new(
+                    user_facing_errors::common::SchemaParserError { full_error },
+                ))
+            }
+            // ApiError::Core(error) => user_facing_errors::Error::from(error),
+            other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()),
+        }
+    }
+}
+
+impl ApiError {
+    pub fn conversion(diagnostics: Diagnostics, dml: impl ToString) -> Self {
+        Self::Conversion(diagnostics, dml.to_string())
+    }
+
+    pub fn configuration(msg: impl ToString) -> Self {
+        Self::Configuration(msg.to_string())
+    }
+}
+
+// impl From<CoreError> for ApiError {
+//     fn from(e: CoreError) -> Self {
+//         match e {
+//             CoreError::ConfigurationError(message) => Self::Configuration(message),
+//             core_error => Self::Core(core_error),
+//         }
+//     }
+// }
+
+// impl From<ConnectorError> for ApiError {
+//     fn from(e: ConnectorError) -> Self {
+//         Self::Connector(e)
+//     }
+// }
+
+impl From<url::ParseError> for ApiError {
+    fn from(e: url::ParseError) -> Self {
+        Self::configuration(format!("Error parsing connection string: {e}"))
+    }
+}
+
+impl From<connection_string::Error> for ApiError {
+    fn from(e: connection_string::Error) -> Self {
+        Self::configuration(format!("Error parsing connection string: {e}"))
+    }
+}
+
+impl From<serde_json::Error> for ApiError {
+    fn from(e: serde_json::Error) -> Self {
+        Self::JsonDecode(format!("{e}"))
+    }
+}
diff --git a/query-engine/query-engine-wasm/src/functions.rs b/query-engine/query-engine-wasm/src/functions.rs
new file mode 100644
index 000000000000..e0f0a93aa5cd
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/functions.rs
@@ -0,0 +1,47 @@
+use crate::error::ApiError;
+use serde::Serialize;
+use tsify::Tsify;
+use wasm_bindgen::prelude::wasm_bindgen;
+
+#[derive(Serialize, Tsify)]
+#[tsify(into_wasm_abi)]
+#[serde(rename_all = "camelCase")]
+pub struct Version {
+    pub commit: &'static str,
+    pub version: &'static str,
+}
+
+#[wasm_bindgen(js_name = "getBuildTimeInfo")]
+pub fn version() -> Version {
+    Version {
+        commit: env!("GIT_HASH"),
+        version: env!("CARGO_PKG_VERSION"),
+    }
+}
+
+#[wasm_bindgen]
+pub fn dmmf(datamodel_string: String) -> Result<String, wasm_bindgen::JsError> {
+    let mut schema = psl::validate(datamodel_string.into());
+
+    schema
+        .diagnostics
+        .to_result()
+        .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?;
+
+    Ok("{}".to_string())
+
+    // let query_schema = query_core::schema::build(Arc::new(schema), true);
+    // let dmmf = dmmf::render_dmmf(&query_schema);
+
+    // Ok(serde_json::to_string(&dmmf)?)
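+    //
+    // Hypothetical JS-side usage of this stub (the names match the
+    // wasm-bindgen exports above; until `query_core` compiles to Wasm, a
+    // valid schema just yields "{}" and an invalid one throws):
+    //
+    //     import { init, dmmf } from './pkg/query_engine.js'
+    //     init()
+    //     console.log(dmmf('datasource db {\n provider = "postgres"\n url = env("DATABASE_URL")\n}'))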
+}
+
+#[wasm_bindgen]
+pub fn debug_panic(panic_message: Option<String>) -> Result<(), wasm_bindgen::JsError> {
+    let user_facing = user_facing_errors::Error::from_panic_payload(Box::new(
+        panic_message.unwrap_or_else(|| "query-engine-wasm debug panic".to_string()),
+    ));
+    let message = serde_json::to_string(&user_facing).unwrap();
+
+    Err(wasm_bindgen::JsError::new(&message))
+}
diff --git a/query-engine/query-engine-wasm/src/lib.rs b/query-engine/query-engine-wasm/src/lib.rs
new file mode 100644
index 000000000000..89b519515517
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/lib.rs
@@ -0,0 +1,19 @@
+pub mod engine;
+pub mod error;
+pub mod functions;
+pub mod logger;
+mod proxy;
+
+pub(crate) type Result<T> = std::result::Result<T, error::ApiError>;
+
+use wasm_bindgen::prelude::wasm_bindgen;
+
+/// Function that should be called before any other public function in this module.
+#[wasm_bindgen]
+pub fn init() {
+    // Set up temporary logging for the wasm module.
+    wasm_logger::init(wasm_logger::Config::default());
+
+    // Set up temporary panic hook for the wasm module.
+    std::panic::set_hook(Box::new(console_error_panic_hook::hook));
+}
diff --git a/query-engine/query-engine-wasm/src/logger.rs b/query-engine/query-engine-wasm/src/logger.rs
new file mode 100644
index 000000000000..561c48271b77
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/logger.rs
@@ -0,0 +1,132 @@
+#![allow(dead_code)]
+
+use core::fmt;
+use js_sys::Function as JsFunction;
+use serde_json::Value;
+use std::collections::BTreeMap;
+use tracing::{
+    field::{Field, Visit},
+    level_filters::LevelFilter,
+    Dispatch, Level, Subscriber,
+};
+use tracing_subscriber::{
+    filter::{filter_fn, FilterExt},
+    layer::SubscriberExt,
+    Layer, Registry,
+};
+use wasm_bindgen::JsValue;
+
+pub(crate) struct LogCallback(pub JsFunction);
+
+unsafe impl Send for LogCallback {}
+unsafe impl Sync for LogCallback {}
+
+pub(crate) struct Logger {
+    dispatcher: Dispatch,
+}
+
+impl Logger {
+    /// Creates a new logger using a call layer
+    pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback) -> Self {
+        let is_sql_query = filter_fn(|meta| {
+            meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query")
+        });
+
+        // is a mongodb query? 
+        let is_mongo_query = filter_fn(|meta| meta.target() == "mongodb_query_connector::query");
+
+        // We need to filter the messages to send to our callback logging mechanism
+        let filters = if log_queries {
+            // Filter trace query events (for query log) or based on the defined log level
+            is_sql_query.or(is_mongo_query).or(log_level).boxed()
+        } else {
+            // Filter based on the defined log level
+            FilterExt::boxed(log_level)
+        };
+
+        let layer = CallbackLayer::new(log_callback).with_filter(filters);
+
+        Self {
+            dispatcher: Dispatch::new(Registry::default().with(layer)),
+        }
+    }
+
+    pub fn dispatcher(&self) -> Dispatch {
+        self.dispatcher.clone()
+    }
+}
+
+pub struct JsonVisitor<'a> {
+    values: BTreeMap<&'a str, Value>,
+}
+
+impl<'a> JsonVisitor<'a> {
+    pub fn new(level: &Level, target: &str) -> Self {
+        let mut values = BTreeMap::new();
+        values.insert("level", serde_json::Value::from(level.to_string()));
+
+        // NOTE: a previous version used module_path; that is not correct, it should be _target_
+        values.insert("module_path", serde_json::Value::from(target));
+
+        JsonVisitor { values }
+    }
+}
+
+impl<'a> Visit for JsonVisitor<'a> {
+    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+        match field.name() {
+            name if name.starts_with("r#") => {
+                self.values
+                    .insert(&name[2..], serde_json::Value::from(format!("{value:?}")));
+            }
+            name => {
+                self.values.insert(name, serde_json::Value::from(format!("{value:?}")));
+            }
+        };
+    }
+
+    fn record_i64(&mut self, field: &Field, value: i64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_u64(&mut self, field: &Field, value: u64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_bool(&mut self, field: &Field, value: bool) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_str(&mut self, field: &Field, value: &str) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+}
+
+impl<'a> ToString for JsonVisitor<'a> {
+    fn to_string(&self) -> String {
+        serde_json::to_string(&self.values).unwrap()
+    }
+}
+
+pub(crate) struct CallbackLayer {
+    callback: LogCallback,
+}
+
+impl CallbackLayer {
+    pub fn new(callback: LogCallback) -> Self {
+        CallbackLayer { callback }
+    }
+}
+
+// A tracing layer for sending logs to a JS callback. Layers are composable; subscribers are not.
+impl<S: Subscriber> Layer<S> for CallbackLayer {
+    fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) {
+        let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target());
+        event.record(&mut visitor);
+
+        let _ = self
+            .callback
+            .0
+            .call1(&JsValue::NULL, &JsValue::from_str(&visitor.to_string()));
+    }
+}
diff --git a/query-engine/query-engine-wasm/src/proxy.rs b/query-engine/query-engine-wasm/src/proxy.rs
new file mode 100644
index 000000000000..ad028e218236
--- /dev/null
+++ b/query-engine/query-engine-wasm/src/proxy.rs
@@ -0,0 +1,107 @@
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+// This code will likely live in a separate crate, but for now it's here.
+
+use async_trait::async_trait;
+use js_sys::{Function as JsFunction, JsString, Object as JsObject, Promise as JsPromise, Reflect as JsReflect};
+use serde::{de::DeserializeOwned, Serialize};
+use wasm_bindgen::{JsCast, JsValue};
+
+type Result<T> = std::result::Result<T, JsValue>;
+
+pub struct CommonProxy {
+    /// Execute a query given as SQL, interpolating the given parameters.
+    query_raw: JsFunction,
+
+    /// Execute a query given as SQL, interpolating the given parameters and
+    /// returning the number of affected rows.
+    execute_raw: JsFunction,
+
+    /// Return the flavour for this driver.
+    pub(crate) flavour: String,
+}
+
+impl CommonProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let query_raw = JsReflect::get(driver, &"queryRaw".into())?.dyn_into::<JsFunction>()?;
+        let execute_raw = JsReflect::get(driver, &"executeRaw".into())?.dyn_into::<JsFunction>()?;
+        let flavour: String = JsReflect::get(driver, &"flavour".into())?
+            .dyn_into::<JsString>()?
+            .into();
+
+        let common_proxy = Self {
+            query_raw,
+            execute_raw,
+            flavour,
+        };
+        Ok(common_proxy)
+    }
+}
+
+pub struct DriverProxy {
+    start_transaction: JsFunction,
+}
+
+impl DriverProxy {
+    pub(crate) fn new(driver: &JsObject) -> Result<Self> {
+        let start_transaction = JsReflect::get(driver, &"startTransaction".into())?.dyn_into::<JsFunction>()?;
+
+        let driver_proxy = Self { start_transaction };
+        Ok(driver_proxy)
+    }
+}
+
+pub struct JsQueryable {
+    inner: CommonProxy,
+    driver_proxy: DriverProxy,
+}
+
+impl JsQueryable {
+    pub fn new(inner: CommonProxy, driver_proxy: DriverProxy) -> Self {
+        Self { inner, driver_proxy }
+    }
+}
+
+pub fn from_wasm(driver: JsObject) -> Result<JsQueryable> {
+    let common_proxy = CommonProxy::new(&driver)?;
+    let driver_proxy = DriverProxy::new(&driver)?;
+
+    let js_queryable = JsQueryable::new(common_proxy, driver_proxy);
+    Ok(js_queryable)
+}
+
+#[async_trait(?Send)]
+trait JsAsyncFunc {
+    async fn call1_async<T, R>(&self, arg1: T) -> Result<R>
+    where
+        T: Serialize,
+        R: DeserializeOwned;
+
+    fn call0_sync<R>(&self) -> Result<R>
+    where
+        R: DeserializeOwned;
+}
+
+#[async_trait(?Send)]
+impl JsAsyncFunc for JsFunction {
+    async fn call1_async<T, R>(&self, arg1: T) -> Result<R>
+    where
+        T: Serialize,
+        R: DeserializeOwned,
+    {
+        let arg1 = serde_wasm_bindgen::to_value(&arg1).map_err(|err| js_sys::Error::new(&err.to_string()))?;
+        let promise = self.call1(&JsValue::null(), &arg1)?;
+        let future = wasm_bindgen_futures::JsFuture::from(JsPromise::from(promise));
+        let value = future.await?;
+        serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string()))
+    }
+
+    fn call0_sync<R>(&self) -> Result<R>
+    where
+        R: DeserializeOwned,
+    {
+        let value = self.call0(&JsValue::null())?;
+        serde_wasm_bindgen::from_value(value).map_err(|err| js_sys::Error::new(&err.to_string()))
+    }
+}
From d39b430484e59dd184d26e6ef4e4918fd23ecf02 Mon Sep 17 00:00:00 2001
From: Jan Piotrowski
Date: Mon, 9 Oct 2023 17:19:51 +0200
Subject: [PATCH 091/128] ci(smoke-tests): Move errors step into its own job to make clear this is not driver adapter dependent (#4344)

---
 .../workflows/driver-adapter-smoke-tests.yml | 47 +++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml
index aa653ea7a57d..802e3188dedc 100644
--- a/.github/workflows/driver-adapter-smoke-tests.yml
+++ b/.github/workflows/driver-adapter-smoke-tests.yml
@@ -37,6 +37,7 @@ jobs:
         ports:
           - 5432:5432
 
+      # via package.json rewritten into DATABASE_URL before scripts are run
       env:
         JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }}
         JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }}
@@ -78,6 +79,52 @@
       if: always()
       working-directory: ./query-engine/driver-adapters/js/smoke-test-js
 
+
+  driver-adapter-smoke-tests-errors:
+    name: Errors
+
+    runs-on: ubuntu-latest
+
+    # services:
+    #   postgres:
+    #     image: postgres
+    #     env:
+    #       POSTGRES_PASSWORD: postgres
+    #     options: 
>-
+    #       --health-cmd pg_isready
+    #       --health-interval 10s
+    #       --health-timeout 5s
+    #       --health-retries 5
+    #     ports:
+    #       - 5432:5432
+
+    env:
+      # via package.json rewritten into DATABASE_URL before scripts are run
+      JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: dtolnay/rust-toolchain@stable
+
+      - uses: pnpm/action-setup@v2
+        with:
+          version: 8
+      - uses: actions/setup-node@v3
+        with:
+          node-version: 18
+          #cache: 'pnpm'
+
+      - name: Compile Query Engine
+        run: cargo build -p query-engine-node-api
+
+      - name: Install Dependencies (Driver Adapters)
+        run: pnpm install
+        working-directory: ./query-engine/driver-adapters/js
+      - name: Build Driver Adapters
+        run: pnpm build
+        working-directory: ./query-engine/driver-adapters/js
+
+      - name: pnpm errors
+        run: pnpm errors
+        if: always()
From 72963d8dd2e8e493a6496c242a16bffb8383efe7 Mon Sep 17 00:00:00 2001
From: pierre
Date: Mon, 9 Oct 2023 09:55:32 -1000
Subject: [PATCH 092/128] chore: add badges for driver adapters readme (#4347)

---
 query-engine/driver-adapters/js/README.md | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md
index e5e64c60dfc8..926d6db2b0a8 100644
--- a/query-engine/driver-adapters/js/README.md
+++ b/query-engine/driver-adapters/js/README.md
@@ -1,5 +1,13 @@
 # Prisma Driver Adapters
+
+<!-- npm version badges for the driver adapter packages -->
+ This TypeScript monorepo contains the following packages: - `@prisma/driver-adapter-utils` - Internal set of utilities and types for Prisma's driver adapters. From 0722655adf11cf3e884a92f6333a101e2adb1898 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Wed, 11 Oct 2023 13:04:08 +0200 Subject: [PATCH 093/128] ci: add ignored paths & skip running Buildkite tests when git diff is empty (#4355) --- .buildkite/engineer | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 0b1adc2d8011..701e57fa9229 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -1,5 +1,36 @@ #!/usr/bin/env bash +set -e + +if [[ -z "$2" ]]; then + printf "Error: the name of the pipeline must be provided.\nExample: './engineer pipeline test'" 1>&2 + exit 1 +else + echo "We are in the $2 pipeline." +fi + +# Checks what's the diff with the previous commit, +# excluding some paths that do not need a run, +# because they do not affect tests running in Buildkite. +GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) + +# $2 is either "test" or "build", depending on the pipeline +# Example: ./.buildkite/engineer pipeline test +# We only want to check for changes and skip in the test pipeline. +if [[ "$2" == "test" ]]; then + # Checking if GIT_DIFF is empty + # If it's empty then it's most likely that there are changes but they are in ignored paths. + # So we do not start Buildkite + if [ -z "${GIT_DIFF}" ]; then + echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." + exit 0 + else + # Note that printf works better for displaying line returns in CI + printf "Changes found for the previous commit in paths that are not ignored: \n\n${GIT_DIFF}\n\nThis run will continue...\n" + fi +fi + +# Check OS if [[ "$OSTYPE" == "linux-gnu" ]]; then OS=linux-amzn elif [[ "$OSTYPE" == "darwin"* ]]; then @@ -12,7 +43,6 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). - set -e curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.59/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer @@ -22,6 +52,5 @@ if ! 
type "engineer" &> /dev/null; then rm -rf ./engineer else # Already installed on the system - set -e engineer "$@" -fi +fi \ No newline at end of file From c5d4d1ed59b9a9e63e53f19d61a28901ab3fd7b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Wed, 11 Oct 2023 15:01:14 +0200 Subject: [PATCH 094/128] tests: driver adapters: add build job for planetscale engine tests (#4356) --- .../query-engine-driver-adapters.yml | 2 ++ Makefile | 6 ++++ docker-compose.yml | 22 +++++++++++++++ docker/planetscale_proxy/Dockerfile | 15 ++++++++++ .../test-configs/planetscale-vitess8 | 7 +++++ .../connector-test-kit-executor/package.json | 2 ++ .../connector-test-kit-executor/src/index.ts | 28 +++++++++++++++++-- .../driver-adapters/js/pnpm-lock.yaml | 6 ++++ 8 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 docker/planetscale_proxy/Dockerfile create mode 100644 query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index dea1726c56c9..50f86575a8a7 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -31,6 +31,8 @@ jobs: setup_task: 'dev-neon-ws-postgres13' - name: 'libsql' setup_task: 'dev-libsql-sqlite' + - name: 'planetscale' + setup_task: 'dev-planetscale-vitess8' node_version: ['18'] env: LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/Makefile b/Makefile index 3a683b824e3b..06e10dbd56c7 100644 --- a/Makefile +++ b/Makefile @@ -249,6 +249,12 @@ start-vitess_8_0: dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) +start-planetscale-vitess8: build-qe-napi build-connector-kit-js + docker compose -f docker-compose.yml up -d --remove-orphans planetscale-vitess8 + +dev-planetscale-vitess8: start-planetscale-vitess8 + cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) + ###################### # Local dev commands # ###################### diff --git a/docker-compose.yml b/docker-compose.yml index 1988f864d304..6f7c6de4ca07 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -121,6 +121,23 @@ services: networks: - databases + planetscale-vitess8: + build: ./docker/planetscale_proxy + environment: + MYSQL_HOST: 'vitess-test-8_0' + MYSQL_PORT: 33807 + MYSQL_DATABASE: 'test-0000-00000000' + ports: + - '8085:8085' + depends_on: + - vitess-test-8_0 + restart: always + healthcheck: + test: [ 'CMD', 'nc', '-z', '127.0.0.1', '8085' ] + interval: 5s + timeout: 2s + retries: 20 + postgres14: image: postgres:14 restart: always @@ -230,6 +247,11 @@ services: FOREIGN_KEY_MODE: "disallow" TABLET_REFRESH_INTERVAL: "500ms" ENABLE_ONLINE_DDL: false + healthcheck: + test: [ 'CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807' ] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f diff --git a/docker/planetscale_proxy/Dockerfile b/docker/planetscale_proxy/Dockerfile new file mode 100644 index 000000000000..ae5ec56329c2 --- /dev/null +++ b/docker/planetscale_proxy/Dockerfile @@ -0,0 +1,15 @@ +FROM golang:1 + +RUN apt update && apt install netcat-openbsd -y +RUN cd /go/src && git clone https://github.com/prisma/planetscale-proxy.git +RUN cd /go/src/planetscale-proxy && go install . 
+
+ENTRYPOINT /go/bin/planetscale-proxy \
+  -http-addr=0.0.0.0 \
+  -http-port=8085 \
+  -mysql-addr=$MYSQL_HOST \
+  -mysql-port=$MYSQL_PORT \
+  -mysql-idle-timeout=1200s \
+  -mysql-no-pass \
+  -mysql-max-rows=1000 \
+  -mysql-dbname=$MYSQL_DATABASE
diff --git a/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8 b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8
new file mode 100644
index 000000000000..48c89c79427c
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/test-configs/planetscale-vitess8
@@ -0,0 +1,7 @@
+{
+  "connector": "vitess",
+  "version": "8.0",
+  "driver_adapter": "planetscale",
+  "driver_adapter_config": { "proxyUrl": "http://root:root@127.0.0.1:8085" },
+  "external_test_executor": "default"
+}
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
index be6a54a315fb..4a5f093388e6 100644
--- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json
@@ -15,9 +15,11 @@
   "dependencies": {
     "@libsql/client": "0.3.5",
     "@neondatabase/serverless": "^0.6.0",
+    "@planetscale/database": "1.11.0",
     "@prisma/adapter-libsql": "workspace:*",
     "@prisma/adapter-neon": "workspace:*",
     "@prisma/adapter-pg": "workspace:*",
+    "@prisma/adapter-planetscale": "workspace:*",
    "@prisma/driver-adapter-utils": "workspace:*",
    "@types/pg": "^8.10.2",
    "pg": "^8.11.3",
diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
index 68664272a6ce..a36e0e360514 100644
--- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
+++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts
@@ -9,18 +9,28 @@ import * as prismaPg from '@prisma/adapter-pg'
 
 // neon dependencies
 import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless'
-import { WebSocket } from 'undici'
+import { fetch, WebSocket } from 'undici'
 import * as prismaNeon from '@prisma/adapter-neon'
 
 // libsql dependencies
 import { createClient } from '@libsql/client'
 import { PrismaLibSQL } from '@prisma/adapter-libsql'
 
+// planetscale dependencies
+import { connect as planetscaleConnect } from '@planetscale/database'
+import { PrismaPlanetScale } from '@prisma/adapter-planetscale'
+
+
 import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils";
 
 const SUPPORTED_ADAPTERS: Record<string, (_: string) => Promise<DriverAdapter>>
-    = {"pg": pgAdapter, "neon:ws" : neonWsAdapter, "libsql": libsqlAdapter};
+    = {
+        "pg": pgAdapter,
+        "neon:ws" : neonWsAdapter,
+        "libsql": libsqlAdapter,
+        "planetscale": planetscaleAdapter,
+    };
 
 // conditional debug logging based on LOG_LEVEL env var
 const debug = (() => {
@@ -250,4 +260,18 @@ async function libsqlAdapter(url: string): Promise<DriverAdapter> {
     return new PrismaLibSQL(libsql)
 }
 
+async function planetscaleAdapter(url: string): Promise<DriverAdapter> {
+    const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxyUrl ?? 
'' + if (proxyURL == '') { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); + } + + const connection = planetscaleConnect({ + url: proxyURL, + fetch, + }) + + return new PrismaPlanetScale(connection) +} + main().catch(err) diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index efa3787712e4..5236ba2ffc11 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -75,6 +75,9 @@ importers: '@neondatabase/serverless': specifier: ^0.6.0 version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 '@prisma/adapter-libsql': specifier: workspace:* version: link:../adapter-libsql @@ -84,6 +87,9 @@ importers: '@prisma/adapter-pg': specifier: workspace:* version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: workspace:* + version: link:../adapter-planetscale '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils From 2b9174994bde8659013b09b8b0025d570710223c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Wed, 11 Oct 2023 18:21:51 +0200 Subject: [PATCH 095/128] ci(docker): add --wait for all & healthcheck for vitess (#4353) --- Makefile | 62 +++++++++++++++++++++++----------------------- docker-compose.yml | 59 +++++++++++++++++++++++++++++-------------- 2 files changed, 71 insertions(+), 50 deletions(-) diff --git a/Makefile b/Makefile index 06e10dbd56c7..0c3e1541e632 100644 --- a/Makefile +++ b/Makefile @@ -67,7 +67,7 @@ test-qe-black-box: build-qe ########################### all-dbs-up: - docker compose -f docker-compose.yml up -d --remove-orphans + docker compose -f docker-compose.yml up --wait -d --remove-orphans all-dbs-down: docker compose -f docker-compose.yml down -v --remove-orphans @@ -81,31 +81,31 @@ dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) start-postgres9: - docker compose -f docker-compose.yml up -d --remove-orphans postgres9 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 dev-postgres9: start-postgres9 cp $(CONFIG_PATH)/postgres9 $(CONFIG_FILE) start-postgres10: - docker compose -f docker-compose.yml up -d --remove-orphans postgres10 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres10 dev-postgres10: start-postgres10 cp $(CONFIG_PATH)/postgres10 $(CONFIG_FILE) start-postgres11: - docker compose -f docker-compose.yml up -d --remove-orphans postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres11 dev-postgres11: start-postgres11 cp $(CONFIG_PATH)/postgres11 $(CONFIG_FILE) start-postgres12: - docker compose -f docker-compose.yml up -d --remove-orphans postgres12 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres12 dev-postgres12: start-postgres12 cp $(CONFIG_PATH)/postgres12 $(CONFIG_FILE) start-postgres13: - docker compose -f docker-compose.yml up -d --remove-orphans postgres13 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres13 dev-postgres13: start-postgres13 cp $(CONFIG_PATH)/postgres13 $(CONFIG_FILE) @@ -116,120 +116,120 @@ dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) start-neon-postgres13: build-qe-napi build-connector-kit-js - docker compose -f docker-compose.yml up -d --remove-orphans neon-postgres13 + docker compose -f docker-compose.yml up --wait -d 
--remove-orphans neon-postgres13 dev-neon-ws-postgres13: start-neon-postgres13 cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) start-postgres14: - docker compose -f docker-compose.yml up -d --remove-orphans postgres14 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 dev-postgres14: start-postgres14 cp $(CONFIG_PATH)/postgres14 $(CONFIG_FILE) start-postgres15: - docker compose -f docker-compose.yml up -d --remove-orphans postgres15 + docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres15 dev-postgres15: start-postgres15 cp $(CONFIG_PATH)/postgres15 $(CONFIG_FILE) start-cockroach_23_1: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_23_1 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_23_1 dev-cockroach_23_1: start-cockroach_23_1 cp $(CONFIG_PATH)/cockroach_23_1 $(CONFIG_FILE) start-cockroach_22_2: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_2 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_2 dev-cockroach_22_2: start-cockroach_22_2 cp $(CONFIG_PATH)/cockroach_22_2 $(CONFIG_FILE) start-cockroach_22_1_0: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_22_1_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_22_1_0 dev-cockroach_22_1_0: start-cockroach_22_1_0 cp $(CONFIG_PATH)/cockroach_22_1 $(CONFIG_FILE) start-cockroach_21_2_0_patched: - docker compose -f docker-compose.yml up -d --remove-orphans cockroach_21_2_0_patched + docker compose -f docker-compose.yml up --wait -d --remove-orphans cockroach_21_2_0_patched dev-cockroach_21_2_0_patched: start-cockroach_21_2_0_patched cp $(CONFIG_PATH)/cockroach_21_2_0_patched $(CONFIG_FILE) dev-pgbouncer: - docker compose -f docker-compose.yml up -d --remove-orphans pgbouncer postgres11 + docker compose -f docker-compose.yml up --wait -d --remove-orphans pgbouncer postgres11 start-mysql_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-7 dev-mysql: start-mysql_5_7 cp $(CONFIG_PATH)/mysql57 $(CONFIG_FILE) start-mysql_5_6: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-5-6 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-5-6 dev-mysql_5_6: start-mysql_5_6 cp $(CONFIG_PATH)/mysql56 $(CONFIG_FILE) start-mysql_8: - docker compose -f docker-compose.yml up -d --remove-orphans mysql-8-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mysql-8-0 dev-mysql8: start-mysql_8 cp $(CONFIG_PATH)/mysql8 $(CONFIG_FILE) start-mysql_mariadb: - docker compose -f docker-compose.yml up -d --remove-orphans mariadb-10-0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mariadb-10-0 dev-mariadb: start-mysql_mariadb cp $(CONFIG_PATH)/mariadb $(CONFIG_FILE) start-mssql_2019: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2019 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2019 dev-mssql2019: start-mssql_2019 cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2022: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2022 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2022 dev-mssql2022: start-mssql_2022 cp $(CONFIG_PATH)/sqlserver2022 $(CONFIG_FILE) start-mssql_edge: - docker compose -f docker-compose.yml up -d --remove-orphans azure-edge + docker compose -f 
docker-compose.yml up --wait -d --remove-orphans azure-edge dev-mssql_edge: start-mssql_edge cp $(CONFIG_PATH)/sqlserver2019 $(CONFIG_FILE) start-mssql_2017: - docker compose -f docker-compose.yml up -d --remove-orphans mssql-2017 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mssql-2017 dev-mssql2017: start-mssql_2017 cp $(CONFIG_PATH)/sqlserver2017 $(CONFIG_FILE) start-mongodb42-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42-single start-mongodb44-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44-single start-mongodb4-single: start-mongodb44-single start-mongodb5-single: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5-single + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5-single start-mongodb_4_2: - docker compose -f docker-compose.yml up -d --remove-orphans mongo42 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo42 start-mongodb_4_4: - docker compose -f docker-compose.yml up -d --remove-orphans mongo44 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo44 dev-mongodb_4_4: start-mongodb_4_4 cp $(CONFIG_PATH)/mongodb44 $(CONFIG_FILE) start-mongodb_5: - docker compose -f docker-compose.yml up -d --remove-orphans mongo5 + docker compose -f docker-compose.yml up --wait -d --remove-orphans mongo5 dev-mongodb_5: start-mongodb_5 cp $(CONFIG_PATH)/mongodb5 $(CONFIG_FILE) @@ -238,13 +238,13 @@ dev-mongodb_4_2: start-mongodb_4_2 cp $(CONFIG_PATH)/mongodb42 $(CONFIG_FILE) start-vitess_5_7: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-5_7 vitess-shadow-5_7 dev-vitess_5_7: start-vitess_5_7 cp $(CONFIG_PATH)/vitess_5_7 $(CONFIG_FILE) start-vitess_8_0: - docker compose -f docker-compose.yml up -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 + docker compose -f docker-compose.yml up --wait -d --remove-orphans vitess-test-8_0 vitess-shadow-8_0 dev-vitess_8_0: start-vitess_8_0 cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE) @@ -299,7 +299,7 @@ use-local-query-engine: cp target/release/query-engine $(PRISMA2_BINARY_PATH)/query-engine-darwin show-metrics: - docker compose -f docker-compose.yml up -d --remove-orphans grafana prometheus + docker compose -f docker-compose.yml up --wait -d --remove-orphans grafana prometheus ## OpenTelemetry otel: diff --git a/docker-compose.yml b/docker-compose.yml index 6f7c6de4ca07..97c9ed79e1c7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -228,11 +228,18 @@ services: - 33577:33577 environment: PORT: 33574 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-test-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e @@ -241,14 +248,15 @@ services: - 33807:33807 environment: PORT: 33804 - KEYSPACES: "test" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: 
"disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'test' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' healthcheck: - test: [ 'CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807' ] + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] interval: 5s timeout: 2s retries: 20 @@ -260,11 +268,18 @@ services: - 33578:33577 environment: PORT: 33574 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33577'] + interval: 5s + timeout: 2s + retries: 20 vitess-shadow-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e @@ -273,12 +288,18 @@ services: - 33808:33807 environment: PORT: 33804 - KEYSPACES: "shadow" - NUM_SHARDS: "1" - MYSQL_BIND_HOST: "0.0.0.0" - FOREIGN_KEY_MODE: "disallow" - TABLET_REFRESH_INTERVAL: "500ms" + KEYSPACES: 'shadow' + NUM_SHARDS: '1' + MYSQL_BIND_HOST: '0.0.0.0' + FOREIGN_KEY_MODE: 'disallow' ENABLE_ONLINE_DDL: false + MYSQL_MAX_CONNECTIONS: 100000 + TABLET_REFRESH_INTERVAL: '500ms' + healthcheck: + test: ['CMD', 'mysqladmin', 'ping', '-h127.0.0.1', '-P33807'] + interval: 5s + timeout: 2s + retries: 20 mssql-2017: image: mcr.microsoft.com/mssql/server:2017-latest From 26f1cbc8c0a969b85562a0a6b6e166229ce3876b Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 11 Oct 2023 18:25:16 +0200 Subject: [PATCH 096/128] .buildkite/engineer: fix shellcheck error (#4359) --- .buildkite/engineer | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 701e57fa9229..5e586ad2f0ec 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -26,7 +26,7 @@ if [[ "$2" == "test" ]]; then exit 0 else # Note that printf works better for displaying line returns in CI - printf "Changes found for the previous commit in paths that are not ignored: \n\n${GIT_DIFF}\n\nThis run will continue...\n" + printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF}" fi fi @@ -53,4 +53,4 @@ if ! 
type "engineer" &> /dev/null; then else # Already installed on the system engineer "$@" -fi \ No newline at end of file +fi From 95d40a41c29cc69d533d79a221164aaa1c81dff5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 12 Oct 2023 00:28:45 +0200 Subject: [PATCH 097/128] fix(deps): update prisma monorepo to v5.4.2 (patch) (#4350) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .../driver-adapters/js/pnpm-lock.yaml | 30 +++++++++---------- .../js/smoke-test-js/package.json | 4 +-- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 5236ba2ffc11..0b15115b5e23 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -137,8 +137,8 @@ importers: specifier: workspace:* version: link:../adapter-planetscale '@prisma/client': - specifier: 5.4.1 - version: 5.4.1(prisma@5.4.1) + specifier: 5.4.2 + version: 5.4.2(prisma@5.4.2) '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils @@ -162,8 +162,8 @@ importers: specifier: ^7.0.3 version: 7.0.3 prisma: - specifier: 5.4.1 - version: 5.4.1 + specifier: 5.4.2 + version: 5.4.2 tsx: specifier: ^3.12.7 version: 3.12.7 @@ -527,8 +527,8 @@ packages: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - /@prisma/client@5.4.1(prisma@5.4.1): - resolution: {integrity: sha512-xyD0DJ3gRNfLbPsC+YfMBBuLJtZKQfy1OD2qU/PZg+HKrr7SO+09174LMeTlWP0YF2wca9LxtVd4HnAiB5ketQ==} + /@prisma/client@5.4.2(prisma@5.4.2): + resolution: {integrity: sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA==} engines: {node: '>=16.13'} requiresBuild: true peerDependencies: @@ -537,16 +537,16 @@ packages: prisma: optional: true dependencies: - '@prisma/engines-version': 5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f - prisma: 5.4.1 + '@prisma/engines-version': 5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574 + prisma: 5.4.2 dev: false - /@prisma/engines-version@5.4.1-1.2f302df92bd8945e20ad4595a73def5b96afa54f: - resolution: {integrity: sha512-+nUQM/y8C+1GG5Ioeqcu6itFslCfxvQSAUVSMC9XM2G2Fcq0F4Afnp6m0pXF6X6iUBWen7jZBPmM9Qlq4Nr3/A==} + /@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574: + resolution: {integrity: sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA==} dev: false - /@prisma/engines@5.4.1: - resolution: {integrity: sha512-vJTdY4la/5V3N7SFvWRmSMUh4mIQnyb/MNoDjzVbh9iLmEC+uEykj/1GPviVsorvfz7DbYSQC4RiwmlEpTEvGA==} + /@prisma/engines@5.4.2: + resolution: {integrity: sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g==} requiresBuild: true /@types/debug@4.1.8: @@ -1259,13 +1259,13 @@ packages: /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - /prisma@5.4.1: - resolution: {integrity: sha512-op9PmU8Bcw5dNAas82wBYTG0yHnpq9/O3bhxbDBrNzwZTwBqsVCxxYRLf6wHNh9HVaDGhgjjHlu1+BcW8qdnBg==} + /prisma@5.4.2: + resolution: {integrity: sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg==} engines: {node: '>=16.13'} hasBin: true requiresBuild: true dependencies: - '@prisma/engines': 5.4.1 + '@prisma/engines': 5.4.2 
/punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 90f25234be1e..27d4220f41bc 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -51,7 +51,7 @@ "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.1", + "@prisma/client": "5.4.2", "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", @@ -61,7 +61,7 @@ "@types/node": "^20.5.1", "@types/pg": "^8.10.2", "cross-env": "^7.0.3", - "prisma": "5.4.1", + "prisma": "5.4.2", "tsx": "^3.12.7" } } From 7c57bf8256c25aabd52b7184bd92e8906c990612 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Thu, 12 Oct 2023 00:31:10 +0200 Subject: [PATCH 098/128] fix(smoke-tests): Fix expected error tests, other small changes (#4345) --- .../js/smoke-test-js/README.md | 6 +- .../smoke-test-js/prisma/mysql/schema.prisma | 5 ++ .../prisma/postgres/schema.prisma | 5 ++ .../driver-adapters/js/smoke-test-js/setup.sh | 7 ++ .../js/smoke-test-js/src/client/client.ts | 53 +++++++++---- .../js/smoke-test-js/src/libquery/libquery.ts | 77 +++++++++++-------- .../src/libquery/neon.http.test.ts | 2 +- 7 files changed, 103 insertions(+), 52 deletions(-) create mode 100644 query-engine/driver-adapters/js/smoke-test-js/setup.sh diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md index 204be94670b9..f1b81df5d268 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ b/query-engine/driver-adapters/js/smoke-test-js/README.md @@ -20,6 +20,8 @@ In the current directoy: pnpm i ``` +(or run `sh ./setup.sh`) + Anywhere in the repository: - Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine @@ -53,8 +55,8 @@ In the current directory: - Run `pnpm neon:ws:client` to test using `@prisma/client` - Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. For more fine-grained control: - - Run `pnpm neon:ws:http` to test using `libquery` - - Run `pnpm neon:ws:http` to test using `@prisma/client` + - Run `pnpm neon:http:libquery` to test using `libquery` + - Run `pnpm neon:http:client` to test using `@prisma/client` ### Pg diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma index 00418d57cc2c..59efb33a5594 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma @@ -67,6 +67,11 @@ model type_test_2 { datetime_column_null DateTime? 
@db.DateTime(3) } +model type_test_3 { + id Int @id @default(autoincrement()) + bytes Bytes +} + enum type_test_enum_column { value1 value2 diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma index 7319f07d8a60..7cd31f406b9d 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma @@ -51,6 +51,11 @@ model type_test_2 { datetime_column_null DateTime? @db.Timestamp(3) } +model type_test_3 { + id Int @id @default(autoincrement()) + bytes Bytes +} + model Child { c String @unique c_1 String diff --git a/query-engine/driver-adapters/js/smoke-test-js/setup.sh b/query-engine/driver-adapters/js/smoke-test-js/setup.sh new file mode 100644 index 000000000000..7654679db14e --- /dev/null +++ b/query-engine/driver-adapters/js/smoke-test-js/setup.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +cd .. || return +pnpm i && pnpm build +cargo build -p query-engine-node-api +cd smoke-test-js || exit +pnpm i \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts index dcae3c46437d..b23cf2d97fb8 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts @@ -22,11 +22,25 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { for (const adapter of [driverAdapter, null]) { const isUsingDriverAdapters = adapter !== null describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { + + it('expected error (on duplicate insert) as exception thrown / promise rejected', async () => { + const prisma = new PrismaClient({ adapter, log }) + + await assert.rejects( + async () => { + const result = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) + const result2 = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) + }, + (err) => { + assert.match(err.message, /unique/i); + return true; + }, + ); + + }) + it('batch queries', async () => { - const prisma = new PrismaClient({ - adapter, - log, - }) + const prisma = new PrismaClient({ adapter, log }) const queries: string[] = [] prisma.$on('query', ({ query }) => queries.push(query)) @@ -83,7 +97,11 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { }) if (['mysql'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) + if (isUsingDriverAdapters) { + assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', '-- Implicit "BEGIN" query via underlying driver']) + } else { + assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) + } } else if (['postgres'].includes(provider)) { assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) } @@ -102,6 +120,8 @@ export async function smokeTestClient(driverAdapter: DriverAdapter) { isolationLevel: 'Serializable', }) + console.log("queries", queries) + if (isUsingDriverAdapters) { assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') @@ -123,19 +143,22 @@ export async function smokeTestClient(driverAdapter: 
DriverAdapter) { ) }) - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) + }) - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) + } - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) + it('bytes type support', async () => { + const prisma = new PrismaClient({ adapter, log }) + + const result = await prisma.type_test_3.create({ + data: { + bytes: Buffer.from([1, 2, 3, 4]), + }, }) - } + + assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) + }) + }) } } diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index bdf50eab5669..4cdde4515615 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -4,8 +4,9 @@ import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' import { createQueryFn, initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' +import { PrismaNeonHTTP } from '@prisma/adapter-neon' -export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string) { +export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string, supportsTransactions = true) { const engine = initQueryEngine(adapter, prismaSchemaRelativePath) const flavour = adapter.flavour @@ -262,11 +263,14 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) it('create explicit transaction', async () => { + if(!supportsTransactions) return + const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') const tx_id = JSON.parse(startResponse).id - console.log('[nodejs] transaction id', tx_id) + assert.notStrictEqual(tx_id, undefined) + await doQuery( { action: 'findMany', @@ -282,42 +286,47 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc console.log('[nodejs] commited', commitResponse) }) - it('expected error', async () => { + it('expected error (on duplicate insert) as json result (not throwing error)', async () => { + // clean up first + await doQuery({ + modelName: 'Unique', + action: 'deleteMany', + query: { + selection: { + count: true, + }, + }, + }) + const result = await doQuery({ + modelName: 'Unique', + action: 'createOne', + query: { + arguments: { + data: { email: 'duplicate@example.com' }, + }, + selection: { + $scalars: true, + }, + }, + }) + console.log('[nodejs] error result1', JSON.stringify(result, null, 2)) - await assert.rejects( - async () => { - const result = await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - const result2 = await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' } - }, - selection: { - $scalars: true, - }, - }, - }) - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + const result2 = await doQuery({ + modelName: 'Unique', + action: 'createOne', + query: { + arguments: { + data: { email: 'duplicate@example.com' } }, - (err) => { - assert.match(err.message, 
/unique/i); - return true; + selection: { + $scalars: true, }, - ); + }, + }) + console.log('[nodejs] error result2', JSON.stringify(result2, null, 2)) + + // TODO assert that result2 includes `errors.error` (which should currently only pass on neon:ws) }) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts index ac165d29f584..02872b885fe3 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts @@ -12,5 +12,5 @@ describe('neon (HTTP)', () => { const adapter = new PrismaNeonHTTP(neonConnection) const driverAdapter = bindAdapter(adapter) - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') + smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma', false) }) From 8cae8897c727c193b0981844bb2d0fe7a99b0bf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 12 Oct 2023 10:37:01 +0200 Subject: [PATCH 099/128] test(driver-adapters) search-path in tests for neon and pg (#4352) --- .../js/connector-test-kit-executor/src/index.ts | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts index a36e0e360514..8a05a6b2e9aa 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts @@ -235,8 +235,17 @@ async function adapterFromEnv(url: string): Promise { return await SUPPORTED_ADAPTERS[adapter](url) } +function postgres_options(url: string): any { + let args: any = {connectionString: url} + const schemaName = new URL(url).searchParams.get('schema') + if (schemaName != null) { + args.options = `--search_path="${schemaName}"` + } + return args; +} + async function pgAdapter(url: string): Promise { - const pool = new pgDriver.Pool({connectionString: url}) + const pool = new pgDriver.Pool(postgres_options(url)) return new prismaPg.PrismaPg(pool) } @@ -251,7 +260,7 @@ async function neonWsAdapter(url: string): Promise { neonConfig.useSecureWebSocket = false neonConfig.pipelineConnect = false - const pool = new NeonPool({ connectionString: url }) + const pool = new NeonPool(postgres_options(url)) return new prismaNeon.PrismaNeon(pool) } From 6b41f7a3b9f16da11f37cf41d70e8149f95eb6cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Thu, 12 Oct 2023 11:12:07 +0200 Subject: [PATCH 100/128] Driver adapters phase 1 correctness: implement flavor-specific driver adapter conversions for postgres dates (#4351) * Implement date conversions based on given types * Rename (Postgres|Neon)ColumnType to ScalarColumnType * Copy changes over pg driver adapter * ignore case in type matching * Fix unit tests * Update query-engine/driver-adapters/js/adapter-neon/src/conversion.ts Co-authored-by: Alexey Orlenko * Remove peer dependency for array * Address feedback * Update lock file * driver-adapters: parse decimals as strings in arrays in neon and pg Fixes the following test: `writes::data_types::scalar_list::defaults::decimal::basic_write`. 
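
To make the intent concrete, a minimal sketch of the parsing approach (an illustrative standalone snippet, not part of this patch's diff; the `1231` OID for `NUMERIC_ARRAY` and the helper name are assumptions, mirroring the adapter changes below):

```ts
import { types } from 'pg'
import { parse as parseArray } from 'postgres-array'

// Identity normalizer: NUMERIC values stay strings, so arbitrary-precision
// decimals are not coerced into lossy JS floats.
const normalizeNumeric = (numeric: string): string => numeric

// postgres-array parses the `{1.1,2.2}` wire format and applies the element
// normalizer to every item, so array elements stay strings as well.
types.setTypeParser(1231 /* assumed NUMERIC_ARRAY OID */, (str: string) =>
  parseArray(str, normalizeNumeric),
)
```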
Fixes: https://github.com/prisma/team-orm/issues/435

---------

Co-authored-by: Alexey Orlenko
---
 quaint/src/ast/values.rs                      |  32 ++-
 .../src/model_extensions/scalar_field.rs      |   6 +-
 .../js/adapter-neon/package.json              |   3 +-
 .../js/adapter-neon/src/conversion.ts         | 195 ++++++++++++-----
 .../js/adapter-pg/package.json                |   3 +-
 .../js/adapter-pg/src/conversion.ts           | 197 ++++++++++++------
 .../driver-adapters/js/pnpm-lock.yaml         |   6 +
 .../driver-adapters/src/conversion.rs         |  22 +-
 .../src/conversion/postgres.rs                |  55 +++++
 query-engine/driver-adapters/src/proxy.rs     | 114 +++++-----
 query-engine/driver-adapters/src/queryable.rs |  48 +++--
 11 files changed, 476 insertions(+), 205 deletions(-)
 create mode 100644 query-engine/driver-adapters/src/conversion/postgres.rs

diff --git a/quaint/src/ast/values.rs b/quaint/src/ast/values.rs
index 081405374340..a1bf4f41a26d 100644
--- a/quaint/src/ast/values.rs
+++ b/quaint/src/ast/values.rs
@@ -33,13 +33,43 @@ where
     }
 }
 
+/// A native-column type, i.e. the connector-specific type of the column.
+#[derive(Debug, Clone, PartialEq)]
+pub struct NativeColumnType<'a>(Cow<'a, str>);
+
+impl<'a> std::ops::Deref for NativeColumnType<'a> {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'a> From<&'a str> for NativeColumnType<'a> {
+    fn from(s: &'a str) -> Self {
+        Self(Cow::Owned(s.to_uppercase()))
+    }
+}
+
 #[derive(Debug, Clone, PartialEq)]
 pub struct Value<'a> {
     pub typed: ValueType<'a>,
-    pub native_column_type: Option<Cow<'a, str>>,
+    pub native_column_type: Option<NativeColumnType<'a>>,
 }
 
 impl<'a> Value<'a> {
+    /// Returns the native column type of the value, if any, in the form
+    /// of an uppercase string, e.g. "VARCHAR", "BYTEA", "DATE", "TIMETZ".
+    pub fn native_column_type_name(&'a self) -> Option<&'a str> {
+        self.native_column_type.as_deref()
+    }
+
+    /// Changes the value to include information about the native column type.
+    pub fn with_native_column_type<T: Into<NativeColumnType<'a>>>(mut self, column_type: Option<T>) -> Self {
+        self.native_column_type = column_type.map(|ct| ct.into());
+        self
+    }
+
     /// Creates a new 32-bit signed integer.
pub fn int32(value: I) -> Self where diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs index b8ea590f25dc..7eb414dd92a8 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/scalar_field.rs @@ -14,7 +14,7 @@ pub(crate) trait ScalarFieldExt { impl ScalarFieldExt for ScalarField { fn value<'a>(&self, pv: PrismaValue, ctx: &Context<'_>) -> Value<'a> { - match (pv, self.type_identifier()) { + let value = match (pv, self.type_identifier()) { (PrismaValue::String(s), _) => s.into(), (PrismaValue::Float(f), _) => f.into(), (PrismaValue::Boolean(b), _) => b.into(), @@ -76,7 +76,9 @@ impl ScalarFieldExt for ScalarField { TypeIdentifier::Bytes => Value::null_bytes(), TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), }, - } + }; + + value.with_native_column_type(self.native_type().map(|nt| nt.name())) } fn type_family(&self) -> TypeFamily { diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json index 03d19f6eeb0d..02005a13572f 100644 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ b/query-engine/driver-adapters/js/adapter-neon/package.json @@ -18,7 +18,8 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*", + "postgres-array": "^3.0.2" }, "devDependencies": { "@neondatabase/serverless": "^0.6.0" diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 932461e3bc3b..9f6486362d78 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -1,10 +1,11 @@ import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' import { types } from '@neondatabase/serverless' +import { parse as parseArray } from 'postgres-array' -const NeonColumnType = types.builtins +const ScalarColumnType = types.builtins /** - * PostgreSQL array column types (not defined in NeonColumnType). + * PostgreSQL array column types (not defined in ScalarColumnType). 
*/ const ArrayColumnType = { BOOL_ARRAY: 1000, @@ -35,45 +36,46 @@ const ArrayColumnType = { */ export function fieldToColumnType(fieldTypeId: number): ColumnType { switch (fieldTypeId) { - case NeonColumnType['INT2']: - case NeonColumnType['INT4']: + case ScalarColumnType['INT2']: + case ScalarColumnType['INT4']: return ColumnTypeEnum.Int32 - case NeonColumnType['INT8']: + case ScalarColumnType['INT8']: return ColumnTypeEnum.Int64 - case NeonColumnType['FLOAT4']: + case ScalarColumnType['FLOAT4']: return ColumnTypeEnum.Float - case NeonColumnType['FLOAT8']: + case ScalarColumnType['FLOAT8']: return ColumnTypeEnum.Double - case NeonColumnType['BOOL']: + case ScalarColumnType['BOOL']: return ColumnTypeEnum.Boolean - case NeonColumnType['DATE']: + case ScalarColumnType['DATE']: return ColumnTypeEnum.Date - case NeonColumnType['TIME']: + case ScalarColumnType['TIME']: + case ScalarColumnType['TIMETZ']: return ColumnTypeEnum.Time - case NeonColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMPTZ']: return ColumnTypeEnum.DateTime - case NeonColumnType['NUMERIC']: - case NeonColumnType['MONEY']: + case ScalarColumnType['NUMERIC']: + case ScalarColumnType['MONEY']: return ColumnTypeEnum.Numeric - case NeonColumnType['JSON']: - case NeonColumnType['JSONB']: + case ScalarColumnType['JSON']: + case ScalarColumnType['JSONB']: return ColumnTypeEnum.Json - case NeonColumnType['UUID']: + case ScalarColumnType['UUID']: return ColumnTypeEnum.Uuid - case NeonColumnType['OID']: + case ScalarColumnType['OID']: return ColumnTypeEnum.Int64 - case NeonColumnType['BPCHAR']: - case NeonColumnType['TEXT']: - case NeonColumnType['VARCHAR']: - case NeonColumnType['BIT']: - case NeonColumnType['VARBIT']: - case NeonColumnType['INET']: - case NeonColumnType['CIDR']: - case NeonColumnType['XML']: + case ScalarColumnType['BPCHAR']: + case ScalarColumnType['TEXT']: + case ScalarColumnType['VARCHAR']: + case ScalarColumnType['BIT']: + case ScalarColumnType['VARBIT']: + case ScalarColumnType['INET']: + case ScalarColumnType['CIDR']: + case ScalarColumnType['XML']: return ColumnTypeEnum.Text - case NeonColumnType['BYTEA']: + case ScalarColumnType['BYTEA']: return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: case ArrayColumnType.INT4_ARRAY: return ColumnTypeEnum.Int32Array @@ -116,6 +118,88 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { } } +function normalize_array(element_normalizer: (string) => string): (string) => string[] { + return (str) => parseArray(str, element_normalizer) +} + +/****************************/ +/* Time-related data-types */ +/****************************/ + +function normalize_numeric(numeric: string): string { + return numeric +} + +types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) +types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) + +/****************************/ +/* Time-related data-types */ +/****************************/ + + +function normalize_date(date: string): string { + return date +} + +function normalize_timestamp(time: string): string { + return time +} + +function normalize_timestampz(time: string): string { + return time.split("+")[0] +} + +/* + * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f + */ + +function normalize_time(time: string): string { + return time +} + +function normalize_timez(time: string): string { + // Although it might be controversial, UTC is assumed in consistency with the behavior 
of rust postgres driver + // in quaint. See quaint/src/connector/postgres/conversion.rs + return time.split("+")[0] +} + +types.setTypeParser(ScalarColumnType.TIME, normalize_time) +types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) +types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) + +/* + * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD + */ + +types.setTypeParser(ScalarColumnType.DATE, normalize_date) +types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) + + +/* + * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format + * ex: 1996-12-19T16:39:57-08:00 + */ +types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) +types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) +types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) + +/******************/ +/* Money handling */ +/******************/ + +function normalize_money(money: string): string { + return money.slice(1) +} + +types.setTypeParser(ScalarColumnType.MONEY, normalize_money) +types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) + + +/*****************/ +/* JSON handling */ +/*****************/ + /** * JsonNull are stored in JSON strings as the string "null", distinguishable from * the `null` value which is used by the driver to represent the database NULL. @@ -126,22 +210,17 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and * convert it to QuaintValue::Json(Some(Null)). */ -function convertJson(json: string): unknown { +function toJson(json: string): unknown { return (json === 'null') ? JsonNullMarker : JSON.parse(json) } -// Original BYTEA parser -const parsePgBytes = types.getTypeParser(NeonColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} +types.setTypeParser(ScalarColumnType.JSONB, toJson) +types.setTypeParser(ScalarColumnType.JSON, toJson) + +/************************/ +/* Binary data handling */ +/************************/ /** * TODO: @@ -154,14 +233,26 @@ function encodeBuffer(buffer: Buffer) { return Array.from(new Uint8Array(buffer)) } -// return string instead of JavaScript Date object -types.setTypeParser(NeonColumnType.TIME, date => date) -types.setTypeParser(NeonColumnType.DATE, date => date) -types.setTypeParser(NeonColumnType.TIMESTAMP, date => date) -types.setTypeParser(NeonColumnType.JSONB, convertJson) -types.setTypeParser(NeonColumnType.JSON, convertJson) -types.setTypeParser(NeonColumnType.MONEY, money => money.slice(1)) -types.setTypeParser(NeonColumnType.BYTEA, convertBytes) +/* + * BYTEA - arbitrary raw binary strings + */ + +const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer +/** + * Convert bytes to a JSON-encodable representation since we can't + * currently send a parsed Buffer or ArrayBuffer across JS to Rust + * boundary. 
+ */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return encodeBuffer(buffer) +} + +types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) + +/* + * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] @@ -169,13 +260,3 @@ types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) return buffers.map(encodeBuffer) }) - -const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] - -types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) - -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => - parseTextArray(moneyArray).map((money) => money.slice(1)), -) diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json index 3573d33bc161..7514569c562a 100644 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ b/query-engine/driver-adapters/js/adapter-pg/package.json @@ -18,7 +18,8 @@ "license": "Apache-2.0", "sideEffects": false, "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" + "@prisma/driver-adapter-utils": "workspace:*", + "postgres-array": "^3.0.2" }, "devDependencies": { "pg": "^8.11.3", diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index a1c8ce7c5e6a..69e8f1d9dec1 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -1,10 +1,11 @@ import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' import { types } from 'pg' +import { parse as parseArray } from 'postgres-array' -const PgColumnType = types.builtins +const ScalarColumnType = types.builtins /** - * PostgreSQL array column types (not defined in PgColumnType). + * PostgreSQL array column types (not defined in ScalarColumnType). 
*/ const ArrayColumnType = { BOOL_ARRAY: 1000, @@ -35,45 +36,46 @@ const ArrayColumnType = { */ export function fieldToColumnType(fieldTypeId: number): ColumnType { switch (fieldTypeId) { - case PgColumnType['INT2']: - case PgColumnType['INT4']: + case ScalarColumnType['INT2']: + case ScalarColumnType['INT4']: return ColumnTypeEnum.Int32 - case PgColumnType['INT8']: + case ScalarColumnType['INT8']: return ColumnTypeEnum.Int64 - case PgColumnType['FLOAT4']: + case ScalarColumnType['FLOAT4']: return ColumnTypeEnum.Float - case PgColumnType['FLOAT8']: + case ScalarColumnType['FLOAT8']: return ColumnTypeEnum.Double - case PgColumnType['BOOL']: + case ScalarColumnType['BOOL']: return ColumnTypeEnum.Boolean - case PgColumnType['DATE']: + case ScalarColumnType['DATE']: return ColumnTypeEnum.Date - case PgColumnType['TIME']: + case ScalarColumnType['TIME']: + case ScalarColumnType['TIMETZ']: return ColumnTypeEnum.Time - case PgColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMP']: + case ScalarColumnType['TIMESTAMPTZ']: return ColumnTypeEnum.DateTime - case PgColumnType['NUMERIC']: - case PgColumnType['MONEY']: + case ScalarColumnType['NUMERIC']: + case ScalarColumnType['MONEY']: return ColumnTypeEnum.Numeric - case PgColumnType['JSON']: - case PgColumnType['JSONB']: + case ScalarColumnType['JSON']: + case ScalarColumnType['JSONB']: return ColumnTypeEnum.Json - case PgColumnType['UUID']: + case ScalarColumnType['UUID']: return ColumnTypeEnum.Uuid - case PgColumnType['OID']: + case ScalarColumnType['OID']: return ColumnTypeEnum.Int64 - case PgColumnType['BPCHAR']: - case PgColumnType['TEXT']: - case PgColumnType['VARCHAR']: - case PgColumnType['BIT']: - case PgColumnType['VARBIT']: - case PgColumnType['INET']: - case PgColumnType['CIDR']: - case PgColumnType['XML']: + case ScalarColumnType['BPCHAR']: + case ScalarColumnType['TEXT']: + case ScalarColumnType['VARCHAR']: + case ScalarColumnType['BIT']: + case ScalarColumnType['VARBIT']: + case ScalarColumnType['INET']: + case ScalarColumnType['CIDR']: + case ScalarColumnType['XML']: return ColumnTypeEnum.Text - case PgColumnType['BYTEA']: + case ScalarColumnType['BYTEA']: return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: case ArrayColumnType.INT4_ARRAY: return ColumnTypeEnum.Int32Array @@ -116,6 +118,88 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { } } +function normalize_array(element_normalizer: (string) => string): (string) => string[] { + return (str) => parseArray(str, element_normalizer) +} + +/****************************/ +/* Time-related data-types */ +/****************************/ + +function normalize_numeric(numeric: string): string { + return numeric +} + +types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) +types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) + +/****************************/ +/* Time-related data-types */ +/****************************/ + + +function normalize_date(date: string): string { + return date +} + +function normalize_timestamp(time: string): string { + return time +} + +function normalize_timestampz(time: string): string { + return time.split("+")[0] +} + +/* + * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f + */ + +function normalize_time(time: string): string { + return time +} + +function normalize_timez(time: string): string { + // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver + // in quaint. 
See quaint/src/connector/postgres/conversion.rs + return time.split("+")[0] +} + +types.setTypeParser(ScalarColumnType.TIME, normalize_time) +types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) +types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) + +/* + * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD + */ + +types.setTypeParser(ScalarColumnType.DATE, normalize_date) +types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) + + +/* + * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format + * ex: 1996-12-19T16:39:57-08:00 + */ +types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) +types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) +types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) + +/******************/ +/* Money handling */ +/******************/ + +function normalize_money(money: string): string { + return money.slice(1) +} + +types.setTypeParser(ScalarColumnType.MONEY, normalize_money) +types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) + + +/*****************/ +/* JSON handling */ +/*****************/ + /** * JsonNull are stored in JSON strings as the string "null", distinguishable from * the `null` value which is used by the driver to represent the database NULL. @@ -126,22 +210,17 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and * convert it to QuaintValue::Json(Some(Null)). */ -function convertJson(json: string): unknown { +function toJson(json: string): unknown { return (json === 'null') ? JsonNullMarker : JSON.parse(json) } -// Original BYTEA parser -const parsePgBytes = types.getTypeParser(PgColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} +types.setTypeParser(ScalarColumnType.JSONB, toJson) +types.setTypeParser(ScalarColumnType.JSON, toJson) + +/************************/ +/* Binary data handling */ +/************************/ /** * TODO: @@ -154,28 +233,30 @@ function encodeBuffer(buffer: Buffer) { return Array.from(new Uint8Array(buffer)) } -// return string instead of JavaScript Date object -types.setTypeParser(PgColumnType.TIME, date => date) -types.setTypeParser(PgColumnType.DATE, date => date) -types.setTypeParser(PgColumnType.TIMESTAMP, date => date) -types.setTypeParser(PgColumnType.JSONB, convertJson) -types.setTypeParser(PgColumnType.JSON, convertJson) -types.setTypeParser(PgColumnType.MONEY, money => money.slice(1)) -types.setTypeParser(PgColumnType.BYTEA, convertBytes) +/* + * BYTEA - arbitrary raw binary strings + */ + +const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer +/** + * Convert bytes to a JSON-encodable representation since we can't + * currently send a parsed Buffer or ArrayBuffer across JS to Rust + * boundary. 
+ */ +function convertBytes(serializedBytes: string): number[] { + const buffer = parsePgBytes(serializedBytes) + return encodeBuffer(buffer) +} + +types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) + +/* + * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) return buffers.map(encodeBuffer) -}) - -const parseTextArray = types.getTypeParser(ArrayColumnType.TEXT_ARRAY) as (_: string) => string[] - -types.setTypeParser(ArrayColumnType.TIME_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, parseTextArray) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, parseTextArray) - -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, (moneyArray) => - parseTextArray(moneyArray).map((money) => money.slice(1)), -) +}) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 0b15115b5e23..89dbc4ee3d38 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -39,6 +39,9 @@ importers: '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils + postgres-array: + specifier: ^3.0.2 + version: 3.0.2 devDependencies: '@neondatabase/serverless': specifier: ^0.6.0 @@ -49,6 +52,9 @@ importers: '@prisma/driver-adapter-utils': specifier: workspace:* version: link:../driver-adapter-utils + postgres-array: + specifier: ^3.0.2 + version: 3.0.2 devDependencies: '@types/pg': specifier: ^8.10.2 diff --git a/query-engine/driver-adapters/src/conversion.rs b/query-engine/driver-adapters/src/conversion.rs index 2d469a5ab7c3..f65cc955fb21 100644 --- a/query-engine/driver-adapters/src/conversion.rs +++ b/query-engine/driver-adapters/src/conversion.rs @@ -1,7 +1,7 @@ +pub(crate) mod postgres; + use napi::bindgen_prelude::{FromNapiValue, ToNapiValue}; use napi::NapiValue; -use quaint::ast::Value as QuaintValue; -use quaint::ast::ValueType as QuaintValueType; use serde::Serialize; use serde_json::value::Value as JsonValue; @@ -59,35 +59,35 @@ impl ToNapiValue for JSArg { } } -pub fn conv_params(params: &[QuaintValue<'_>]) -> serde_json::Result> { - let mut values = Vec::with_capacity(params.len()); +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + let mut args = Vec::with_capacity(values.len()); - for qv in params { + for qv in values { let res = match &qv.typed { - QuaintValueType::Json(s) => match s { + quaint::ValueType::Json(s) => match s { Some(ref s) => { let json_str = serde_json::to_string(s)?; JSArg::RawString(json_str) } None => JsonValue::Null.into(), }, - QuaintValueType::Bytes(bytes) => match bytes { + quaint::ValueType::Bytes(bytes) => match bytes { Some(bytes) => JSArg::Buffer(bytes.to_vec()), None => JsonValue::Null.into(), }, - quaint_value @ QuaintValueType::Numeric(bd) => match bd { + quaint_value @ quaint::ValueType::Numeric(bd) => match bd { Some(bd) => match bd.to_string().parse::() { Ok(double) => JSArg::from(JsonValue::from(double)), Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), }, None => JsonValue::Null.into(), }, - QuaintValueType::Array(Some(items)) => JSArg::Array(conv_params(items)?), + quaint::ValueType::Array(Some(items)) => JSArg::Array(values_to_js_args(items)?), quaint_value => 
JSArg::from(JsonValue::from(quaint_value.clone())), }; - values.push(res); + args.push(res); } - Ok(values) + Ok(args) } diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs new file mode 100644 index 000000000000..77e79f549d06 --- /dev/null +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -0,0 +1,55 @@ +use crate::conversion::JSArg; +use chrono::format::StrftimeItems; +use once_cell::sync::Lazy; +use serde_json::value::Value as JsonValue; + +static TIME_FMT: Lazy = Lazy::new(|| StrftimeItems::new("%H:%M:%S%.f")); + +pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result> { + let mut args = Vec::with_capacity(values.len()); + + for qv in values { + let res = match (&qv.typed, qv.native_column_type_name()) { + (quaint::ValueType::DateTime(value), Some("DATE")) => match value { + Some(value) => JSArg::RawString(value.date_naive().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), Some("TIME")) => match value { + Some(value) => JSArg::RawString(value.time().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), Some("TIMETZ")) => match value { + Some(value) => JSArg::RawString(value.time().format_with_items(TIME_FMT.clone()).to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::DateTime(value), _) => match value { + Some(value) => JSArg::RawString(value.naive_utc().to_string()), + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Json(s), _) => match s { + Some(ref s) => { + let json_str = serde_json::to_string(s)?; + JSArg::RawString(json_str) + } + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Bytes(bytes), _) => match bytes { + Some(bytes) => JSArg::Buffer(bytes.to_vec()), + None => JsonValue::Null.into(), + }, + (quaint_value @ quaint::ValueType::Numeric(bd), _) => match bd { + Some(bd) => match bd.to_string().parse::() { + Ok(double) => JSArg::from(JsonValue::from(double)), + Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), + }, + None => JsonValue::Null.into(), + }, + (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), + (quaint_value, _) => JSArg::from(JsonValue::from(quaint_value.clone())), + }; + + args.push(res); + } + + Ok(args) +} diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index bdcab93a0c55..14bfd46e62e0 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -258,34 +258,31 @@ fn js_value_to_quaint( serde_json::Value::Number(n) => { // n.as_i32() is not implemented, so we need to downcast from i64 instead n.as_i64() - .ok_or(conversion_error!("number must be an integer")) + .ok_or(conversion_error!("number must be an integer in column '{column_name}'")) .and_then(|n| -> quaint::Result { n.try_into() - .map_err(|e| conversion_error!("cannot convert {n} to i32: {e}")) + .map_err(|e| conversion_error!("cannot convert {n} to i32 in column '{column_name}': {e}")) }) .map(QuaintValue::int32) } - serde_json::Value::String(s) => s - .parse::() - .map(QuaintValue::int32) - .map_err(|e| conversion_error!("string-encoded number must be an i32, got {s}: {e}")), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int32).map_err(|e| { + conversion_error!("string-encoded number must be an i32 in column '{column_name}', got {s}: {e}") + }), serde_json::Value::Null => Ok(QuaintValue::null_int32()), 
mismatch => Err(conversion_error!( - "expected an i32 number in column {column_name}, found {mismatch}" + "expected an i32 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Int64 => match json_value { - serde_json::Value::Number(n) => n - .as_i64() - .map(QuaintValue::int64) - .ok_or(conversion_error!("number must be an i64, got {n}")), - serde_json::Value::String(s) => s - .parse::() - .map(QuaintValue::int64) - .map_err(|e| conversion_error!("string-encoded number must be an i64, got {s}: {e}")), + serde_json::Value::Number(n) => n.as_i64().map(QuaintValue::int64).ok_or(conversion_error!( + "number must be an i64 in column '{column_name}', got {n}" + )), + serde_json::Value::String(s) => s.parse::().map(QuaintValue::int64).map_err(|e| { + conversion_error!("string-encoded number must be an i64 in column '{column_name}', got {s}: {e}") + }), serde_json::Value::Null => Ok(QuaintValue::null_int64()), mismatch => Err(conversion_error!( - "expected a string or number in column {column_name}, found {mismatch}" + "expected a string or number in column '{column_name}', found {mismatch}" )), }, ColumnType::Float => match json_value { @@ -293,36 +290,39 @@ fn js_value_to_quaint( // We assume that the JSON value is a valid f32 number, but we check for overflows anyway. serde_json::Value::Number(n) => n .as_f64() - .ok_or(conversion_error!("number must be a float, got {n}")) + .ok_or(conversion_error!( + "number must be a float in column '{column_name}', got {n}" + )) .and_then(f64_to_f32) .map(QuaintValue::float), serde_json::Value::Null => Ok(QuaintValue::null_float()), mismatch => Err(conversion_error!( - "expected an f32 number in column {column_name}, found {mismatch}" + "expected an f32 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Double => match json_value { - serde_json::Value::Number(n) => n - .as_f64() - .map(QuaintValue::double) - .ok_or(conversion_error!("number must be a f64, got {n}")), + serde_json::Value::Number(n) => n.as_f64().map(QuaintValue::double).ok_or(conversion_error!( + "number must be a f64 in column '{column_name}', got {n}" + )), serde_json::Value::Null => Ok(QuaintValue::null_double()), mismatch => Err(conversion_error!( - "expected an f64 number in column {column_name}, found {mismatch}" + "expected an f64 number in column '{column_name}', found {mismatch}" )), }, ColumnType::Numeric => match json_value { - serde_json::Value::String(s) => BigDecimal::from_str(&s) - .map(QuaintValue::numeric) - .map_err(|e| conversion_error!("invalid numeric value when parsing {s}: {e}")), + serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { + conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") + }), serde_json::Value::Number(n) => n .as_f64() .and_then(BigDecimal::from_f64) - .ok_or(conversion_error!("number must be an f64, got {n}")) + .ok_or(conversion_error!( + "number must be an f64 in column '{column_name}', got {n}" + )) .map(QuaintValue::numeric), serde_json::Value::Null => Ok(QuaintValue::null_numeric()), mismatch => Err(conversion_error!( - "expected a string-encoded number in column {column_name}, found {mismatch}", + "expected a string-encoded number in column '{column_name}', found {mismatch}", )), }, ColumnType::Boolean => match json_value { @@ -332,16 +332,18 @@ fn js_value_to_quaint( Some(0) => Ok(QuaintValue::boolean(false)), Some(1) => Ok(QuaintValue::boolean(true)), _ => Err(conversion_error!( - "expected number-encoded boolean to be 0 or 
1, got {n}" + "expected number-encoded boolean to be 0 or 1 in column '{column_name}', got {n}" )), }, serde_json::Value::String(s) => match s.as_str() { "false" | "FALSE" | "0" => Ok(QuaintValue::boolean(false)), "true" | "TRUE" | "1" => Ok(QuaintValue::boolean(true)), - _ => Err(conversion_error!("expected string-encoded boolean, got {s}")), + _ => Err(conversion_error!( + "expected string-encoded boolean in column '{column_name}', got {s}" + )), }, mismatch => Err(conversion_error!( - "expected a boolean in column {column_name}, found {mismatch}" + "expected a boolean in column '{column_name}', found {mismatch}" )), }, ColumnType::Char => match json_value { @@ -351,43 +353,44 @@ fn js_value_to_quaint( }, serde_json::Value::Null => Ok(QuaintValue::null_character()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Text => match json_value { serde_json::Value::String(s) => Ok(QuaintValue::text(s)), serde_json::Value::Null => Ok(QuaintValue::null_text()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Date => match json_value { serde_json::Value::String(s) => NaiveDate::parse_from_str(&s, "%Y-%m-%d") .map(QuaintValue::date) - .map_err(|_| conversion_error!("expected a date string, got {s}")), + .map_err(|_| conversion_error!("expected a date string in column '{column_name}', got {s}")), serde_json::Value::Null => Ok(QuaintValue::null_date()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Time => match json_value { - serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S") + serde_json::Value::String(s) => NaiveTime::parse_from_str(&s, "%H:%M:%S%.f") .map(QuaintValue::time) - .map_err(|_| conversion_error!("expected a time string, got {s}")), + .map_err(|_| conversion_error!("expected a time string in column '{column_name}', got {s}")), serde_json::Value::Null => Ok(QuaintValue::null_time()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::DateTime => match json_value { + // TODO: change parsing order to prefer RFC3339 serde_json::Value::String(s) => chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S%.f") .map(|dt| DateTime::from_utc(dt, Utc)) .or_else(|_| DateTime::parse_from_rfc3339(&s).map(DateTime::::from)) .map(QuaintValue::datetime) - .map_err(|_| conversion_error!("expected a datetime string, found {s}")), + .map_err(|_| conversion_error!("expected a datetime string in column '{column_name}', found {s}")), serde_json::Value::Null => Ok(QuaintValue::null_datetime()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Json => { @@ -403,7 +406,7 @@ fn js_value_to_quaint( serde_json::Value::String(s) => Ok(QuaintValue::enum_variant(s)), serde_json::Value::Null => Ok(QuaintValue::null_enum()), mismatch => Err(conversion_error!( - "expected a string in column {column_name}, found {mismatch}" + "expected a string in column '{column_name}', found {mismatch}" )), }, ColumnType::Bytes => match 
json_value { @@ -413,19 +416,21 @@ fn js_value_to_quaint( .map(|value| value.as_i64().and_then(|maybe_byte| maybe_byte.try_into().ok())) .collect::>>() .map(QuaintValue::bytes) - .ok_or(conversion_error!("elements of the array must be u8")), + .ok_or(conversion_error!( + "elements of the array in column '{column_name}' must be u8" + )), serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( - "expected a string or an array in column {column_name}, found {mismatch}", + "expected a string or an array in column '{column_name}', found {mismatch}", )), }, ColumnType::Uuid => match json_value { serde_json::Value::String(s) => uuid::Uuid::parse_str(&s) .map(QuaintValue::uuid) - .map_err(|_| conversion_error!("Expected a UUID string")), + .map_err(|_| conversion_error!("Expected a UUID string in column '{column_name}'")), serde_json::Value::Null => Ok(QuaintValue::null_bytes()), mismatch => Err(conversion_error!( - "Expected a UUID string in column {column_name}, found {mismatch}" + "Expected a UUID string in column '{column_name}', found {mismatch}" )), }, ColumnType::UnknownNumber => match json_value { @@ -433,9 +438,11 @@ fn js_value_to_quaint( .as_i64() .map(QuaintValue::int64) .or(n.as_f64().map(QuaintValue::double)) - .ok_or(conversion_error!("number must be an i64 or f64, got {n}")), + .ok_or(conversion_error!( + "number must be an i64 or f64 in column '{column_name}', got {n}" + )), mismatch => Err(conversion_error!( - "expected a either an i64 or a f64 in column {column_name}, found {mismatch}", + "expected a either an i64 or a f64 in column '{column_name}', found {mismatch}", )), }, @@ -476,7 +483,7 @@ fn js_array_to_quaint( )), serde_json::Value::Null => Ok(QuaintValue::null_array()), mismatch => Err(conversion_error!( - "expected an array in column {column_name}, found {mismatch}", + "expected an array in column '{column_name}', found {mismatch}", )), } } @@ -832,9 +839,14 @@ mod proxy_test { let s = "23:59:59"; let json_value = serde_json::Value::String(s.to_string()); let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); - let time: NaiveTime = NaiveTime::from_hms_opt(23, 59, 59).unwrap(); assert_eq!(quaint_value, QuaintValue::time(time)); + + let s = "13:02:20.321"; + let json_value = serde_json::Value::String(s.to_string()); + let quaint_value = js_value_to_quaint(json_value, column_type, "column_name").unwrap(); + let time: NaiveTime = NaiveTime::from_hms_milli_opt(13, 02, 20, 321).unwrap(); + assert_eq!(quaint_value, QuaintValue::time(time)); } #[test] @@ -935,7 +947,7 @@ mod proxy_test { assert_eq!( quaint_value.err().unwrap().to_string(), - "Conversion failed: expected an i32 number in column column_name[2], found {}" + "Conversion failed: expected an i32 number in column 'column_name[2]', found {}" ); } @@ -957,7 +969,7 @@ mod proxy_test { assert_eq!( quaint_value.err().unwrap().to_string(), - "Conversion failed: expected a string in column column_name[0], found 10" + "Conversion failed: expected a string in column 'column_name[0]', found 10" ); } } diff --git a/query-engine/driver-adapters/src/queryable.rs b/query-engine/driver-adapters/src/queryable.rs index d8b022d0fa49..864ba5042083 100644 --- a/query-engine/driver-adapters/src/queryable.rs +++ b/query-engine/driver-adapters/src/queryable.rs @@ -10,7 +10,6 @@ use quaint::{ error::{Error, ErrorKind}, prelude::{Query as QuaintQuery, Queryable as QuaintQueryable, ResultSet, TransactionCapable}, visitor::{self, Visitor}, - Value, }; use 
tracing::{info_span, Instrument}; @@ -38,8 +37,8 @@ impl JsBaseQueryable { Self { proxy, flavour } } - /// visit a query according to the flavour of the JS connector - pub fn visit_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec>)> { + /// visit a quaint query AST according to the flavour of the JS connector + fn visit_quaint_query<'a>(&self, q: QuaintQuery<'a>) -> quaint::Result<(String, Vec>)> { match self.flavour { Flavour::Mysql => visitor::Mysql::build(q), Flavour::Postgres => visitor::Postgres::build(q), @@ -47,39 +46,48 @@ impl JsBaseQueryable { _ => unimplemented!("Unsupported flavour for JS connector {:?}", self.flavour), } } + + async fn build_query(&self, sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result { + let sql: String = sql.to_string(); + let args = match self.flavour { + Flavour::Postgres => conversion::postgres::values_to_js_args(values), + _ => conversion::values_to_js_args(values), + }?; + Ok(Query { sql, args }) + } } #[async_trait] impl QuaintQueryable for JsBaseQueryable { async fn query(&self, q: QuaintQuery<'_>) -> quaint::Result { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.query_raw(&sql, ¶ms).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { metrics::query("js.query_raw", sql, params, move || async move { self.do_query_raw(sql, params).await }) .await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.query_raw(sql, params).await } async fn execute(&self, q: QuaintQuery<'_>) -> quaint::Result { - let (sql, params) = self.visit_query(q)?; + let (sql, params) = self.visit_quaint_query(q)?; self.execute_raw(&sql, ¶ms).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { metrics::query("js.execute_raw", sql, params, move || async move { self.do_execute_raw(sql, params).await }) .await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.execute_raw(sql, params).await } @@ -134,16 +142,10 @@ impl JsBaseQueryable { format!(r#"-- Implicit "{}" query via underlying driver"#, stmt) } - async fn build_query(sql: &str, values: &[quaint::Value<'_>]) -> quaint::Result { - let sql: String = sql.to_string(); - let args = conversion::conv_params(values)?; - Ok(Query { sql, args }) - } - - async fn do_query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn do_query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let result_set = self.proxy.query_raw(query).instrument(sql_span).await?; @@ -154,10 +156,10 @@ impl JsBaseQueryable { result_set.try_into() } - async fn do_execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn 
do_execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { let len = params.len(); let serialization_span = info_span!("js:query:args", user_facing = true, "length" = %len); - let query = Self::build_query(sql, params).instrument(serialization_span).await?; + let query = self.build_query(sql, params).instrument(serialization_span).await?; let sql_span = info_span!("js:query:sql", user_facing = true, "db.statement" = %sql); let affected_rows = self.proxy.execute_raw(query).instrument(sql_span).await?; @@ -202,11 +204,11 @@ impl QuaintQueryable for JsQueryable { self.inner.query(q).await } - async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.query_raw(sql, params).await } - async fn query_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn query_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.query_raw_typed(sql, params).await } @@ -214,11 +216,11 @@ impl QuaintQueryable for JsQueryable { self.inner.execute(q).await } - async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.execute_raw(sql, params).await } - async fn execute_raw_typed(&self, sql: &str, params: &[Value<'_>]) -> quaint::Result { + async fn execute_raw_typed(&self, sql: &str, params: &[quaint::Value<'_>]) -> quaint::Result { self.inner.execute_raw_typed(sql, params).await } From 66f0881e93adb8868b623fa14a649cb876947c31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Fri, 13 Oct 2023 13:16:43 +0200 Subject: [PATCH 101/128] Fix binary entrypoint for planetscale tests (#4357) * Change binary entrypoint for planetscale tests * Fix MYSQL_DATABASE name to match what the test setup uses --------- Co-authored-by: Alexey Orlenko --- docker-compose.yml | 2 +- docker/planetscale_proxy/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 97c9ed79e1c7..fad49d836cde 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -126,7 +126,7 @@ services: environment: MYSQL_HOST: 'vitess-test-8_0' MYSQL_PORT: 33807 - MYSQL_DATABASE: 'test-0000-00000000' + MYSQL_DATABASE: 'test' ports: - '8085:8085' depends_on: diff --git a/docker/planetscale_proxy/Dockerfile b/docker/planetscale_proxy/Dockerfile index ae5ec56329c2..2411894d88f0 100644 --- a/docker/planetscale_proxy/Dockerfile +++ b/docker/planetscale_proxy/Dockerfile @@ -4,7 +4,7 @@ RUN apt update && apt install netcat-openbsd -y RUN cd /go/src && git clone https://github.com/prisma/planetscale-proxy.git RUN cd /go/src/planetscale-proxy && go install . -ENTRYPOINT /go/bin/planetscale-proxy \ +ENTRYPOINT /go/bin/ps-http-sim \ -http-addr=0.0.0.0 \ -http-port=8085 \ -mysql-addr=$MYSQL_HOST \ From 42bfcd0e4a2e9f73937c1ed5c2f42bf08d07289a Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 13 Oct 2023 16:34:45 +0200 Subject: [PATCH 102/128] engineer: update to 1.60 (#4366) --- .buildkite/engineer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 5e586ad2f0ec..bf31a6e371df 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -43,7 +43,7 @@ fi # Check if the system has engineer installed, if not, use a local copy. if ! 
type "engineer" &> /dev/null; then # Setup Prisma engine build & test tool (engineer). - curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.59/latest/$OS/engineer.gz" --output engineer.gz + curl --fail -sSL "https://prisma-engineer.s3-eu-west-1.amazonaws.com/1.60/latest/$OS/engineer.gz" --output engineer.gz gzip -d engineer.gz chmod +x engineer From f44b62756e369de9a75621e9cdc1a1840b3b59b2 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Fri, 13 Oct 2023 17:19:35 +0200 Subject: [PATCH 103/128] driver-adapters: Common code for libsql/mysql error handling (#4365) * driver-adapters: Common code for libsql/mysql error handling Splits part of the code that are common between #4364 and #4362 into it's own PR so it could be reviewd and merged separately and aforementioned PR stop conflicting with each other. Also implements error handling for PG since it is necessary for modified smoke test to pass. * fix codestyle --- quaint/src/connector/postgres.rs | 4 +- quaint/src/error.rs | 2 +- .../js/adapter-neon/src/neon.ts | 7 +- .../driver-adapters/js/adapter-pg/src/pg.ts | 44 ++++++++----- .../js/driver-adapter-utils/src/binder.ts | 4 +- .../js/driver-adapter-utils/src/types.ts | 26 ++++---- .../js/smoke-test-js/src/libquery/libquery.ts | 66 +++++++++++-------- query-engine/driver-adapters/src/result.rs | 8 +-- 8 files changed, 89 insertions(+), 72 deletions(-) diff --git a/quaint/src/connector/postgres.rs b/quaint/src/connector/postgres.rs index 2c81144c812b..766be38b27e4 100644 --- a/quaint/src/connector/postgres.rs +++ b/quaint/src/connector/postgres.rs @@ -1,5 +1,5 @@ mod conversion; -pub mod error; +mod error; use crate::{ ast::{Query, Value}, @@ -27,6 +27,8 @@ use tokio_postgres::{ }; use url::{Host, Url}; +pub use error::PostgresError; + pub(crate) const DEFAULT_SCHEMA: &str = "public"; /// The underlying postgres driver. 
Only available with the `expose-drivers` diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 22037d443c35..c7c78a24772e 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,7 +6,7 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; -pub use crate::connector::postgres::error::PostgresError; +pub use crate::connector::postgres::PostgresError; #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts index c86b8d88bef0..e8fe40ada22f 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts @@ -67,7 +67,7 @@ class NeonWsQueryable extends NeonQ debug('Error in performIO: %O', e) if (e && e.code) { return err({ - kind: 'PostgresError', + kind: 'Postgres', code: e.code, severity: e.severity, message: e.message, @@ -84,10 +84,7 @@ class NeonWsQueryable extends NeonQ class NeonTransaction extends NeonWsQueryable implements Transaction { finished = false - constructor( - client: neon.PoolClient, - readonly options: TransactionOptions, - ) { + constructor(client: neon.PoolClient, readonly options: TransactionOptions) { super(client) } diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts index a049b59a0740..c34050778c39 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts @@ -1,5 +1,5 @@ import type pg from 'pg' -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, err, ok } from '@prisma/driver-adapter-utils' import type { DriverAdapter, Query, @@ -28,18 +28,17 @@ class PgQueryable implements Quer const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { fields, rows } = await this.performIO(query) - - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows, - } - - return ok(resultSet) + const ioResult = await this.performIO(query) + return ioResult.map(({ fields, rows }) => { + const columns = fields.map((field) => field.name) + const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) + + return { + columnNames: columns, + columnTypes, + rows, + } + }) } /** @@ -51,10 +50,8 @@ class PgQueryable implements Quer const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowCount: rowsAffected } = await this.performIO(query) - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return ok(rowsAffected ?? 0) + return (await this.performIO(query)).map(({ rowCount: rowsAffected }) => rowsAffected ?? 0) } /** @@ -62,15 +59,26 @@ class PgQueryable implements Quer * Should the query fail due to a connection error, the connection is * marked as unhealthy. 
*/ - private async performIO(query: Query) { + private async performIO(query: Query): Promise>> { const { sql, args: values } = query try { const result = await this.client.query({ text: sql, values, rowMode: 'array' }) - return result + return ok(result) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) + if (e && e.code) { + return err({ + kind: 'Postgres', + code: e.code, + severity: e.severity, + message: e.message, + detail: e.detail, + column: e.column, + hint: e.hint, + }) + } throw error } } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts index aee18197e291..1e3aa36210cf 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts @@ -60,7 +60,7 @@ function wrapAsync( return await fn(...args) } catch (error) { const id = registry.registerNewError(error) - return err({ kind: 'GenericJsError', id }) + return err({ kind: 'GenericJs', id }) } } } @@ -74,7 +74,7 @@ function wrapSync( return fn(...args) } catch (error) { const id = registry.registerNewError(error) - return err({ kind: 'GenericJsError', id }) + return err({ kind: 'GenericJs', id }) } } } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 65fa002dcc3a..42f1b0513076 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -33,18 +33,20 @@ export type Query = { args: Array } -export type Error = { - kind: 'GenericJsError' - id: number -} | { - kind: 'PostgresError' - code: string, - severity: string - message: string - detail: string | undefined - column: string | undefined - hint: string | undefined -} +export type Error = + | { + kind: 'GenericJs' + id: number + } + | { + kind: 'Postgres' + code: string + severity: string + message: string + detail: string | undefined + column: string | undefined + hint: string | undefined + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 4cdde4515615..61d239ea42d6 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -4,9 +4,12 @@ import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' import type { QueryEngineInstance } from '../engines/types/Library' import { createQueryFn, initQueryEngine } from './util' import { JsonQuery } from '../engines/types/JsonProtocol' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSchemaRelativePath: string, supportsTransactions = true) { +export function smokeTestLibquery( + adapter: ErrorCapturingDriverAdapter, + prismaSchemaRelativePath: string, + supportsTransactions = true, +) { const engine = initQueryEngine(adapter, prismaSchemaRelativePath) const flavour = adapter.flavour @@ -263,7 +266,7 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) it('create explicit transaction', async () => { - if(!supportsTransactions) return + if (!supportsTransactions) return const args = { isolation_level: 
'Serializable', max_wait: 5000, timeout: 15000 } const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') @@ -298,7 +301,7 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }, }) - const result = await doQuery({ + await doQuery({ modelName: 'Unique', action: 'createOne', query: { @@ -310,24 +313,31 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }, }, }) - console.log('[nodejs] error result1', JSON.stringify(result, null, 2)) - const result2 = await doQuery({ + const promise = doQuery({ modelName: 'Unique', action: 'createOne', query: { arguments: { - data: { email: 'duplicate@example.com' } + data: { email: 'duplicate@example.com' }, }, selection: { $scalars: true, }, }, }) - console.log('[nodejs] error result2', JSON.stringify(result2, null, 2)) - - // TODO assert that result2 includes `errors.error` (which should currently only pass on neon:ws) - + + if (flavour === 'postgres') { + const result = await promise + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') + } else { + await assert.rejects(promise, (err) => { + assert(typeof err === 'object' && err !== null) + assert.match(err['message'], /unique/i) + return true + }) + } }) describe('read scalar and non scalar types', () => { @@ -395,24 +405,22 @@ export function smokeTestLibquery(adapter: ErrorCapturingDriverAdapter, prismaSc }) } else if (['sqlite'].includes(flavour)) { it('sqlite', async () => { - const resultSet = await doQuery( - { - "action": "findMany", - "modelName": "type_test", - "query": { - "selection": { - "int_column": true, - "bigint_column": true, - "double_column": true, - "decimal_column": true, - "boolean_column": true, - "text_column": true, - "datetime_column": true, - } - } - } - ) - console.log('[nodejs] findMany resultSet', JSON.stringify((resultSet), null, 2)) + const resultSet = await doQuery({ + action: 'findMany', + modelName: 'type_test', + query: { + selection: { + int_column: true, + bigint_column: true, + double_column: true, + decimal_column: true, + boolean_column: true, + text_column: true, + datetime_column: true, + }, + }, + }) + console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) }) } else { throw new Error(`Missing test for flavour ${flavour}`) diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index fc6f52bd2743..10bdb8a4aecb 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -19,11 +19,11 @@ pub struct PostgresErrorDef { /// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception - GenericJsError { + GenericJs { id: i32, }, - PostgresError(#[serde(with = "PostgresErrorDef")] PostgresError), + Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), // in the future, expected errors that map to known user errors with PXXX codes will also go here } @@ -38,8 +38,8 @@ impl FromNapiValue for DriverAdapterError { impl From for QuaintError { fn from(value: DriverAdapterError) -> Self { match value { - DriverAdapterError::GenericJsError { id } => QuaintError::external_error(id), - DriverAdapterError::PostgresError(e) => e.into(), + DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), + DriverAdapterError::Postgres(e) => e.into(), // in future, more error types would be 
added and we'll need to convert them to proper QuaintErrors here } } From cdd7f4c02d91616346ae555ef679e66af57a78b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 09:47:40 +0200 Subject: [PATCH 104/128] chore(deps): bump undici in /query-engine/driver-adapters/js (#4372) Bumps [undici](https://github.com/nodejs/undici) from 5.23.0 to 5.26.2. - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.23.0...v5.26.2) --- updated-dependencies: - dependency-name: undici dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../connector-test-kit-executor/package.json | 2 +- .../driver-adapters/js/pnpm-lock.yaml | 31 +++++++------------ .../js/smoke-test-js/package.json | 2 +- 3 files changed, 14 insertions(+), 21 deletions(-) diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json index 4a5f093388e6..2a0d16bd4ccf 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json @@ -23,6 +23,6 @@ "@prisma/driver-adapter-utils": "workspace:*", "@types/pg": "^8.10.2", "pg": "^8.11.3", - "undici": "^5.23.0" + "undici": "^5.26.2" } } diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 89dbc4ee3d38..3f7f13d3ff6a 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -106,8 +106,8 @@ importers: specifier: ^8.11.3 version: 8.11.3 undici: - specifier: ^5.23.0 - version: 5.23.0 + specifier: ^5.26.2 + version: 5.26.2 driver-adapter-utils: dependencies: @@ -155,8 +155,8 @@ importers: specifier: ^1.13.1 version: 1.13.1 undici: - specifier: ^5.23.0 - version: 5.23.0 + specifier: ^5.26.2 + version: 5.26.2 devDependencies: '@types/node': specifier: ^20.5.1 @@ -395,6 +395,11 @@ packages: dev: true optional: true + /@fastify/busboy@2.0.0: + resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} + engines: {node: '>=14'} + dev: false + /@jridgewell/gen-mapping@0.3.3: resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} engines: {node: '>=6.0.0'} @@ -667,13 +672,6 @@ packages: load-tsconfig: 0.2.5 dev: true - /busboy@1.6.0: - resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} - engines: {node: '>=10.16.0'} - dependencies: - streamsearch: 1.1.0 - dev: false - /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -1361,11 +1359,6 @@ packages: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} - /streamsearch@1.1.0: - resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} - engines: {node: '>=10.0.0'} - dev: false - /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} engines: {node: '>=6'} @@ -1487,11 
+1480,11 @@ packages: hasBin: true dev: true - /undici@5.23.0: - resolution: {integrity: sha512-1D7w+fvRsqlQ9GscLBwcAJinqcZGHUKjbOmXdlE/v8BvEGXjeWAax+341q44EuTcHXXnfyKNbKRq4Lg7OzhMmg==} + /undici@5.26.2: + resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} engines: {node: '>=14.0'} dependencies: - busboy: 1.6.0 + '@fastify/busboy': 2.0.0 dev: false /web-streams-polyfill@3.2.1: diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json index 27d4220f41bc..31362c1cc873 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ b/query-engine/driver-adapters/js/smoke-test-js/package.json @@ -55,7 +55,7 @@ "@prisma/driver-adapter-utils": "workspace:*", "pg": "^8.11.3", "superjson": "^1.13.1", - "undici": "^5.23.0" + "undici": "^5.26.2" }, "devDependencies": { "@types/node": "^20.5.1", From 2e1051d3e62163b3a4de20caf5bfa092322cb073 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 23 Oct 2023 15:10:58 +0200 Subject: [PATCH 105/128] Driver adapters: Support BIT_ARRAY, CIDR_ARRAY, INET_ARRAY, OID_ARRAY, VARBIT_ARRAY in pg and neon (#4363) * Support BIT_ARRAY, CIDR_ARRAY, INET_ARRAY, OID_ARRAY, VARBIT_ARRAY in pg and neon * INT8_ARRAY * It's ok to reduce precision a little bit * Null lists * Allow tests to drift in the different drivers' implementation of bigdecimal * revert changes to decimal.rs test * Adapt comment --- .../js/adapter-neon/src/conversion.ts | 32 ++++++++++++++--- .../js/adapter-pg/src/conversion.ts | 34 ++++++++++++++++--- .../src/conversion/postgres.rs | 7 ++-- 3 files changed, 59 insertions(+), 14 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 9f6486362d78..78f285240599 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -6,25 +6,34 @@ const ScalarColumnType = types.builtins /** * PostgreSQL array column types (not defined in ScalarColumnType).
+ * + * See the semantics of each of this code in: + * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat */ const ArrayColumnType = { + BIT_ARRAY: 1561, BOOL_ARRAY: 1000, BYTEA_ARRAY: 1001, BPCHAR_ARRAY: 1014, CHAR_ARRAY: 1002, + CIDR_ARRAY: 651, DATE_ARRAY: 1182, FLOAT4_ARRAY: 1021, FLOAT8_ARRAY: 1022, + INET_ARRAY: 1041, INT2_ARRAY: 1005, INT4_ARRAY: 1007, + INT8_ARRAY: 1016, JSONB_ARRAY: 3807, JSON_ARRAY: 199, MONEY_ARRAY: 791, NUMERIC_ARRAY: 1231, + OID_ARRAY: 1028, TEXT_ARRAY: 1009, TIMESTAMP_ARRAY: 1115, TIME_ARRAY: 1183, UUID_ARRAY: 2951, + VARBIT_ARRAY: 1563, VARCHAR_ARRAY: 1015, XML_ARRAY: 143, } @@ -90,9 +99,13 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BooleanArray case ArrayColumnType.CHAR_ARRAY: return ColumnTypeEnum.CharArray + case ArrayColumnType.BPCHAR_ARRAY: case ArrayColumnType.TEXT_ARRAY: case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.VARBIT_ARRAY: + case ArrayColumnType.BIT_ARRAY: + case ArrayColumnType.INET_ARRAY: + case ArrayColumnType.CIDR_ARRAY: case ArrayColumnType.XML_ARRAY: return ColumnTypeEnum.TextArray case ArrayColumnType.DATE_ARRAY: @@ -108,7 +121,9 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BytesArray case ArrayColumnType.UUID_ARRAY: return ColumnTypeEnum.UuidArray - + case ArrayColumnType.INT8_ARRAY: + case ArrayColumnType.OID_ARRAY: + return ColumnTypeEnum.Int64Array default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -251,12 +266,21 @@ function convertBytes(serializedBytes: string): number[] { types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) /* - * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + * BYTEA_ARRAY - arrays of arbitrary raw binary strings */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) + return buffers.map((buf) => buf ? encodeBuffer(buf) : null) }) + +/* BIT_ARRAY, VARBIT_ARRAY */ + +function normalizeBit(bit: string): string { + return bit +} + +types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) +types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index 69e8f1d9dec1..c26b13877927 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -6,25 +6,34 @@ const ScalarColumnType = types.builtins /** * PostgreSQL array column types (not defined in ScalarColumnType). 
+ * + * See the semantics of each of this code in: + * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat */ const ArrayColumnType = { + BIT_ARRAY: 1561, BOOL_ARRAY: 1000, BYTEA_ARRAY: 1001, BPCHAR_ARRAY: 1014, CHAR_ARRAY: 1002, + CIDR_ARRAY: 651, DATE_ARRAY: 1182, FLOAT4_ARRAY: 1021, FLOAT8_ARRAY: 1022, + INET_ARRAY: 1041, INT2_ARRAY: 1005, INT4_ARRAY: 1007, + INT8_ARRAY: 1016, JSONB_ARRAY: 3807, JSON_ARRAY: 199, MONEY_ARRAY: 791, NUMERIC_ARRAY: 1231, + OID_ARRAY: 1028, TEXT_ARRAY: 1009, TIMESTAMP_ARRAY: 1115, TIME_ARRAY: 1183, UUID_ARRAY: 2951, + VARBIT_ARRAY: 1563, VARCHAR_ARRAY: 1015, XML_ARRAY: 143, } @@ -90,9 +99,13 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BooleanArray case ArrayColumnType.CHAR_ARRAY: return ColumnTypeEnum.CharArray + case ArrayColumnType.BPCHAR_ARRAY: case ArrayColumnType.TEXT_ARRAY: case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.VARBIT_ARRAY: + case ArrayColumnType.BIT_ARRAY: + case ArrayColumnType.INET_ARRAY: + case ArrayColumnType.CIDR_ARRAY: case ArrayColumnType.XML_ARRAY: return ColumnTypeEnum.TextArray case ArrayColumnType.DATE_ARRAY: @@ -108,7 +121,9 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BytesArray case ArrayColumnType.UUID_ARRAY: return ColumnTypeEnum.UuidArray - + case ArrayColumnType.INT8_ARRAY: + case ArrayColumnType.OID_ARRAY: + return ColumnTypeEnum.Int64Array default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -251,12 +266,21 @@ function convertBytes(serializedBytes: string): number[] { types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) /* - * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + * BYTEA_ARRAY - arrays of arbitrary raw binary strings */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) -}) \ No newline at end of file + return buffers.map((buf) => buf ? 
encodeBuffer(buf) : null) +}) + +/* BIT_ARRAY, VARBIT_ARRAY */ + +function normalizeBit(bit: string): string { + return bit +} + +types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) +types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 77e79f549d06..21b1ec6b2fb9 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -37,11 +37,8 @@ pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result Some(bytes) => JSArg::Buffer(bytes.to_vec()), None => JsonValue::Null.into(), }, - (quaint_value @ quaint::ValueType::Numeric(bd), _) => match bd { - Some(bd) => match bd.to_string().parse::<f64>() { - Ok(double) => JSArg::from(JsonValue::from(double)), - Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), - }, + (quaint::ValueType::Numeric(bd), _) => match bd { + Some(bd) => JSArg::RawString(bd.to_string()), None => JsonValue::Null.into(), }, (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), From 98389c0f3bc634961b2866960d9cd85bb9a138ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 23 Oct 2023 16:35:53 +0200 Subject: [PATCH 106/128] run driver adapter tests in ubuntu-latest rather than buildjet (#4374) --- .github/workflows/query-engine-driver-adapters.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 50f86575a8a7..d52b446b12fb 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -46,7 +46,7 @@ jobs: QUERY_BATCH_SIZE: '10' WORKSPACE_ROOT: ${{ github.workspace }} - runs-on: buildjet-16vcpu-ubuntu-2004 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 From 28291c703da2b149e7feabdebc287319e1bf0a46 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 23 Oct 2023 16:45:18 +0200 Subject: [PATCH 107/128] driver-adapters: Map planetscale/mysql DB errors to Prisma error codes (#4364) Fix prisma/team-orm#439 --- quaint/src/connector/mysql.rs | 2 + quaint/src/connector/mysql/error.rs | 173 ++++++++++-------- quaint/src/error.rs | 1 + .../js/adapter-planetscale/src/planetscale.ts | 54 ++++-- .../js/driver-adapter-utils/src/types.ts | 6 + .../js/smoke-test-js/src/libquery/libquery.ts | 2 +- query-engine/driver-adapters/src/result.rs | 12 +- 7 files changed, 158 insertions(+), 92 deletions(-) diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index e5a1b794ab5b..4b6f27a583da 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -24,6 +24,8 @@ use std::{ }; use tokio::sync::Mutex; use url::{Host, Url}; +pub use error::MysqlError; /// The underlying MySQL driver. Only available with the `expose-drivers` /// Cargo feature.
#[cfg(feature = "expose-drivers")] diff --git a/quaint/src/connector/mysql/error.rs b/quaint/src/connector/mysql/error.rs index 8b381e1581bb..dd7c3d3bfa66 100644 --- a/quaint/src/connector/mysql/error.rs +++ b/quaint/src/connector/mysql/error.rs @@ -1,22 +1,29 @@ use crate::error::{DatabaseConstraint, Error, ErrorKind}; use mysql_async as my; -impl From for Error { - fn from(e: my::Error) -> Error { - use my::ServerError; +pub struct MysqlError { + pub code: u16, + pub message: String, + pub state: String, +} - match e { - my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { - message: err.to_string(), - }) - .build(), - my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { - Error::builder(ErrorKind::ConnectionClosed).build() - } - my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), - my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), - my::Error::Server(ServerError { ref message, code, .. }) if code == 1062 => { - let constraint = message +impl From<&my::ServerError> for MysqlError { + fn from(value: &my::ServerError) -> Self { + MysqlError { + code: value.code, + message: value.message.to_owned(), + state: value.state.to_owned(), + } + } +} + +impl From for Error { + fn from(error: MysqlError) -> Self { + let code = error.code; + match code { + 1062 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -29,12 +36,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1451 || code == 1452 => { - let constraint = message + 1451 | 1452 => { + let constraint = error + .message .split_whitespace() .nth(17) .and_then(|s| s.split('`').nth(1)) @@ -45,12 +53,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1263 => { - let constraint = message + 1263 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -62,22 +71,23 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1264 => { + 1264 => { let mut builder = Error::builder(ErrorKind::ValueOutOfRange { - message: message.clone(), + message: error.message.clone(), }); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1364 || code == 1048 => { - let constraint = message + 1364 | 1048 => { + let constraint = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -88,12 +98,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1049 => { - let db_name = message + 1049 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -103,12 +114,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1007 => { - let db_name = message + 1007 => { + let db_name = error + .message .split_whitespace() .nth(3) .and_then(|s| s.split('\'').nth(1)) @@ -118,12 +130,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1044 => { - let db_name = message + 1044 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -133,12 +146,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1045 => { - let user = message + 1045 => { + let user = error + .message .split_whitespace() .nth(4) .and_then(|s| s.split('@').next()) @@ -149,12 +163,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1146 => { - let table = message + 1146 => { + let table = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -165,12 +180,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1054 => { - let column = message + 1054 => { + let column = error + .message .split_whitespace() .nth(2) .and_then(|s| s.split('\'').nth(1)) @@ -179,68 +195,77 @@ impl From for Error { let mut builder = Error::builder(ErrorKind::ColumnNotFound { column }); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1406 => { - let column = message.split_whitespace().flat_map(|s| s.split('\'')).nth(6).into(); + 1406 => { + let column = error + .message + .split_whitespace() + .flat_map(|s| s.split('\'')) + .nth(6) + .into(); let kind = ErrorKind::LengthMismatch { column }; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1191 => { + 1191 => { let kind = ErrorKind::MissingFullTextSearchIndex; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1213 => { + 1213 => { let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - ref state, - }) => { + _ => { let kind = ErrorKind::QueryError( - my::Error::Server(ServerError { - message: message.clone(), + my::Error::Server(my::ServerError { + message: error.message.clone(), code, - state: state.clone(), + state: error.state.clone(), }) .into(), ); let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } + } + } +} + +impl From for Error { + fn from(e: my::Error) -> Error { + match e { + my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { + message: err.to_string(), + }) + .build(), + my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { + Error::builder(ErrorKind::ConnectionClosed).build() + } + my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), + my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), + my::Error::Server(ref server_error) => { + let mysql_error: MysqlError = server_error.into(); + mysql_error.into() + } e => Error::builder(ErrorKind::QueryError(e.into())).build(), } } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index c7c78a24772e..0460b77100fb 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,6 +6,7 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; +pub use crate::connector::mysql::MysqlError; pub use crate::connector::postgres::PostgresError; #[derive(Debug, PartialEq, Eq)] diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index cffb00482003..5a52851112b2 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ 
b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,5 +1,5 @@ import type planetScale from '@planetscale/database' -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, err, ok } from '@prisma/driver-adapter-utils' import type { DriverAdapter, ResultSet, @@ -36,17 +36,16 @@ class PlanetScaleQueryable field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - - return ok(resultSet) + const ioResult = await this.performIO(query) + return ioResult.map(({ fields, insertId: lastInsertId, rows }) => { + const columns = fields.map((field) => field.name) + return { + columnNames: columns, + columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), + rows: rows as ResultSet['rows'], + lastInsertId, + } + }) } /** @@ -58,8 +57,7 @@ class PlanetScaleQueryable rowsAffected) } /** @@ -67,22 +65,46 @@ class PlanetScaleQueryable> { const { sql, args: values } = query try { const result = await this.client.execute(sql, values, { as: 'array', }) - return result + return ok(result) } catch (e) { const error = e as Error + if (error.name === 'DatabaseError') { + const parsed = parseErrorMessage(error.message) + if (parsed) { + return err({ + kind: 'Mysql', + ...parsed, + }) + } + } debug('Error in performIO: %O', error) throw error } } } +function parseErrorMessage(message: string) { + const match = message.match( + /target: (?:.+?) vttablet: (?.+?) \(errno (?\d+)\) \(sqlstate (?.+?)\)/, + ) + + if (!match || !match.groups) { + return undefined + } + return { + code: Number(match.groups.code), + message: match.groups.message, + state: match.groups.state, + } +} + class PlanetScaleTransaction extends PlanetScaleQueryable implements Transaction { finished = false diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 42f1b0513076..104b23d233c5 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -47,6 +47,12 @@ export type Error = column: string | undefined hint: string | undefined } + | { + kind: 'Mysql' + code: number + message: string + state: string + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 61d239ea42d6..e94eacbae328 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -327,7 +327,7 @@ export function smokeTestLibquery( }, }) - if (flavour === 'postgres') { + if (flavour === 'postgres' || flavour === 'mysql') { const result = await promise console.log('[nodejs] error result', JSON.stringify(result, null, 2)) assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index 10bdb8a4aecb..08397d834ed0 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,5 +1,5 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, 
PostgresError}; +use quaint::error::{Error as QuaintError, MysqlError, PostgresError}; use serde::Deserialize; #[derive(Deserialize)] @@ -13,6 +13,14 @@ pub struct PostgresErrorDef { hint: Option, } +#[derive(Deserialize)] +#[serde(remote = "MysqlError")] +pub struct MysqlErrorDef { + pub code: u16, + pub message: String, + pub state: String, +} + #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors @@ -24,6 +32,7 @@ pub(crate) enum DriverAdapterError { }, Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), + Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), // in the future, expected errors that map to known user errors with PXXX codes will also go here } @@ -40,6 +49,7 @@ impl From for QuaintError { match value { DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), DriverAdapterError::Postgres(e) => e.into(), + DriverAdapterError::Mysql(e) => e.into(), // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } From 475c616176945d72f4330c92801f0c5e6398dc0f Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 23 Oct 2023 17:40:38 +0200 Subject: [PATCH 108/128] driver-adapters: Map libsql errors to Prisma errors (#4362) Similar approach to what we did with Neon: raw error data is returned from driver adapter in case of DB error, which then reuses Quaint's error handling code for adapter too. Close prisma/team-orm#393 --- quaint/src/connector/sqlite.rs | 2 + quaint/src/connector/sqlite/error.rs | 211 ++++++++---------- quaint/src/error.rs | 1 + .../js/adapter-libsql/src/libsql.ts | 48 ++-- .../js/driver-adapter-utils/src/types.ts | 8 + .../driver-adapters/js/pnpm-lock.yaml | 44 ++-- .../js/smoke-test-js/src/libquery/libquery.ts | 18 +- query-engine/driver-adapters/src/result.rs | 12 +- 8 files changed, 170 insertions(+), 174 deletions(-) diff --git a/quaint/src/connector/sqlite.rs b/quaint/src/connector/sqlite.rs index 6db49523c80a..3a1ef72b4883 100644 --- a/quaint/src/connector/sqlite.rs +++ b/quaint/src/connector/sqlite.rs @@ -1,6 +1,8 @@ mod conversion; mod error; +pub use error::SqliteError; + pub use rusqlite::{params_from_iter, version as sqlite_version}; use super::IsolationLevel; diff --git a/quaint/src/connector/sqlite/error.rs b/quaint/src/connector/sqlite/error.rs index fa8b83f3f28a..c10b335cb3c0 100644 --- a/quaint/src/connector/sqlite/error.rs +++ b/quaint/src/connector/sqlite/error.rs @@ -1,69 +1,45 @@ +use std::fmt; + use crate::error::*; use rusqlite::ffi; use rusqlite::types::FromSqlError; -impl From for Error { - fn from(e: rusqlite::Error) -> Error { - match e { - rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { - Ok(error) => *error, - Err(error) => { - let mut builder = Error::builder(ErrorKind::QueryError(error)); - - builder.set_original_message("Could not interpret parameters in an SQLite query."); - - builder.build() - } - }, - rusqlite::Error::InvalidQuery => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - builder.set_original_message( - "Could not interpret the query or its parameters. 
Check the syntax and parameter types.", - ); - - builder.build() - } - rusqlite::Error::ExecuteReturnedResults => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_message("Execute returned results, which is not allowed in SQLite."); - - builder.build() - } - - rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), +#[derive(Debug)] +pub struct SqliteError { + pub extended_code: i32, + pub message: Option, +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 2067, - }, - Some(description), - ) => { - let constraint = description - .split(": ") - .nth(1) - .map(|s| s.split(", ")) - .map(|i| i.flat_map(|s| s.split('.').last())) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); +impl fmt::Display for SqliteError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error code {}: {}", + self.extended_code, + ffi::code_to_str(self.extended_code) + ) + } +} - let kind = ErrorKind::UniqueConstraintViolation { constraint }; - let mut builder = Error::builder(kind); +impl std::error::Error for SqliteError {} - builder.set_original_code("2067"); - builder.set_original_message(description); +impl SqliteError { + pub fn new(extended_code: i32, message: Option) -> Self { + Self { extended_code, message } + } - builder.build() - } + pub fn primary_code(&self) -> i32 { + self.extended_code & 0xFF + } +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(description), - ) => { +impl From for Error { + fn from(error: SqliteError) -> Self { + match error { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_UNIQUE | ffi::SQLITE_CONSTRAINT_PRIMARYKEY, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -75,19 +51,16 @@ impl From for Error { let kind = ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1555"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1299, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_NOTNULL, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -99,64 +72,41 @@ impl From for Error { let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1299"); - builder.set_original_message(description); - - builder.build() - } - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 787, - }, - Some(description), - ) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::ForeignKey, - }); - - builder.set_original_code("787"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1811, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_FOREIGNKEY | ffi::SQLITE_CONSTRAINT_TRIGGER, + message: Some(description), + } => { let mut builder = 
Error::builder(ErrorKind::ForeignKeyConstraintViolation { constraint: DatabaseConstraint::ForeignKey, }); - builder.set_original_code("1811"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::DatabaseBusy, - extended_code, - }, - description, - ) => { + SqliteError { extended_code, message } if error.primary_code() == ffi::SQLITE_BUSY => { let mut builder = Error::builder(ErrorKind::SocketTimeout); builder.set_original_code(format!("{extended_code}")); - if let Some(description) = description { + if let Some(description) = message { builder.set_original_message(description); } builder.build() } - rusqlite::Error::SqliteFailure(ffi::Error { extended_code, .. }, ref description) => match description { + SqliteError { + extended_code, + ref message, + } => match message { Some(d) if d.starts_with("no such table") => { let table = d.split(": ").last().into(); let kind = ErrorKind::TableDoesNotExist { table }; @@ -188,8 +138,8 @@ impl From for Error { builder.build() } _ => { - let description = description.as_ref().map(|d| d.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + let description = message.as_ref().map(|d| d.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(error.into())); builder.set_original_code(format!("{extended_code}")); if let Some(description) = description { @@ -199,31 +149,50 @@ impl From for Error { builder.build() } }, + } + } +} - rusqlite::Error::SqlInputError { - error: ffi::Error { extended_code, .. }, - ref msg, - .. - } => match msg { - d if d.starts_with("no such column: ") => { - let column = d.split("no such column: ").last().into(); - let kind = ErrorKind::ColumnNotFound { column }; - - let mut builder = Error::builder(kind); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(d); +impl From for Error { + fn from(e: rusqlite::Error) -> Error { + match e { + rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { + Ok(error) => *error, + Err(error) => { + let mut builder = Error::builder(ErrorKind::QueryError(error)); - builder.build() - } - _ => { - let description = msg.clone(); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(description); + builder.set_original_message("Could not interpret parameters in an SQLite query."); builder.build() } }, + rusqlite::Error::InvalidQuery => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + builder.set_original_message( + "Could not interpret the query or its parameters. Check the syntax and parameter types.", + ); + + builder.build() + } + rusqlite::Error::ExecuteReturnedResults => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + builder.set_original_message("Execute returned results, which is not allowed in SQLite."); + + builder.build() + } + + rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), + + rusqlite::Error::SqliteFailure(ffi::Error { code: _, extended_code }, message) => { + SqliteError::new(extended_code, message).into() + } + + rusqlite::Error::SqlInputError { + error: ffi::Error { extended_code, .. }, + msg, + .. 
+ } => SqliteError::new(extended_code, Some(msg)).into(), e => Error::builder(ErrorKind::QueryError(e.into())).build(), } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 0460b77100fb..705bb6b37ee0 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -8,6 +8,7 @@ use std::time::Duration; pub use crate::connector::mysql::MysqlError; pub use crate::connector::postgres::PostgresError; +pub use crate::connector::sqlite::SqliteError; #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts index 5d104e8e2949..6528c8f44a8a 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts @@ -1,4 +1,4 @@ -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, ok, err } from '@prisma/driver-adapter-utils' import type { DriverAdapter, Query, @@ -8,7 +8,12 @@ import type { Transaction, TransactionOptions, } from '@prisma/driver-adapter-utils' -import type { InStatement, Client as LibSqlClientRaw, Transaction as LibSqlTransactionRaw } from '@libsql/client' +import type { + InStatement, + Client as LibSqlClientRaw, + Transaction as LibSqlTransactionRaw, + ResultSet as LibSqlResultSet, +} from '@libsql/client' import { Mutex } from 'async-mutex' import { getColumnTypes, mapRow } from './conversion' @@ -33,17 +38,17 @@ class LibSqlQueryable implements const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { columns, rows, columnTypes: declaredColumnTypes } = await this.performIO(query) - - const columnTypes = getColumnTypes(declaredColumnTypes, rows) + const ioResult = await this.performIO(query) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } + return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { + const columnTypes = getColumnTypes(declaredColumnTypes, rows) - return ok(resultSet) + return { + columnNames: columns, + columnTypes, + rows: rows.map((row) => mapRow(row, columnTypes)), + } + }) } /** @@ -55,8 +60,7 @@ class LibSqlQueryable implements const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowsAffected } = await this.performIO(query) - return ok(rowsAffected ?? 0) + return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) } /** @@ -64,14 +68,22 @@ class LibSqlQueryable implements * Should the query fail due to a connection error, the connection is * marked as unhealthy. */ - private async performIO(query: Query) { + private async performIO(query: Query): Promise> { const release = await this[LOCK_TAG].acquire() try { const result = await this.client.execute(query as InStatement) - return result + return ok(result) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) + const rawCode = error['rawCode'] ?? 
e.cause?.['rawCode'] + if (typeof rawCode === 'number') { + return err({ + kind: 'Sqlite', + extendedCode: rawCode, + message: error.message, + }) + } throw error } finally { release() @@ -82,11 +94,7 @@ class LibSqlQueryable implements class LibSqlTransaction extends LibSqlQueryable implements Transaction { finished = false - constructor( - client: TransactionClient, - readonly options: TransactionOptions, - readonly unlockParent: () => void, - ) { + constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { super(client) } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 104b23d233c5..92019f81824b 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -53,6 +53,14 @@ export type Error = message: string state: string } + | { + kind: 'Sqlite' + /** + * Sqlite extended error code: https://www.sqlite.org/rescode.html + */ + extendedCode: number + message: string + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 3f7f13d3ff6a..9a82ffdbac63 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -435,21 +435,21 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.23 + libsql: 0.1.28 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate - /@libsql/darwin-arm64@0.1.23: - resolution: {integrity: sha512-+V9aoOrZ47iYbY5NrcS0F2bDOCH407QI0wxAtss0CLOcFxlz/T6Nw0ryLK31GabklJQAmOXIyqkumLfz5HT64w==} + /@libsql/darwin-arm64@0.1.28: + resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} cpu: [arm64] os: [darwin] requiresBuild: true optional: true - /@libsql/darwin-x64@0.1.23: - resolution: {integrity: sha512-toHo7s0HiMl4VCIfjhGXDe9bGWWo78eP8fxIbwU6RlaLO6MNV9fjHY/GjTWccWOwyxcT+q6X/kUc957HnoW3bg==} + /@libsql/darwin-x64@0.1.28: + resolution: {integrity: sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} cpu: [x64] os: [darwin] requiresBuild: true @@ -484,22 +484,29 @@ packages: - bufferutil - utf-8-validate - /@libsql/linux-x64-gnu@0.1.23: - resolution: {integrity: sha512-U11LdjayakOj0lQCHDYkTgUfe4Q+7AjZZh8MzgEDF/9l0bmKNI3eFLWA3JD2Xm98yz65lUx95om0WKOKu5VW/w==} + /@libsql/linux-arm64-gnu@0.1.28: + resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} + cpu: [arm64] + os: [linux] + requiresBuild: true + optional: true + + /@libsql/linux-x64-gnu@0.1.28: + resolution: {integrity: sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} cpu: [x64] os: [linux] requiresBuild: true optional: true - /@libsql/linux-x64-musl@0.1.23: - resolution: {integrity: sha512-8UcCK2sPVzcafHsEmcU5IDp/NxjD6F6JFS5giijsMX5iGgxYQiiwTUMOmSxW0AWBeT4VY5U7G6rG5PC8JSFtfg==} + /@libsql/linux-x64-musl@0.1.28: + resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} cpu: [x64] os: [linux] requiresBuild: true optional: true - /@libsql/win32-x64-msvc@0.1.23: - resolution: {integrity: 
sha512-HAugD66jTmRRRGNMLKRiaFeMOC3mgUsAiuO6NRdRz3nM6saf9e5QqN/Ppuu9yqHHcZfv7VhQ9UGlAvzVK64Itg==} + /@libsql/win32-x64-msvc@0.1.28: + resolution: {integrity: sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==} cpu: [x64] os: [win32] requiresBuild: true @@ -971,19 +978,20 @@ packages: /js-base64@3.7.5: resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} - /libsql@0.1.23: - resolution: {integrity: sha512-Nf/1B2Glxvcnba4jYFhXcaYmicyBA3RRm0LVwBkTl8UWCIDbX+Ad7c1ecrQwixPLPffWOVxKIqyCNTuUHUkVgA==} + /libsql@0.1.28: + resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==} cpu: [x64, arm64] os: [darwin, linux, win32] dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.1.23 - '@libsql/darwin-x64': 0.1.23 - '@libsql/linux-x64-gnu': 0.1.23 - '@libsql/linux-x64-musl': 0.1.23 - '@libsql/win32-x64-msvc': 0.1.23 + '@libsql/darwin-arm64': 0.1.28 + '@libsql/darwin-x64': 0.1.28 + '@libsql/linux-arm64-gnu': 0.1.28 + '@libsql/linux-x64-gnu': 0.1.28 + '@libsql/linux-x64-musl': 0.1.28 + '@libsql/win32-x64-msvc': 0.1.28 /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index e94eacbae328..c50ad3e257ab 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -290,13 +290,13 @@ export function smokeTestLibquery( }) it('expected error (on duplicate insert) as json result (not throwing error)', async () => { - // clean up first await doQuery({ modelName: 'Unique', action: 'deleteMany', query: { + arguments: {}, selection: { - count: true, + $scalars: true, }, }, }) @@ -327,17 +327,9 @@ export function smokeTestLibquery( }, }) - if (flavour === 'postgres' || flavour === 'mysql') { - const result = await promise - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') - } else { - await assert.rejects(promise, (err) => { - assert(typeof err === 'object' && err !== null) - assert.match(err['message'], /unique/i) - return true - }) - } + const result = await promise + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') }) describe('read scalar and non scalar types', () => { diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index 08397d834ed0..c43f66a81e72 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,5 +1,5 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, MysqlError, PostgresError}; +use quaint::error::{Error as QuaintError, MysqlError, PostgresError, SqliteError}; use serde::Deserialize; #[derive(Deserialize)] @@ -21,6 +21,13 @@ pub struct MysqlErrorDef { pub state: String, } +#[derive(Deserialize)] +#[serde(remote = "SqliteError", rename_all = "camelCase")] +pub struct SqliteErrorDef { + pub extended_code: i32, + pub message: Option, +} + #[derive(Deserialize)] 
#[serde(tag = "kind")] /// Wrapper for JS-side errors @@ -33,7 +40,7 @@ pub(crate) enum DriverAdapterError { Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), - // in the future, expected errors that map to known user errors with PXXX codes will also go here + Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError), } impl FromNapiValue for DriverAdapterError { @@ -50,6 +57,7 @@ impl From for QuaintError { DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), DriverAdapterError::Postgres(e) => e.into(), DriverAdapterError::Mysql(e) => e.into(), + DriverAdapterError::Sqlite(e) => e.into(), // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } From 39b6c54adcb81cacdaca4648194c925105406ae9 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 23 Oct 2023 19:31:01 +0200 Subject: [PATCH 109/128] chore(docs): Add "how to" README sections to public-facing Driver Adapters (#4377) * chore(driver-adapters): add README for PlanetScale * chore(driver-adapters): add README for Neon * chore(driver-adapters): add README for LibSQL * chore: fix sentence removing env var reference --- .../js/adapter-libsql/README.md | 90 +++++++++++++++++++ .../driver-adapters/js/adapter-neon/README.md | 68 +++++++++++++- .../js/adapter-planetscale/README.md | 67 +++++++++++++- 3 files changed, 223 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md index 219200af2080..5ca415ea8ec9 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ b/query-engine/driver-adapters/js/adapter-libsql/README.md @@ -3,3 +3,93 @@ Prisma driver adapter for Turso and libSQL. See https://prisma.io/turso for details. + +The following usage tutorial is valid for Prisma 5.4.2 and later versions. + +## How to install + +After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver. + +```sh +npm install @prisma/adapter-libsql +npm install @libsql/client +``` + +Make sure your Turso database connection string and authentication token is copied over to your `.env` file. The connection string will start with `libsql://`. + +```env +# .env +TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." +TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io" +``` + +You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. + +```prisma +// schema.prisma +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +datasource db { + provider = "sqlite" + url = "file:./dev.db" +} +``` + +Now run `npx prisma generate` to re-generate Prisma Client. + +## How to setup migrations + +As Turso needs to sync between a local sqlite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should: + +1. Create a baseline migration + +```sh +npx prisma migrate diff --from-empty \ + --to-schema-datamodel prisma/schema.prisma \ + --script > baseline.sql +``` + +2. 
Apply the migration to your Turso database + +```sh +turso db shell turso-prisma < baseline.sql +``` + +## How to use + +In TypeScript, you will need to: + +1. Import packages +2. Set up the libSQL serverless database driver +3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver +4. Pass the driver adapter to the Prisma Client instance + +```typescript +// Import needed packages +import { PrismaClient } from '@prisma/client'; +import { PrismaLibSQL } from '@prisma/adapter-libsql'; +import { createClient } from '@libsql/client'; + +// Setup +const connectionString = `${process.env.TURSO_DATABASE_URL}`; +const authToken = `${process.env.TURSO_AUTH_TOKEN}`; + +// Init prisma client +const libsql = createClient({ + url: connectionString, + authToken, +}); +const adapter = new PrismaLibSQL(libsql); +const prisma = new PrismaClient({ adapter }); + +// Use Prisma Client as normal +``` + +Your Prisma Client instance now uses a **single** remote Turso database. +You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. +We encourage you to create an issue if you find something missing or run into a bug. + +If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md index 8af259ab74c1..f36f44c6bca4 100644 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ b/query-engine/driver-adapters/js/adapter-neon/README.md @@ -2,4 +2,70 @@ Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). -See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. +See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. + +The following usage tutorial is valid for Prisma 5.4.2 and later versions. + +## How to install + +After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. + +```sh +npm install @prisma/adapter-neon +npm install @neondatabase/serverless +npm install ws +``` + +Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. The connection string will start with `postgres://`. + +```env +# .env +DATABASE_URL="postgres://..." +``` + +Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. + +```prisma +// schema.prisma +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} +``` + +Now run `npx prisma generate` to re-generate Prisma Client. + +## How to use + +In TypeScript, you will need to: + +1. Import packages +2. Set up the Neon serverless database driver +3. Instantiate the Prisma Neon adapter with the Neon serverless database driver +4. 
Pass the driver adapter to the Prisma Client instance
+
+```typescript
+// Import needed packages
+import { Pool, neonConfig } from '@neondatabase/serverless';
+import { PrismaNeon } from '@prisma/adapter-neon';
+import { PrismaClient } from '@prisma/client';
+import ws from 'ws';
+
+// Setup
+neonConfig.webSocketConstructor = ws;
+const connectionString = `${process.env.DATABASE_URL}`;
+
+// Init prisma client
+const pool = new Pool({ connectionString });
+const adapter = new PrismaNeon(pool);
+const prisma = new PrismaClient({ adapter });
+
+// Use Prisma Client as normal
+```
+
+Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety.
+
+If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development.
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md
index 8e145c07c098..a4cdc132036a 100644
--- a/query-engine/driver-adapters/js/adapter-planetscale/README.md
+++ b/query-engine/driver-adapters/js/adapter-planetscale/README.md
@@ -2,5 +2,70 @@

 Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js).

-See https://github.com/prisma/prisma/releases/tag/5.4.0 for details.
+See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details.

+The following usage tutorial is valid for Prisma 5.4.2 and later versions.
+
+## How to install
+
+After [getting started with PlanetScale](https://planetscale.com/docs/tutorials/planetscale-quick-start-guide), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver.
+
+```sh
+npm install @prisma/adapter-planetscale
+npm install @planetscale/database
+npm install undici
+```
+
+Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`.
+
+```env
+# .env
+DATABASE_URL="mysql://..."
+```
+
+You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature.
+
+```prisma
+// schema.prisma
+generator client {
+  provider        = "prisma-client-js"
+  previewFeatures = ["driverAdapters"]
+}
+
+datasource db {
+  provider     = "mysql"
+  url          = env("DATABASE_URL")
+  relationMode = "prisma"
+}
+```
+
+Now run `npx prisma generate` to re-generate Prisma Client.
+
+## How to use
+
+In TypeScript, you will need to:
+
+1. Import packages
+2. Set up the PlanetScale serverless database driver
+3. Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver
+4.
Pass the driver adapter to the Prisma Client instance
+
+```typescript
+// Import needed packages
+import { connect } from '@planetscale/database';
+import { PrismaPlanetScale } from '@prisma/adapter-planetscale';
+import { PrismaClient } from '@prisma/client';
+import { fetch as undiciFetch } from 'undici';
+
+// Setup
+const connectionString = `${process.env.DATABASE_URL}`;
+
+// Init prisma client
+const connection = connect({ url: connectionString, fetch: undiciFetch });
+const adapter = new PrismaPlanetScale(connection);
+const prisma = new PrismaClient({ adapter });
+
+// Use Prisma Client as normal
+```
+
+Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [connection reliability and performance](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma's connection pool, while Prisma continues to provide error handling and type safety.
+
+If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development.

From 2450f885b75e29f5a6d7cde46d6a3e05290e5b33 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Tue, 24 Oct 2023 16:11:10 +0200
Subject: [PATCH 110/128] chore(docker): switch restart to unless-stopped +
 auto-formatting (#4369)

---
 .../workflows/publish-prisma-schema-wasm.yml  |  10 +-
 docker-compose.yml                            | 206 +++++++++---------
 quaint/docker-compose.yml                     |  34 +--
 3 files changed, 127 insertions(+), 123 deletions(-)

diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml
index e166c05e5841..f453811009ce 100644
--- a/.github/workflows/publish-prisma-schema-wasm.yml
+++ b/.github/workflows/publish-prisma-schema-wasm.yml
@@ -12,7 +12,7 @@ on:
       required: true
     npmDistTag:
       required: true
-      default: "latest"
+      default: 'latest'

 jobs:
   build:
@@ -21,7 +21,7 @@ jobs:
     steps:
       - name: Print input
         env:
-          THE_INPUT: "${{ toJson(github.event.inputs) }}"
+          THE_INPUT: '${{ toJson(github.event.inputs) }}'
         run: |
           echo $THE_INPUT

@@ -42,7 +42,7 @@

       - uses: actions/setup-node@v3
         with:
-          node-version: "14.x"
+          node-version: '14.x'

       - name: Set up NPM token
         run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
@@ -65,6 +65,6 @@ jobs:
         if: ${{ failure() }}
         uses: rtCamp/action-slack-notify@v2.2.1
         env:
-          SLACK_TITLE: "prisma-schema-wasm publishing failed :x:"
-          SLACK_COLOR: "#FF0000"
+          SLACK_TITLE: 'prisma-schema-wasm publishing failed :x:'
+          SLACK_COLOR: '#FF0000'
           SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }}
diff --git a/docker-compose.yml b/docker-compose.yml
index fad49d836cde..fc585adabafe 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,113 +1,115 @@
-version: "3"
+version: '3'
 services:
   cockroach_23_1:
     image: prismagraphql/cockroachdb-custom:23.1
+    restart: unless-stopped
     command: |
       start-single-node --insecure
     ports:
-      - "26260:26257"
+      - '26260:26257'
     networks:
       - databases

   cockroach_22_2:
     image: prismagraphql/cockroachdb-custom:22.2
-    restart: always
+    restart: unless-stopped
     command: start-single-node --insecure
     ports:
-      - "26259:26257"
+      - '26259:26257'
     networks:
       - databases

   cockroach_22_1_0:
     image: prismagraphql/cockroachdb-custom:22.1.0
-    restart: always
+    restart: unless-stopped
     command: start-single-node --insecure
     ports:
-      - "26257:26257"
+      - '26257:26257'
     networks:
       - databases
cockroach_21_2_0_patched: image: prismagraphql/cockroachdb-custom:21.2.0-patched - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26258:26257" + - '26258:26257' networks: - databases pgbouncer: image: brainsam/pgbouncer:latest - restart: always + restart: unless-stopped environment: - DB_HOST: "postgres11" - DB_PORT: "5432" - DB_USER: "postgres" - DB_PASSWORD: "prisma" - POOL_MODE: "transaction" - MAX_CLIENT_CONN: "1000" + DB_HOST: 'postgres11' + DB_PORT: '5432' + DB_USER: 'postgres' + DB_PASSWORD: 'prisma' + POOL_MODE: 'transaction' + MAX_CLIENT_CONN: '1000' networks: - databases ports: - - "6432:6432" + - '6432:6432' postgres9: image: postgres:9.6 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5431:5432" + - '5431:5432' networks: - databases postgres10: image: postgres:10 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5432:5432" + - '5432:5432' networks: - databases postgres11: image: postgres:11 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5433:5432" + - '5433:5432' networks: - databases postgres12: image: postgres:12 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5434:5432" + - '5434:5432' networks: - databases postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5435:5432" + - '5435:5432' networks: - databases neon-postgres13: image: ghcr.io/neondatabase/wsproxy:latest + restart: unless-stopped environment: # the port of the postgres13 within the databases network APPEND_PORT: 'postgres13:5432' @@ -131,50 +133,50 @@ services: - '8085:8085' depends_on: - vitess-test-8_0 - restart: always + restart: unless-stopped healthcheck: - test: [ 'CMD', 'nc', '-z', '127.0.0.1', '8085' ] + test: ['CMD', 'nc', '-z', '127.0.0.1', '8085'] interval: 5s timeout: 2s retries: 20 postgres14: image: postgres:14 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5437:5432" + - '5437:5432' networks: - databases postgres15: image: postgres:15 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5438:5432" + - '5438:5432' networks: - databases mysql-5-6: image: mysql:5.6.50 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3309:3306" + - '3309:3306' networks: - databases tmpfs: /var/lib/mysql @@ -182,14 +184,14 @@ services: mysql-5-7: image: mysql:5.7.32 
command: mysqld
-    restart: always
+    restart: unless-stopped
     platform: linux/x86_64
     environment:
       MYSQL_USER: root
       MYSQL_ROOT_PASSWORD: prisma
       MYSQL_DATABASE: prisma
     ports:
-      - "3306:3306"
+      - '3306:3306'
     networks:
       - databases
     tmpfs: /var/lib/mysql

@@ -197,33 +199,33 @@
   mysql-8-0:
     image: mysql:8.0.28
     command: mysqld
-    restart: always
+    restart: unless-stopped
     platform: linux/x86_64
     environment:
       MYSQL_ROOT_PASSWORD: prisma
       MYSQL_DATABASE: prisma
     ports:
-      - "3307:3306"
+      - '3307:3306'
     networks:
       - databases
     tmpfs: /var/lib/mysql8

   mariadb-10-0:
     image: mariadb:10
-    restart: always
+    restart: unless-stopped
     environment:
       MYSQL_USER: root
       MYSQL_ROOT_PASSWORD: prisma
       MYSQL_DATABASE: prisma
     ports:
-      - "3308:3306"
+      - '3308:3306'
     networks:
       - databases
     tmpfs: /var/lib/mariadb

   vitess-test-5_7:
     image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f
-    restart: always
+    restart: unless-stopped
     ports:
       - 33577:33577
     environment:
@@ -243,7 +245,7 @@

   vitess-test-8_0:
     image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e
-    restart: always
+    restart: unless-stopped
     ports:
       - 33807:33807
     environment:
@@ -263,7 +265,7 @@

   vitess-shadow-5_7:
     image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f
-    restart: always
+    restart: unless-stopped
     ports:
       - 33578:33577
     environment:
@@ -283,7 +285,7 @@

   vitess-shadow-8_0:
     image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e
-    restart: always
+    restart: unless-stopped
     ports:
       - 33808:33807
     environment:
@@ -303,139 +305,140 @@

   mssql-2017:
     image: mcr.microsoft.com/mssql/server:2017-latest
-    restart: always
+    restart: unless-stopped
     environment:
-      ACCEPT_EULA: "Y"
-      SA_PASSWORD: "<YourStrong@Passw0rd>"
+      ACCEPT_EULA: 'Y'
+      SA_PASSWORD: '<YourStrong@Passw0rd>'
     ports:
-      - "1434:1433"
+      - '1434:1433'
     networks:
       - databases
- 
+
   mssql-2019:
     image: mcr.microsoft.com/mssql/server:2019-latest
-    restart: always
+    restart: unless-stopped
     environment:
-      ACCEPT_EULA: "Y"
-      SA_PASSWORD: "<YourStrong@Passw0rd>"
+      ACCEPT_EULA: 'Y'
+      SA_PASSWORD: '<YourStrong@Passw0rd>'
     ports:
-      - "1433:1433"
+      - '1433:1433'
     networks:
       - databases

   mssql-2022:
     image: mcr.microsoft.com/mssql/server:2022-latest
-    restart: always
+    restart: unless-stopped
     environment:
-      ACCEPT_EULA: "Y"
-      SA_PASSWORD: "<YourStrong@Passw0rd>"
+      ACCEPT_EULA: 'Y'
+      SA_PASSWORD: '<YourStrong@Passw0rd>'
     ports:
-      - "1435:1433"
+      - '1435:1433'
     networks:
       - databases

   azure-edge:
     image: mcr.microsoft.com/azure-sql-edge
-    restart: always
+    restart: unless-stopped
     environment:
-      ACCEPT_EULA: "Y"
-      MSSQL_SA_PASSWORD: "<YourStrong@Passw0rd>"
+      ACCEPT_EULA: 'Y'
+      MSSQL_SA_PASSWORD: '<YourStrong@Passw0rd>'
     ports:
-      - "1433:1433"
+      - '1433:1433'
     networks:
       - databases

   mongo42:
     image: prismagraphql/mongo-single-replica:4.2.17-bionic
-    restart: always
+    restart: unless-stopped
     environment:
-      MONGO_INITDB_ROOT_USERNAME: "prisma"
-      MONGO_INITDB_ROOT_PASSWORD: "prisma"
+      MONGO_INITDB_ROOT_USERNAME: 'prisma'
+      MONGO_INITDB_ROOT_PASSWORD: 'prisma'
       MONGO_PORT: 27016
       INIT_WAIT_SEC: $INIT_WAIT_SEC
     networks:
       - databases
     ports:
-      - "27016:27016"
+      - '27016:27016'

   mongo44:
     image: prismagraphql/mongo-single-replica:4.4.3-bionic
-    restart: always
+    restart: unless-stopped
     environment:
-      MONGO_INITDB_ROOT_USERNAME: "prisma"
-      MONGO_INITDB_ROOT_PASSWORD: "prisma"
+      MONGO_INITDB_ROOT_USERNAME: 'prisma'
+      MONGO_INITDB_ROOT_PASSWORD: 'prisma'
       INIT_WAIT_SEC: $INIT_WAIT_SEC
     ports:
-      - "27017:27017"
+      - '27017:27017'
     networks:
       - databases

   mongo42-single:
     image: mongo:4.2
-    restart: always
+ restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27016:27017" + - '27016:27017' networks: - databases mongo44-single: image: mongo:4.4 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo5: image: prismagraphql/mongo-single-replica:5.0.3 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27018 INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27018" + - '27018:27018' networks: - databases mongo5-single: image: mongo:5 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27017" + - '27018:27017' networks: - databases mongo-express: image: mongo-express - restart: always + restart: unless-stopped ports: - 8081:8081 environment: - ME_CONFIG_MONGODB_ADMINUSERNAME: "prisma" - ME_CONFIG_MONGODB_ADMINPASSWORD: "prisma" + ME_CONFIG_MONGODB_ADMINUSERNAME: 'prisma' + ME_CONFIG_MONGODB_ADMINPASSWORD: 'prisma' ME_CONFIG_MONGODB_URL: mongodb://prisma:prisma@mongo4-single:27017/ networks: - databases otel: image: jaegertracing/all-in-one:1.35 + restart: unless-stopped environment: - COLLECTOR_OTLP_ENABLED: "true" - COLLECTOR_ZIPKIN_HOST_PORT: ":9411" + COLLECTOR_OTLP_ENABLED: 'true' + COLLECTOR_ZIPKIN_HOST_PORT: ':9411' ports: - 6831:6831/udp - 6832:6832/udp @@ -450,6 +453,7 @@ services: prometheus: image: prom/prometheus + restart: unless-stopped volumes: - ${PWD}/metrics/prometheus:/prometheus-data command: --config.file=/prometheus-data/prometheus.yml diff --git a/quaint/docker-compose.yml b/quaint/docker-compose.yml index ec3c06faa289..47f1a3456a6e 100644 --- a/quaint/docker-compose.yml +++ b/quaint/docker-compose.yml @@ -1,14 +1,14 @@ -version: "3" +version: '3' services: postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - PGDATA: "/pgtmpfs13" + POSTGRES_PASSWORD: 'prisma' + PGDATA: '/pgtmpfs13' ports: - - "5432:5432" + - '5432:5432' networks: - databases tmpfs: /pgtmpfs12 @@ -16,13 +16,13 @@ services: mysql57: image: mysql:5.7 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql5.7 @@ -30,48 +30,48 @@ services: mysql8: image: mysql:8.0.22 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: 
/var/lib/mariadb

   mssql:
     image: mcr.microsoft.com/mssql/server:2022-latest
-    restart: always
+    restart: unless-stopped
     environment:
-      ACCEPT_EULA: "Y"
-      SA_PASSWORD: "<YourStrong@Passw0rd>"
+      ACCEPT_EULA: 'Y'
+      SA_PASSWORD: '<YourStrong@Passw0rd>'
     ports:
-      - "1433:1433"
+      - '1433:1433'
     networks:
       - databases

   cockroach_22_2:
     image: prismagraphql/cockroachdb-custom:22.2
-    restart: always
+    restart: unless-stopped
     command: start-single-node --insecure
     ports:
-      - "26259:26257"
+      - '26259:26257'
     networks:
       - databases

From de2449110135e91857b477c346b7f74d52d61613 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Tue, 24 Oct 2023 16:29:28 +0200
Subject: [PATCH 111/128] chore(deps): update dependency node to v20.8.1
 (#4204)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 query-engine/driver-adapters/js/.nvmrc | 2 +-
 query-engine/query-engine-wasm/.nvmrc  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc
index 8c60e1e54f37..6569dfa4f323 100644
--- a/query-engine/driver-adapters/js/.nvmrc
+++ b/query-engine/driver-adapters/js/.nvmrc
@@ -1 +1 @@
-v20.5.1
+20.8.1
diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc
index 8c60e1e54f37..6569dfa4f323 100644
--- a/query-engine/query-engine-wasm/.nvmrc
+++ b/query-engine/query-engine-wasm/.nvmrc
@@ -1 +1 @@
-v20.5.1
+20.8.1

From f365956fa36e50f1c89d8ffe3997d512ab2d6fec Mon Sep 17 00:00:00 2001
From: Robert Craigie
Date: Wed, 25 Oct 2023 15:55:21 +0100
Subject: [PATCH 112/128] fix(qe): correct /status route response body (#4246)

---
 query-engine/query-engine/src/server/mod.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs
index 75543dc7ee58..f3583df310d7 100644
--- a/query-engine/query-engine/src/server/mod.rs
+++ b/query-engine/query-engine/src/server/mod.rs
@@ -63,7 +63,7 @@ pub(crate) async fn routes(cx: Arc<PrismaContext>, req: Request<Body>) -> Result<Response<Body>, hyper::Error> {
     let mut res = match (req.method(), req.uri().path()) {
         (&Method::POST, "/") => request_handler(cx, req).await?,
         (&Method::GET, "/") if cx.enabled_features.contains(Feature::Playground) => playground_handler(),
-        (&Method::GET, "/status") => build_json_response(StatusCode::OK, r#"{"status":"ok"}"#),
+        (&Method::GET, "/status") => build_json_response(StatusCode::OK, &json!({"status": "ok"})),

         (&Method::GET, "/sdl") => {
             let schema = render_graphql_schema(cx.query_schema());

From 46fa0396e2de9ab6ec99c48bd342bc513b032648 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Thu, 26 Oct 2023 13:44:56 +0200
Subject: [PATCH 113/128] ci: do not skip the buildkite pipeline when previous
 commit is empty (#4385)

---
 .buildkite/engineer | 27 +++++++++++++++++++--------
 1 file changed, 19 insertions(+), 8 deletions(-)

diff --git a/.buildkite/engineer b/.buildkite/engineer
index bf31a6e371df..98b78284eaf2 100755
--- a/.buildkite/engineer
+++ b/.buildkite/engineer
@@ -9,24 +9,35 @@ else
   echo "We are in the $2 pipeline."
 fi

+# Checks what's the diff with the previous commit
+# This is used to detect if the previous commit was empty
+GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .)
+
 # Checks what's the diff with the previous commit,
 # excluding some paths that do not need a run,
 # because they do not affect tests running in Buildkite.
-GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .
':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) +GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) # $2 is either "test" or "build", depending on the pipeline # Example: ./.buildkite/engineer pipeline test # We only want to check for changes and skip in the test pipeline. if [[ "$2" == "test" ]]; then - # Checking if GIT_DIFF is empty - # If it's empty then it's most likely that there are changes but they are in ignored paths. - # So we do not start Buildkite + # If GIT_DIFF is empty then the previous commit was empty + # We assume it's intended and we continue with the run + # Example use: to get a new engine hash built with identical code if [ -z "${GIT_DIFF}" ]; then - echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." - exit 0 + echo "The previous commit is empty, this run will continue..." else - # Note that printf works better for displaying line returns in CI - printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF}" + # Checking if GIT_DIFF_WITH_IGNORED_PATHS is empty + # If it's empty then it's most likely that there are changes but they are in ignored paths. + # So we do not start Buildkite + if [ -z "${GIT_DIFF_WITH_IGNORED_PATHS}" ]; then + echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." + exit 0 + else + # Note that printf works better for displaying line returns in CI + printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF_WITH_IGNORED_PATHS}" + fi fi fi From 51d8349124b96b4c636526990eba13a691d553a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:45:25 +0200 Subject: [PATCH 114/128] chore: login to Docker only if Docker credentials are truthy (#4381) --- .github/workflows/query-engine-black-box.yml | 1 + .github/workflows/query-engine-driver-adapters.yml | 1 + .github/workflows/query-engine.yml | 1 + .github/workflows/schema-engine.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml index 78e60178d7f7..a941588dfd8e 100644 --- a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -50,6 +50,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index d52b446b12fb..5b34b9761c4c 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -68,6 +68,7 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 9c242217662d..3df596e20d61 100644 --- a/.github/workflows/query-engine.yml +++ 
b/.github/workflows/query-engine.yml
@@ -80,6 +80,7 @@ jobs:
       - name: Login to Docker Hub
         uses: docker/login-action@v3
         continue-on-error: true
+        if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}"
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml
index 5bdf25a2bd35..c6249f069091 100644
--- a/.github/workflows/schema-engine.yml
+++ b/.github/workflows/schema-engine.yml
@@ -113,6 +113,7 @@ jobs:
       - name: Login to Docker Hub
         uses: docker/login-action@v3
         continue-on-error: true
+        if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}"
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

From 473ee41d8162d802413a60f9b23238b8e5648fd6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Thu, 26 Oct 2023 13:48:54 +0200
Subject: [PATCH 115/128] ci(buildkite): skip test&build for changes in
 query-engine/query-engine-wasm (#4371)

---
 .buildkite/engineer | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.buildkite/engineer b/.buildkite/engineer
index 98b78284eaf2..5de99cea5390 100755
--- a/.buildkite/engineer
+++ b/.buildkite/engineer
@@ -16,7 +16,7 @@ GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .)
 # Checks what's the diff with the previous commit,
 # excluding some paths that do not need a run,
 # because they do not affect tests running in Buildkite.
-GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';)
+GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!query-engine/query-engine-wasm' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';)

 # $2 is either "test" or "build", depending on the pipeline
 # Example: ./.buildkite/engineer pipeline test

From 9c1efedeb581438e6d20860d939957bc093154a3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Thu, 26 Oct 2023 14:35:07 +0200
Subject: [PATCH 116/128] ci(schema-wasm): cleanup the GitHub Action (#4370)

Co-authored-by: Jan Piotrowski
---
 .../workflows/publish-prisma-schema-wasm.yml  | 27 +++++++------------
 1 file changed, 10 insertions(+), 17 deletions(-)

diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml
index f453811009ce..070bf528654a 100644
--- a/.github/workflows/publish-prisma-schema-wasm.yml
+++ b/.github/workflows/publish-prisma-schema-wasm.yml
@@ -1,6 +1,7 @@
 name: Build and publish @prisma/prisma-schema-wasm
+run-name: npm - release @prisma/prisma-schema-wasm@${{ github.event.inputs.enginesWrapperVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }}

-concurrency: build-prisma-schema-wasm
+concurrency: publish-prisma-schema-wasm

 on:
   # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo
@@ -30,25 +31,18 @@ jobs:
         ref: ${{ github.event.inputs.enginesHash }}

       - uses: cachix/install-nix-action@v23
-      #
-      # Build
-      #
-
-      - run: nix build .#prisma-schema-wasm
-
-      #
-      # Publish
-      #
+      - name: Build
+        run: nix build .#prisma-schema-wasm

       - uses: actions/setup-node@v3
         with:
-          node-version: '14.x'
+          node-version: '20.x'
+          registry-url: 'https://registry.npmjs.org/'

-      - name: Set up NPM token
-        run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
-
-      - run: |
-
PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) + - name: Update version in package.json & Publish @prisma/prisma-schema-wasm + run: + # Update version in package.json and return directory for later usage + PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} @@ -60,7 +54,6 @@ jobs: - name: Set current job url in SLACK_FOOTER env var if: ${{ failure() }} run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 From 87000b9863599bcab1769a68b466108a0e68216b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 10:45:45 +0200 Subject: [PATCH 117/128] ci: fix publish-prisma-schema-wasm.yml (#4388) --- .github/workflows/publish-prisma-schema-wasm.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 070bf528654a..e47031837224 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -37,15 +37,16 @@ jobs: - uses: actions/setup-node@v3 with: node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' + + # This is needed to be done manually because of `PACKAGE_DIR` used later + - name: Set up NPM token for publishing later + run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Update version in package.json & Publish @prisma/prisma-schema-wasm run: # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # # Failure handlers From f2a389ec6343da9935493c8170851414a559371e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 10:56:42 +0200 Subject: [PATCH 118/128] ci: fix publish-prisma-schema-wasm.yml (#4389) --- .github/workflows/publish-prisma-schema-wasm.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index e47031837224..78d139f80772 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -47,7 +47,9 @@ jobs: # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - + env: + # Required for publishing + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # # Failure handlers # From 0bd2db9e01fa7842e8d4b6e3bc4599351bd562e0 Mon Sep 17 00:00:00 2001 From: Marco Ieni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 27 Oct 2023 10:59:27 +0200 Subject: [PATCH 119/128] fix: typo in metric description (#4387) --- query-engine/black-box-tests/tests/metrics/smoke_tests.rs | 2 +- query-engine/metrics/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-)

diff --git a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
index 8542f753b78e..3397de75af99 100644
--- a/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
+++ b/query-engine/black-box-tests/tests/metrics/smoke_tests.rs
@@ -76,7 +76,7 @@ mod smoke_tests {
             assert_eq!(metrics.matches("# HELP prisma_client_queries_active The number of currently active Prisma Client queries").count(), 1);
             assert_eq!(metrics.matches("# TYPE prisma_client_queries_active gauge").count(), 1);

-            assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for an free connection").count(), 1);
+            assert_eq!(metrics.matches("# HELP prisma_client_queries_wait The number of datasource queries currently waiting for a free connection").count(), 1);
             assert_eq!(metrics.matches("# TYPE prisma_client_queries_wait gauge").count(), 1);

             assert_eq!(metrics.matches("# HELP prisma_pool_connections_busy The number of pool connections currently executing datasource queries").count(), 1);
diff --git a/query-engine/metrics/src/lib.rs b/query-engine/metrics/src/lib.rs
index 7f34f84a8612..1965b56cb076 100644
--- a/query-engine/metrics/src/lib.rs
+++ b/query-engine/metrics/src/lib.rs
@@ -89,7 +89,7 @@ static METRIC_RENAMES: Lazy<HashMap<&'static str, (&'static str, &'static str)>>
     (MOBC_POOL_CONNECTIONS_OPEN, ("prisma_pool_connections_open", "The number of pool connections currently open")),
     (MOBC_POOL_CONNECTIONS_BUSY, ("prisma_pool_connections_busy", "The number of pool connections currently executing datasource queries")),
     (MOBC_POOL_CONNECTIONS_IDLE, ("prisma_pool_connections_idle", "The number of pool connections that are not busy running a query")),
-    (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for an free connection")),
+    (MOBC_POOL_WAIT_COUNT, ("prisma_client_queries_wait", "The number of datasource queries currently waiting for a free connection")),
     (MOBC_POOL_WAIT_DURATION, ("prisma_client_queries_wait_histogram_ms", "The distribution of the time all datasource queries spent waiting for a free connection")),
 ])
});

From 4362521bfeb4ec2631819c6c1421af0221640137 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Fri, 27 Oct 2023 11:23:06 +0200
Subject: [PATCH 120/128] chore(deps): update mysql docker tag to v5.7.44
 (#2735)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 docker-compose.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index fc585adabafe..c0d4f179e0a4 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -182,7 +182,7 @@ services:
     tmpfs: /var/lib/mysql

   mysql-5-7:
-    image: mysql:5.7.44
+    image: mysql:5.7.44
     command: mysqld
     restart: unless-stopped
     platform: linux/x86_64

From 3305eccac7176c86ac9678ad7e6da63c5b0d20c9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=ABl=20Galeran?=
Date: Fri, 27 Oct 2023 11:38:05 +0200
Subject: [PATCH 121/128] ci: fix yml files for GitHub Actions workflow for
 DOCKERHUB login (#4390)

---
 .github/workflows/query-engine-black-box.yml       | 5 ++++-
 .github/workflows/query-engine-driver-adapters.yml | 5 ++++-
 .github/workflows/query-engine.yml                 | 5 ++++-
 .github/workflows/schema-engine.yml                | 5 ++++-
 4 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml
index a941588dfd8e..5ebcd79cec4c 100644
---
a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -50,7 +50,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 5b34b9761c4c..7823bed70cfb 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -68,7 +68,10 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 3df596e20d61..762c3da4a50a 100644 --- a/.github/workflows/query-engine.yml +++ b/.github/workflows/query-engine.yml @@ -80,7 +80,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml index c6249f069091..03d23317bbd0 100644 --- a/.github/workflows/schema-engine.yml +++ b/.github/workflows/schema-engine.yml @@ -113,7 +113,10 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true - if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" + env: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + if: "${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} From 6dda9d7a540d41932067b8c8308e086f55f7dded Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Fri, 27 Oct 2023 13:51:18 +0200 Subject: [PATCH 122/128] Migrate driver adapters to prisma/prisma (#4380) * Promote connector-test-kit to the driver-adapters directory and remove js * Remove node_modules from connector-test-kit-executor * Remove dist from connector-test-kit-executor * Ignore non-relevant files * Sort out dependencies * Makefile to setup driver adapters from a checkout of prisma * Only clone prisma/prisma shallowly * Delete driver-adapter-smoke-tests.yml * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration see if this works * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration change deprecated set-output command * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration tmp remove * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration clearer detection of branch * 
DRIVER_ADAPTERS_BRANCH=driver-adapters-migration Build executor separately * DRIVER_ADAPTERS_BRANCH=driver-adapters-migration Add make tasks to test driver adapters * Document and ease running driver adapter tests * Revert "DRIVER_ADAPTERS_BRANCH=driver-adapters-migration tmp remove" This reverts commit 463775a8b467ff7aa0d3e21063f1e617014f4b81. * Move documentation to where it belongs * Document how to do integration testing in shorter loops in CI. * chore(driver-adapters): remove outdated symlink to tsconfig file * fix(driver-adapters): use ws, making connector-test-kit-executor compatible with Node.js 16+ * fix(driver-adapters): remove warning "import.meta" is not available with the "cjs" output format * chore(driver-adapters): remove references to query-engine-driver-adapters.yml * Revert "chore(driver-adapters): remove references to query-engine-driver-adapters.yml" This reverts commit eeaaa8f92a24ae8ff3951b786002ca76ae6de837. * Remove publish-driver-adapters workflow * Fix using main branch * Take back conditional on docker login after bad main merge --------- Co-authored-by: jkomyno --- .../workflows/driver-adapter-smoke-tests.yml | 131 ---- .github/workflows/publish-driver-adapters.yml | 83 -- .../query-engine-driver-adapters.yml | 11 + .gitignore | 3 + Makefile | 55 +- README.md | 26 + query-engine/connector-test-kit-rs/README.md | 33 +- .../query-tests-setup/src/config.rs | 2 +- query-engine/driver-adapters/.gitignore | 3 + .../connector-test-kit-executor/.gitignore | 3 + .../connector-test-kit-executor/package.json | 40 + .../pnpm-lock.yaml | 494 +++++------- .../script/start_node.sh | 0 .../src/engines/JsonProtocol.ts | 0 .../src/engines/Library.ts | 0 .../src/engines/QueryEngine.ts | 0 .../src/engines/Transaction.ts | 0 .../connector-test-kit-executor/src/index.ts | 3 +- .../src/jsonRpc.ts | 0 .../connector-test-kit-executor/src/qe.ts | 2 +- .../tsconfig.json | 2 +- query-engine/driver-adapters/js/.gitignore | 44 -- query-engine/driver-adapters/js/.npmrc | 2 - .../driver-adapters/js/.prettierrc.yml | 5 - query-engine/driver-adapters/js/README.md | 42 - .../js/adapter-libsql/.gitignore | 1 - .../js/adapter-libsql/README.md | 95 --- .../js/adapter-libsql/package.json | 31 - .../js/adapter-libsql/src/conversion.ts | 161 ---- .../js/adapter-libsql/src/index.ts | 1 - .../js/adapter-libsql/src/libsql.ts | 171 ----- .../js/adapter-libsql/tests/types.test.mts | 151 ---- .../js/adapter-libsql/tsconfig.build.json | 6 - .../js/adapter-libsql/tsconfig.json | 3 - .../driver-adapters/js/adapter-neon/README.md | 71 -- .../js/adapter-neon/package.json | 30 - .../js/adapter-neon/src/conversion.ts | 286 ------- .../js/adapter-neon/src/index.ts | 1 - .../js/adapter-neon/src/neon.ts | 165 ---- .../js/adapter-neon/tsconfig.build.json | 6 - .../js/adapter-neon/tsconfig.json | 3 - .../driver-adapters/js/adapter-pg/README.md | 3 - .../js/adapter-pg/package.json | 31 - .../js/adapter-pg/src/conversion.ts | 286 ------- .../js/adapter-pg/src/index.ts | 1 - .../driver-adapters/js/adapter-pg/src/pg.ts | 138 ---- .../js/adapter-planetscale/README.md | 71 -- .../js/adapter-planetscale/package.json | 29 - .../js/adapter-planetscale/src/conversion.ts | 98 --- .../js/adapter-planetscale/src/deferred.ts | 13 - .../js/adapter-planetscale/src/index.ts | 1 - .../js/adapter-planetscale/src/planetscale.ts | 181 ----- .../adapter-planetscale/tsconfig.build.json | 6 - .../js/adapter-planetscale/tsconfig.json | 3 - .../connector-test-kit-executor/package.json | 28 - .../js/driver-adapter-utils/README.md | 3 - 
.../js/driver-adapter-utils/package.json | 26 - .../js/driver-adapter-utils/src/binder.ts | 80 -- .../js/driver-adapter-utils/src/const.ts | 48 -- .../js/driver-adapter-utils/src/debug.ts | 3 - .../js/driver-adapter-utils/src/index.ts | 5 - .../js/driver-adapter-utils/src/result.ts | 41 - .../js/driver-adapter-utils/src/types.ts | 132 ---- .../driver-adapter-utils/tsconfig.build.json | 6 - .../js/driver-adapter-utils/tsconfig.json | 3 - query-engine/driver-adapters/js/package.json | 23 - .../driver-adapters/js/pnpm-workspace.yaml | 8 - .../js/smoke-test-js/.envrc.example | 26 - .../js/smoke-test-js/.gitignore | 4 - .../js/smoke-test-js/README.md | 79 -- .../js/smoke-test-js/package.json | 67 -- .../mysql/commands/type_test/insert.sql | 51 -- .../smoke-test-js/prisma/mysql/schema.prisma | 125 --- .../postgres/commands/type_test/insert.sql | 35 - .../prisma/postgres/schema.prisma | 117 --- .../sqlite/commands/type_test/insert.sql | 17 - .../20230915202554_init/migration.sql | 85 --- .../sqlite/migrations/migration_lock.toml | 3 - .../smoke-test-js/prisma/sqlite/schema.prisma | 79 -- .../driver-adapters/js/smoke-test-js/setup.sh | 7 - .../js/smoke-test-js/src/client/client.ts | 164 ---- .../smoke-test-js/src/client/libsql.test.ts | 20 - .../src/client/neon.http.test.ts | 13 - .../smoke-test-js/src/client/neon.ws.test.ts | 16 - .../js/smoke-test-js/src/client/pg.test.ts | 13 - .../src/client/planetscale.test.ts | 13 - .../src/engines/types/JsonProtocol.ts | 78 -- .../src/engines/types/Library.ts | 46 -- .../src/engines/types/QueryEngine.ts | 97 --- .../src/engines/types/Transaction.ts | 35 - .../smoke-test-js/src/libquery/errors.test.ts | 105 --- .../js/smoke-test-js/src/libquery/libquery.ts | 722 ------------------ .../smoke-test-js/src/libquery/libsql.test.ts | 22 - .../src/libquery/neon.http.test.ts | 16 - .../src/libquery/neon.ws.test.ts | 18 - .../js/smoke-test-js/src/libquery/pg.test.ts | 15 - .../src/libquery/planetscale.test.ts | 15 - .../js/smoke-test-js/src/libquery/util.ts | 71 -- .../js/smoke-test-js/tsconfig.json | 3 - query-engine/driver-adapters/js/version.sh | 15 - query-engine/driver-adapters/src/result.rs | 2 - 101 files changed, 348 insertions(+), 5279 deletions(-) delete mode 100644 .github/workflows/driver-adapter-smoke-tests.yml delete mode 100644 .github/workflows/publish-driver-adapters.yml create mode 100644 query-engine/driver-adapters/.gitignore create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/.gitignore create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/package.json rename query-engine/driver-adapters/{js => connector-test-kit-executor}/pnpm-lock.yaml (79%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/script/start_node.sh (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/JsonProtocol.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/Library.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/QueryEngine.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/Transaction.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/index.ts (99%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/jsonRpc.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/qe.ts (92%) rename query-engine/driver-adapters/{js => 
connector-test-kit-executor}/tsconfig.json (99%) delete mode 100644 query-engine/driver-adapters/js/.gitignore delete mode 100644 query-engine/driver-adapters/js/.npmrc delete mode 100644 query-engine/driver-adapters/js/.prettierrc.yml delete mode 100644 query-engine/driver-adapters/js/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/.gitignore delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-neon/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/neon.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/adapter-pg/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-pg/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/pg.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/connector-test-kit-executor/package.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/README.md delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/package.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json delete mode 100644 
query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/package.json delete mode 100644 query-engine/driver-adapters/js/pnpm-workspace.yaml delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/.envrc.example delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/.gitignore delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/README.md delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/package.json delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/setup.sh delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/tsconfig.json delete mode 100755 query-engine/driver-adapters/js/version.sh diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml deleted file mode 100644 index 802e3188dedc..000000000000 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: Driver Adapters, Smoke Tests -on: - push: - 
branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/driver-adapter-smoke-tests.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -jobs: - driver-adapter-smoke-tests: - name: ${{ matrix.adapter }} - - strategy: - fail-fast: false - matrix: - adapter: ['neon:ws', 'neon:http', planetscale, pg, libsql] - - runs-on: ubuntu-latest - - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - # via package.json rewritten into DATABASE_URL before scripts are run - env: - JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} - JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} - # TODO: test sqld and embedded replicas - JS_LIBSQL_DATABASE_URL: file:/tmp/libsql.db - # TODO: test all three of ("number", "bigint", "string") and conditionally skip some tests as appropriate - JS_LIBSQL_INT_MODE: bigint - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - run: pnpm prisma:${{ matrix.adapter }} - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - run: pnpm ${{ matrix.adapter }}:libquery - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine! 
- from Npm) - run: pnpm ${{ matrix.adapter }}:client - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - - driver-adapter-smoke-tests-errors: - name: Errors - - runs-on: ubuntu-latest - - # services: - # postgres: - # image: postgres - # env: - # POSTGRES_PASSWORD: postgres - # options: >- - # --health-cmd pg_isready - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 5432:5432 - - env: - # via package.json rewritten into DATABASE_URL before scripts are run - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - name: pnpm errors - run: pnpm errors - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml deleted file mode 100644 index 7da972c35e1b..000000000000 --- a/.github/workflows/publish-driver-adapters.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Build and publish Prisma Driver Adapters -run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} - -concurrency: publish-prisma-driver-adapters - -on: - # usually triggered via GH Actions Workflow in prisma/prisma repo - workflow_dispatch: - inputs: - enginesHash: - description: Engine commit hash to checkout for publishing - required: true - prismaVersion: - description: Prisma version to use for publishing - required: true - npmDistTag: - description: npm dist-tag to use for publishing - required: true - default: 'latest' - dryRun: - description: 'Check to do a dry run (does not publish packages)' - type: boolean - -jobs: - build: - name: Build and publish Prisma Driver Adapters - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - - uses: pnpm/action-setup@v2.4.0 - with: - version: 8 - - - uses: actions/setup-node@v3 - with: - node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' - - - name: Install dependencies - run: pnpm i - working-directory: query-engine/driver-adapters/js - - - name: Build - run: pnpm -r build - working-directory: query-engine/driver-adapters/js - - - name: Update version in package.json - run: | - # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result - find . 
-name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - working-directory: query-engine/driver-adapters/js - - - name: Publish Prisma Driver Adapters packages - run: | - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} - working-directory: query-engine/driver-adapters/js - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} - - # - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma driver adapters publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 7823bed70cfb..f3a3badfb804 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -49,6 +49,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: 'Setup Node.js' uses: actions/setup-node@v3 @@ -76,6 +78,15 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract Branch Name + id: extract-branch + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n "$branch" ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi + - run: make ${{ matrix.adapter.setup_task }} - uses: dtolnay/rust-toolchain@stable diff --git a/.gitignore b/.gitignore index 43e03e31867d..be185b0f7afc 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,6 @@ dmmf.json graph.dot prisma-schema-wasm/nodejs + +# This symlink looks orphan here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory +tsconfig.build.adapter.json diff --git a/Makefile b/Makefile index 0c3e1541e632..541738c35d95 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -44,7 +45,13 @@ release: ################# test-qe: +ifndef DRIVER_ADAPTER cargo test --package query-engine-tests +else + @echo "Executing query engine tests with $(DRIVER_ADAPTER) driver adapter"; \ + # Add your actual command for the "test-driver-adapter" task here + $(MAKE) test-driver-adapter-$(DRIVER_ADAPTER); +endif test-qe-verbose: cargo test --package query-engine-tests -- --nocapture @@ -80,6 +87,10 @@ dev-sqlite: dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) +test-libsql-sqlite: dev-libsql-sqlite test-qe-st + +test-driver-adapter-libsql: test-libsql-sqlite + start-postgres9: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 @@ -115,12 +126,20 @@ start-pg-postgres13: build-qe-napi build-connector-kit-js 
start-postgres13 dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) +test-pg-postgres13: dev-pg-postgres13 test-qe-st + +test-driver-adapter-pg: test-pg-postgres13 + start-neon-postgres13: build-qe-napi build-connector-kit-js docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 dev-neon-ws-postgres13: start-neon-postgres13 cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) +test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st + +test-driver-adapter-neon: test-neon-ws-postgres13 + start-postgres14: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 @@ -255,6 +274,10 @@ start-planetscale-vitess8: build-qe-napi build-connector-kit-js dev-planetscale-vitess8: start-planetscale-vitess8 cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) +test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st + +test-driver-adapter-planetscale: test-planetscale-vitess8 + ###################### # Local dev commands # ###################### @@ -262,8 +285,36 @@ dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: - cd query-engine/driver-adapters/js && pnpm i && pnpm build +build-connector-kit-js: build-driver-adapters symlink-driver-adapters + cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build + +build-driver-adapters: ensure-prisma-present + @echo "Building driver adapters..." + @cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build + @echo "Driver adapters build completed."; + +symlink-driver-adapters: ensure-prisma-present + @echo "Creating symbolic links for driver adapters..." + @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ + if [ -d "$$dir" ]; then \ + dir_name=$$(basename "$$dir"); \ + ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ + echo "Created symbolic link for $$dir_name"; \ + fi; \ + done; + echo "Symbolic links creation completed."; + +ensure-prisma-present: + @if [ -d ../prisma ]; then \ + cd "$(realpath ../prisma)" && git fetch origin main; \ + LOCAL_CHANGES=$$(git diff --name-only HEAD origin/main -- 'packages/*adapter*'); \ + if [ -n "$$LOCAL_CHANGES" ]; then \ + echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ + fi \ + else \ + echo "git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) ../prisma"; \ + git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ + fi; # Quick schema validation of whatever you have in the dev_datamodel.prisma file. validate: diff --git a/README.md b/README.md index 6fd072072757..49c7c1a8ab39 100644 --- a/README.md +++ b/README.md @@ -203,6 +203,7 @@ integration tests. - Alternatively: Load the defined environment in `./.envrc` manually in your shell. **Setup:** + There are helper `make` commands to set up a test environment for a specific database connector you want to test. The commands set up a container (if needed) and write the `.test_config` file, which is picked up by the integration @@ -234,6 +235,31 @@ Other variables may or may not be useful. Run `cargo test` in the repository root. 
+### Testing driver adapters
+
+Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README.
+
+**ℹ️ Important note on developing features that require changes to both the query engine and the driver adapters code**
+
+As explained in [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters), running `DRIVER_ADAPTER=$adapter make test-qe`
+will ensure you have prisma checked out in your filesystem in the same directory as prisma-engines. This is needed because the driver adapters code is symlinked into prisma-engines.
+
+When working on a feature or bugfix spanning the adapters code and the query-engine code, you will need to open sibling PRs in `prisma/prisma` and `prisma/prisma-engines` respectively.
+Locally, each time you run `DRIVER_ADAPTER=$adapter make test-qe`, the tests run against the driver adapters built from the source code in your working copy of prisma/prisma.
+
+In CI, though, we need to specify which branch of prisma/prisma to use for tests, because there is no working copy of prisma/prisma before the tests run.
+The CI job clones the `main` branch of prisma/prisma by default, which doesn't include your local changes. To test the integration, you can tell CI to use the prisma/prisma branch
+containing the adapter changes through a simple convention in commit messages:
+
+```
+git commit -m "DRIVER_ADAPTERS_BRANCH=prisma-branch-with-changes-in-adapters [...]"
+```
+
+GitHub Actions will then pick up the branch name, clone that branch of prisma/prisma, and build the driver adapters code from there.
+
+When it's time to merge the sibling PRs, merge the prisma/prisma PR first, so that when the engines PR is merged, the adapter code is already on the prisma/prisma `main` branch.
+
+
 ## Parallel rust-analyzer builds

 When rust-analyzer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer. It is possible to avoid this by setting a different build path for
diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md
index 2c849a2aa985..97d19467879a 100644
--- a/query-engine/connector-test-kit-rs/README.md
+++ b/query-engine/connector-test-kit-rs/README.md
@@ -64,34 +64,45 @@ On the note of docker containers: Most connectors require an endpoint to run aga
 If you choose to set up the databases yourself, please note that you need to use the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) to set up the user, password and database for the test user.

+### Running
+
+Note that by default tests run concurrently.
+
+- VSCode should automatically detect tests and display `run test`.
+- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`.
+- Run `cargo test` in the `query-engine-tests` crate.
+- A single test can be run with the normal cargo facilities from the command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture`, where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path of the test you want to run.
+- To run a single relation test, define the `RELATION_TEST_IDX` env var with its index.
+
 #### Running tests through driver adapters

-The query engine is able to delegate query execution to javascript through [driver adapters](query-engine/driver-adapters/js/README.md).
-This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs drivers the code that actually communicates with the databases.
+The query engine is able to delegate query execution to JavaScript through driver adapters.
+This means that instead of drivers being implemented in Rust, a layer of adapters over Node.js
+drivers is the code that actually communicates with the databases.
 See [`adapter-*` packages in prisma/prisma](https://github.com/prisma/prisma/tree/main/packages).

 To run tests through a driver adapter, you should also configure the following environment variables:

-* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication.
+* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries. This is a Node.js process running a program that reads the queries to run from STDIN and returns responses to STDOUT. The connector kit follows a JSON-RPC protocol for this communication.
 * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter.
 * `DRIVER_ADAPTER_CONFIG`: a JSON string with the configuration for the driver adapter. This is adapter-specific. See the [GitHub workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters.

 Example:

 ```shell
-export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"
+export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"
 export DRIVER_ADAPTER=neon
 export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }'
 ```

-### Running
+We have provided helpers to run the query-engine tests with driver adapters; these helpers set all the required environment
+variables for you:

-Note that by default tests run concurrently.
+```shell
+DRIVER_ADAPTER=$adapter make test-qe
+```
+
+Where `$adapter` is one of the supported adapters: `neon`, `planetscale`, `libsql`.

-- VSCode should automatically detect tests and display `run test`.
-- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`.
-- `cargo test` in the `query-engine-tests` crate.
-- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test.
-- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index.

 ## Authoring tests

 The following is an example of how to write a new test suite; extending or changing an existing one follows the same rules and considerations.
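As a concrete illustration, here is a minimal sketch of what `DRIVER_ADAPTER=$adapter make test-qe` amounts to when done by hand for the `neon` adapter. It assumes `$WORKSPACE_ROOT` points at your prisma-engines checkout and that `make dev-neon-ws-postgres13` has already started the database container and written `.test_config`; the proxy URL is the example value from the README above.

```shell
#!/usr/bin/env bash
set -euo pipefail

# External executor: a node process that reads queries from STDIN and
# answers on STDOUT over JSON-RPC (see the README section above).
export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"

# Which adapter to exercise, plus its adapter-specific JSON config.
export DRIVER_ADAPTER=neon
export DRIVER_ADAPTER_CONFIG='{ "proxyUrl": "127.0.0.1:5488/v1" }'

# Run the connector test kit; equivalent to the `make test-qe` helper
# resolving to test-driver-adapter-neon in the Makefile.
cargo test --package query-engine-tests
```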
diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index b27f27406e5c..4af4e763298a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -159,7 +159,7 @@ impl TestConfig { /// and the workspace_root is set, then use the default external test executor. fn fill_defaults(&mut self) { const DEFAULT_TEST_EXECUTOR: &str = - "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + "query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"; if self .external_test_executor diff --git a/query-engine/driver-adapters/.gitignore b/query-engine/driver-adapters/.gitignore new file mode 100644 index 000000000000..dab5c8905550 --- /dev/null +++ b/query-engine/driver-adapters/.gitignore @@ -0,0 +1,3 @@ +node_modules +adapter-* +driver-adapter-utils diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore new file mode 100644 index 000000000000..37b61ff565c7 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore @@ -0,0 +1,3 @@ +node_modules +pnpm-debug.log +dist/ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json new file mode 100644 index 000000000000..b63694bb4459 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -0,0 +1,40 @@ +{ + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "name": "connector-test-kit-executor", + "version": "0.0.1", + "description": "", + "main": "dist/index.mjs", + "module": "dist/index.mjs", + "private": true, + "scripts": { + "build": "tsup ./src/index.ts --format esm --dts", + "lint": "tsc -p ./tsconfig.build.json" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@libsql/client": "0.3.5", + "@neondatabase/serverless": "^0.6.0", + "@planetscale/database": "1.11.0", + "@prisma/adapter-libsql": "../adapter-libsql", + "@prisma/adapter-neon": "../adapter-neon", + "@prisma/adapter-pg": "../adapter-pg", + "@prisma/adapter-planetscale": "../adapter-planetscale", + "@prisma/driver-adapter-utils": "../driver-adapter-utils", + "@types/pg": "^8.10.2", + "pg": "^8.11.3", + "undici": "^5.26.5", + "ws": "^8.14.2" + }, + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "tsx": "^3.12.7", + "typescript": "^5.1.6" + } +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml similarity index 79% rename from query-engine/driver-adapters/js/pnpm-lock.yaml rename to query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index 9a82ffdbac63..d140be7b516c 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -4,197 +4,79 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - tsx: - specifier: ^3.12.7 - version: 3.12.7 - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - adapter-libsql: - dependencies: - 
'@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - async-mutex: - specifier: 0.4.0 - version: 0.4.0 - devDependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - - adapter-neon: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - - adapter-pg: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - - adapter-planetscale: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - - connector-test-kit-executor: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: 1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - undici: - specifier: ^5.26.2 - version: 5.26.2 - - driver-adapter-utils: - dependencies: - debug: - specifier: ^4.3.4 - version: 4.3.4 - devDependencies: - '@types/debug': - specifier: ^4.1.8 - version: 4.1.8 - - smoke-test-js: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/client': - specifier: 5.4.2 - version: 5.4.2(prisma@5.4.2) - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - pg: - specifier: ^8.11.3 - version: 8.11.3 - superjson: - specifier: ^1.13.1 - version: 1.13.1 - undici: - specifier: ^5.26.2 - version: 5.26.2 - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - cross-env: - specifier: ^7.0.3 - version: 7.0.3 - prisma: - specifier: 5.4.2 - version: 5.4.2 - tsx: - specifier: ^3.12.7 - version: 3.12.7 +dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: 
link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: ../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.5 + version: 5.26.5 + ws: + specifier: ^8.14.2 + version: 8.14.2 + +devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 packages: - /@esbuild-kit/cjs-loader@2.4.2: - resolution: {integrity: sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} + /@esbuild-kit/cjs-loader@2.4.4: + resolution: {integrity: sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true - /@esbuild-kit/core-utils@3.2.2: - resolution: {integrity: sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA==} + /@esbuild-kit/core-utils@3.3.2: + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} dependencies: esbuild: 0.18.20 source-map-support: 0.5.21 dev: true - /@esbuild-kit/esm-loader@2.5.5: - resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + /@esbuild-kit/esm-loader@2.6.5: + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true /@esbuild/android-arm64@0.18.20: @@ -406,7 +288,7 @@ packages: dependencies: '@jridgewell/set-array': 1.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@jridgewell/trace-mapping': 0.3.19 + '@jridgewell/trace-mapping': 0.3.20 dev: true /@jridgewell/resolve-uri@3.1.1: @@ -423,8 +305,8 @@ packages: resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} dev: true - /@jridgewell/trace-mapping@0.3.19: - resolution: {integrity: sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==} + /@jridgewell/trace-mapping@0.3.20: + resolution: {integrity: sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==} dependencies: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 @@ -435,24 +317,27 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.28 + libsql: 0.1.34 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate + dev: false - /@libsql/darwin-arm64@0.1.28: - resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} + /@libsql/darwin-arm64@0.1.34: + resolution: {integrity: sha512-Wv8jvkj/fUAO8DF3A4HaddCMldUUpKcg/WW1sY95FNsSHOxktyxqU80jAp/tCuZ85GQIJozvgSr51/ARIC0gsw==} cpu: [arm64] os: [darwin] requiresBuild: true + dev: false optional: true - /@libsql/darwin-x64@0.1.28: - resolution: {integrity: 
sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} + /@libsql/darwin-x64@0.1.34: + resolution: {integrity: sha512-2NQXD9nUzC08hg7FdcZLq5uTEwGz1KbD7YvUzQb/psO1lO/E/p83wl1es1082+Pp0z5pSPDWQeRTuccD41L+3w==} cpu: [x64] os: [darwin] requiresBuild: true + dev: false optional: true /@libsql/hrana-client@0.5.5: @@ -466,59 +351,76 @@ packages: - bufferutil - encoding - utf-8-validate + dev: false /@libsql/isomorphic-fetch@0.1.10: resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} dependencies: - '@types/node-fetch': 2.6.6 + '@types/node-fetch': 2.6.7 node-fetch: 2.7.0 transitivePeerDependencies: - encoding + dev: false /@libsql/isomorphic-ws@0.1.5: resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} dependencies: - '@types/ws': 8.5.5 + '@types/ws': 8.5.8 ws: 8.14.2 transitivePeerDependencies: - bufferutil - utf-8-validate + dev: false - /@libsql/linux-arm64-gnu@0.1.28: - resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} + /@libsql/linux-arm64-gnu@0.1.34: + resolution: {integrity: sha512-r3dY1FDYZ7eX5HX7HyAoYSqK5FPugj5NSB5Bt/nz+ygBWdXASgSKxkE/RqjJIM59vXwv300iJX9qhR5fXv8sTw==} cpu: [arm64] os: [linux] requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.1.34: + resolution: {integrity: sha512-9AE/eNb9eQRcNsLxqtpLJxVEoIMmItrdwqJDImPJtOp10rhp4U0x/9RGKerl9Mg3ObVj676pyhAR2KzyudrOfQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-gnu@0.1.28: - resolution: {integrity: sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} + /@libsql/linux-x64-gnu@0.1.34: + resolution: {integrity: sha512-o8toY1Txstjt13fBhZbFe8sNAW6OaS6qVcp1Bd6bHkCLSBLZ6pjJmwzQN8rFv9QFBPAnaKP3lI4vaOXXw7huTA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-musl@0.1.28: - resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} + /@libsql/linux-x64-musl@0.1.34: + resolution: {integrity: sha512-EldEmcAxxNPSCjJ73oFxg81PDDIpDbPqK/QOrhmmGYLvYwrnQtVRUIbARf80JQvcy6bCxOO/Q9dh6wGhnyHyYA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/win32-x64-msvc@0.1.28: - resolution: {integrity: sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==} + /@libsql/win32-x64-msvc@0.1.34: + resolution: {integrity: sha512-jnv0qfVMnrVv00r+wUOe6DHrHuao9y1w1lN543cV2J1JdQNJT/eSZzhyZFSlS3T2ZUvXfZfZ5GeL8U18IAID6w==} cpu: [x64] os: [win32] requiresBuild: true + dev: false optional: true /@neon-rs/load@0.0.4: resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false /@neondatabase/serverless@0.6.0: resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} dependencies: '@types/pg': 8.6.6 + dev: false /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -544,73 +446,39 @@ packages: /@planetscale/database@1.11.0: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - - 
/@prisma/client@5.4.2(prisma@5.4.2): - resolution: {integrity: sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA==} - engines: {node: '>=16.13'} - requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - '@prisma/engines-version': 5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574 - prisma: 5.4.2 dev: false - /@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574: - resolution: {integrity: sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA==} - dev: false - - /@prisma/engines@5.4.2: - resolution: {integrity: sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g==} - requiresBuild: true - - /@types/debug@4.1.8: - resolution: {integrity: sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==} + /@types/node-fetch@2.6.7: + resolution: {integrity: sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==} dependencies: - '@types/ms': 0.7.31 - dev: true - - /@types/ms@0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: true - - /@types/node-fetch@2.6.6: - resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==} - dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 form-data: 4.0.0 + dev: false /@types/node@20.5.1: resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} - dev: true - - /@types/node@20.5.9: - resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} - - /@types/node@20.6.5: - resolution: {integrity: sha512-2qGq5LAOTh9izcc0+F+dToFigBWiK1phKPt7rNhOqJSr35y8rlIBjDwGtFSgAI6MGIhjwOVNSQZVdJsZJ2uR1w==} /@types/pg@8.10.2: resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 4.0.1 + dev: false /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 2.2.0 + dev: false - /@types/ws@8.5.5: - resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} + /@types/ws@8.5.8: + resolution: {integrity: sha512-flUksGIQCnJd6sZ1l5dqCEG/ksaoAg/eUwiLAGTJQcfgvZJKF++Ta4bJA6A5aPSJmsr+xlseHn4KLgVlNnvPTg==} dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 + dev: false /any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} @@ -629,14 +497,9 @@ packages: engines: {node: '>=8'} dev: true - /async-mutex@0.4.0: - resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} - dependencies: - tslib: 2.6.2 - dev: false - /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false /balanced-match@1.0.2: resolution: {integrity: 
sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} @@ -668,9 +531,10 @@ packages: /buffer-writer@2.0.0: resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} engines: {node: '>=4'} + dev: false - /bundle-require@4.0.1(esbuild@0.18.20): - resolution: {integrity: sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ==} + /bundle-require@4.0.2(esbuild@0.18.20): + resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: esbuild: '>=0.17' @@ -704,6 +568,7 @@ packages: engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 + dev: false /commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} @@ -714,21 +579,6 @@ packages: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} dev: true - /copy-anything@3.0.5: - resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} - engines: {node: '>=12.13'} - dependencies: - is-what: 4.1.15 - dev: false - - /cross-env@7.0.3: - resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} - engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} - hasBin: true - dependencies: - cross-spawn: 7.0.3 - dev: true - /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} @@ -741,6 +591,7 @@ packages: /data-uri-to-buffer@4.0.1: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} + dev: false /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} @@ -752,14 +603,17 @@ packages: optional: true dependencies: ms: 2.1.2 + dev: true /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dev: false /detect-libc@2.0.2: resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} engines: {node: '>=8'} + dev: false /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} @@ -836,6 +690,7 @@ packages: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 3.2.1 + dev: false /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} @@ -851,12 +706,14 @@ packages: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 + dev: false /formdata-polyfill@4.0.10: resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} engines: {node: '>=12.20.0'} dependencies: fetch-blob: 3.2.0 + dev: false /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -875,8 +732,8 @@ packages: engines: {node: '>=10'} dev: true - /get-tsconfig@4.7.0: - resolution: {integrity: 
sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw==} + /get-tsconfig@4.7.2: + resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==} dependencies: resolve-pkg-maps: 1.0.0 dev: true @@ -961,11 +818,6 @@ packages: engines: {node: '>=8'} dev: true - /is-what@4.1.15: - resolution: {integrity: sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==} - engines: {node: '>=12.13'} - dev: false - /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true @@ -977,21 +829,24 @@ packages: /js-base64@3.7.5: resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false - /libsql@0.1.28: - resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==} + /libsql@0.1.34: + resolution: {integrity: sha512-LGofp7z7gi1Td6vu2GxaA4WyvSPEkuFn0f/ePSti1TsAlBU0LWxdk+bj9D8nqswzxiqe5wpAyTLhVzTIYSyXEA==} cpu: [x64, arm64] os: [darwin, linux, win32] dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.1.28 - '@libsql/darwin-x64': 0.1.28 - '@libsql/linux-arm64-gnu': 0.1.28 - '@libsql/linux-x64-gnu': 0.1.28 - '@libsql/linux-x64-musl': 0.1.28 - '@libsql/win32-x64-msvc': 0.1.28 + '@libsql/darwin-arm64': 0.1.34 + '@libsql/darwin-x64': 0.1.34 + '@libsql/linux-arm64-gnu': 0.1.34 + '@libsql/linux-arm64-musl': 0.1.34 + '@libsql/linux-x64-gnu': 0.1.34 + '@libsql/linux-x64-musl': 0.1.34 + '@libsql/win32-x64-msvc': 0.1.34 + dev: false /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} @@ -1031,12 +886,14 @@ packages: /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} + dev: false /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 + dev: false /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} @@ -1051,6 +908,7 @@ packages: /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true /mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -1063,6 +921,7 @@ packages: /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} + dev: false /node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} @@ -1074,6 +933,7 @@ packages: optional: true dependencies: whatwg-url: 5.0.0 + dev: false /node-fetch@3.3.2: resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} @@ -1082,6 +942,7 @@ packages: data-uri-to-buffer: 4.0.1 fetch-blob: 3.2.0 formdata-polyfill: 4.0.10 + dev: false /normalize-path@3.0.0: resolution: {integrity: 
sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} @@ -1102,6 +963,7 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + dev: false /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -1118,6 +980,7 @@ packages: /packet-reader@1.0.0: resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} + dev: false /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} @@ -1137,18 +1000,22 @@ packages: /pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} requiresBuild: true + dev: false optional: true /pg-connection-string@2.6.2: resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + dev: false /pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + dev: false /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} + dev: false /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -1156,9 +1023,11 @@ packages: pg: '>=8.0' dependencies: pg: 8.11.3 + dev: false /pg-protocol@1.6.0: resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + dev: false /pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} @@ -1169,6 +1038,7 @@ packages: postgres-bytea: 1.0.0 postgres-date: 1.0.7 postgres-interval: 1.2.0 + dev: false /pg-types@4.0.1: resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} @@ -1181,6 +1051,7 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 + dev: false /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -1200,11 +1071,13 @@ packages: pgpass: 1.0.5 optionalDependencies: pg-cloudflare: 1.1.1 + dev: false /pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} dependencies: split2: 4.2.0 + dev: false /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} @@ -1229,55 +1102,56 @@ packages: optional: true dependencies: lilconfig: 2.1.0 - yaml: 2.3.2 + yaml: 2.3.3 dev: true /postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} + dev: false /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} + dev: false /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} engines: 
{node: '>=0.10.0'} + dev: false /postgres-bytea@3.0.0: resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} engines: {node: '>= 6'} dependencies: obuf: 1.1.2 + dev: false /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} engines: {node: '>=0.10.0'} + dev: false /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} + dev: false /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} engines: {node: '>=0.10.0'} dependencies: xtend: 4.0.2 + dev: false /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} + dev: false /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - - /prisma@5.4.2: - resolution: {integrity: sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - dependencies: - '@prisma/engines': 5.4.2 + dev: false /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} @@ -1309,8 +1183,8 @@ packages: engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rollup@3.28.1: - resolution: {integrity: sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==} + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true optionalDependencies: @@ -1366,6 +1240,7 @@ packages: /split2@4.2.0: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} + dev: false /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} @@ -1386,13 +1261,6 @@ packages: ts-interface-checker: 0.1.13 dev: true - /superjson@1.13.1: - resolution: {integrity: sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==} - engines: {node: '>=10'} - dependencies: - copy-anything: 3.0.5 - dev: false - /thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -1415,6 +1283,7 @@ packages: /tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} @@ -1431,10 +1300,6 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tslib@2.6.2: - resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: false - /tsup@7.2.0(typescript@5.1.6): resolution: {integrity: 
sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} engines: {node: '>=16.14'} @@ -1451,7 +1316,7 @@ packages: typescript: optional: true dependencies: - bundle-require: 4.0.1(esbuild@0.18.20) + bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 chokidar: 3.5.3 debug: 4.3.4 @@ -1461,7 +1326,7 @@ packages: joycon: 3.1.1 postcss-load-config: 4.0.1 resolve-from: 5.0.0 - rollup: 3.28.1 + rollup: 3.29.4 source-map: 0.8.0-beta.0 sucrase: 3.34.0 tree-kill: 1.2.2 @@ -1475,9 +1340,9 @@ packages: resolution: {integrity: sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} hasBin: true dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.2.2 - '@esbuild-kit/esm-loader': 2.5.5 + '@esbuild-kit/cjs-loader': 2.4.4 + '@esbuild-kit/core-utils': 3.3.2 + '@esbuild-kit/esm-loader': 2.6.5 optionalDependencies: fsevents: 2.3.3 dev: true @@ -1488,8 +1353,8 @@ packages: hasBin: true dev: true - /undici@5.26.2: - resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} + /undici@5.26.5: + resolution: {integrity: sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==} engines: {node: '>=14.0'} dependencies: '@fastify/busboy': 2.0.0 @@ -1498,9 +1363,11 @@ packages: /web-streams-polyfill@3.2.1: resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} engines: {node: '>= 8'} + dev: false /webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} @@ -1511,6 +1378,7 @@ packages: dependencies: tr46: 0.0.3 webidl-conversions: 3.0.1 + dev: false /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} @@ -1543,12 +1411,14 @@ packages: optional: true utf-8-validate: optional: true + dev: false /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + dev: false - /yaml@2.3.2: - resolution: {integrity: sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==} + /yaml@2.3.3: + resolution: {integrity: sha512-zw0VAJxgeZ6+++/su5AFoqBbZbrEakwu+X0M5HmcwUiBL7AzcuPKjj5we4xfQLp78LkEMpD0cOnUhmgOVy3KdQ==} engines: {node: '>= 14'} dev: true diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh rename to query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts diff --git 
a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts similarity index 99% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index 8a05a6b2e9aa..b89348fb3e77 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -9,7 +9,8 @@ import * as prismaPg from '@prisma/adapter-pg' // neon dependencies import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' -import { fetch, WebSocket } from 'undici' +import { fetch } from 'undici' +import { WebSocket } from 'ws' import * as prismaNeon from '@prisma/adapter-neon' // libsql dependencies diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts similarity index 92% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts index 764df8f6108d..186d7a9e80d2 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts @@ -10,7 +10,7 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: const libExt = os.platform() === 'darwin' ? 
'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + const libQueryEnginePath = path.join(dirname, `../../../../target/debug/libquery_engine.${libExt}`) const libqueryEngine = { exports: {} as unknown as lib.Library } // @ts-ignore diff --git a/query-engine/driver-adapters/js/tsconfig.json b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json similarity index 99% rename from query-engine/driver-adapters/js/tsconfig.json rename to query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json index b405cea50201..516c114b3e15 100644 --- a/query-engine/driver-adapters/js/tsconfig.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json @@ -20,4 +20,4 @@ "resolveJsonModule": true }, "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] -} +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/.gitignore b/query-engine/driver-adapters/js/.gitignore deleted file mode 100644 index e885963af278..000000000000 --- a/query-engine/driver-adapters/js/.gitignore +++ /dev/null @@ -1,44 +0,0 @@ -node_modules - -yarn-error.log -dist -build -tmp -pnpm-debug.log -sandbox -.DS_Store - -query-engine* -migration-engine* -schema-engine* -libquery_engine* -libquery-engine* -query_engine-windows.dll.node - -*tmp.db -dist/ -declaration/ - -*.tsbuildinfo -.prisma -.pnpm-store - -.vscode -!.vscode/launch.json.default -coverage - -.eslintcache - -.pnpm-debug.log - -.envrc - -esm -reproductions/* -!reproductions/basic-sqlite -!reproductions/tracing -!reproductions/pnpm-workspace.yaml - -dev.db -junit.xml -/output.txt diff --git a/query-engine/driver-adapters/js/.npmrc b/query-engine/driver-adapters/js/.npmrc deleted file mode 100644 index c87ec9b9e3d3..000000000000 --- a/query-engine/driver-adapters/js/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -git-checks=false -access=public diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml deleted file mode 100644 index f0beb50a2167..000000000000 --- a/query-engine/driver-adapters/js/.prettierrc.yml +++ /dev/null @@ -1,5 +0,0 @@ -tabWidth: 2 -trailingComma: all -singleQuote: true -semi: false -printWidth: 120 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md deleted file mode 100644 index 926d6db2b0a8..000000000000 --- a/query-engine/driver-adapters/js/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Prisma Driver Adapters - - - - - - - -
- -This TypeScript monorepo contains the following packages: -- `@prisma/driver-adapter-utils` - - Internal set of utilities and types for Prisma's driver adapters. -- `@prisma/adapter-neon` - - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@prisma/adapter-planetscale` - - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - - It uses `provider = "mysql"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@prisma/adapter-pg` - - Prisma's Driver Adapter that wraps the `pg` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` - -## Get Started - -We assume Node.js `v18.16.1`+ is installed. If not, run `nvm use` in the current directory. -This is very important to double-check if you have multiple versions installed, as PlanetScale requires either Node.js `v18.16.1`+ or a custom `fetch` function. - -Install `pnpm` via: - -```sh -npm i -g pnpm -``` - -## Development - -- Install Node.js dependencies via `pnpm i` -- Build and link TypeScript packages via `pnpm build` -- Publish packages to `npm` via `pnpm publish -r` diff --git a/query-engine/driver-adapters/js/adapter-libsql/.gitignore b/query-engine/driver-adapters/js/adapter-libsql/.gitignore deleted file mode 100644 index c370cb644f95..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.db diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md deleted file mode 100644 index 5ca415ea8ec9..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# @prisma/adapter-libsql - -Prisma driver adapter for Turso and libSQL. - -See https://prisma.io/turso for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver. - -```sh -npm install @prisma/adapter-libsql -npm install @libsql/client -``` - -Make sure your Turso database connection string and authentication token is copied over to your `.env` file. The connection string will start with `libsql://`. - -```env -# .env -TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." -TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io" -``` - -You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to setup migrations - -As Turso needs to sync between a local sqlite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should: - -1. 
Create a baseline migration - -```sh -npx prisma migrate diff --from-empty \ - --to-schema-datamodel prisma/schema.prisma \ - --script > baseline.sql -``` - -2. Apply the migration to your Turso database - -```sh -turso db shell turso-prisma < baseline.sql -``` - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the libSQL serverless database driver -3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { PrismaClient } from '@prisma/client'; -import { PrismaLibSQL } from '@prisma/adapter-libsql'; -import { createClient } from '@libsql/client'; - -// Setup -const connectionString = `${process.env.TURSO_DATABASE_URL}`; -const authToken = `${process.env.TURSO_AUTH_TOKEN}`; - -// Init prisma client -const libsql = createClient({ - url: connectionString, - authToken, -}); -const adapter = new PrismaLibSQL(libsql); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses a **single** remote Turso database. -You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. -We encourage you to create an issue if you find something missing or run into a bug. - -If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. 
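-
-A supplementary sketch, not part of the original tutorial: for local development you can point the libSQL client at a plain SQLite file instead of a remote `libsql://` URL, which is how this package's own test suite connects. No auth token is needed for local files; `file:dev.db` below matches the `schema.prisma` example above.
-
-```typescript
-import { PrismaClient } from '@prisma/client';
-import { PrismaLibSQL } from '@prisma/adapter-libsql';
-import { createClient } from '@libsql/client';
-
-// Local SQLite file instead of a remote Turso URL; no TURSO_AUTH_TOKEN required.
-const libsql = createClient({ url: 'file:dev.db' });
-const adapter = new PrismaLibSQL(libsql);
-const prisma = new PrismaClient({ adapter });
-```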
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json deleted file mode 100644 index fbce33c98a29..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-libsql", - "version": "0.0.0", - "description": "Prisma's driver adapter for libSQL and Turso", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json", - "test": "node --loader tsx --test tests/*.test.mts" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alexey Orlenko ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "async-mutex": "0.4.0" - }, - "devDependencies": { - "@libsql/client": "0.3.5" - }, - "peerDependencies": { - "@libsql/client": "^0.3.5" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts deleted file mode 100644 index b2fa4b5b4095..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ /dev/null @@ -1,161 +0,0 @@ -import { ColumnTypeEnum, ColumnType, Debug } from '@prisma/driver-adapter-utils' -import { Row, Value } from '@libsql/client' -import { isArrayBuffer } from 'node:util/types' - -const debug = Debug('prisma:driver-adapter:libsql:conversion') - -// Mirrors sqlite/conversion.rs in quaint -function mapDeclType(declType: string): ColumnType | null { - switch (declType.toUpperCase()) { - case '': - return null - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'FLOAT': - return ColumnTypeEnum.Float - case 'DOUBLE': - case 'DOUBLE PRECISION': - case 'NUMERIC': - case 'REAL': - return ColumnTypeEnum.Double - case 'TINYINT': - case 'SMALLINT': - case 'MEDIUMINT': - case 'INT': - case 'INTEGER': - case 'SERIAL': - case 'INT2': - return ColumnTypeEnum.Int32 - case 'BIGINT': - case 'UNSIGNED BIG INT': - case 'INT8': - return ColumnTypeEnum.Int64 - case 'DATETIME': - case 'TIMESTAMP': - return ColumnTypeEnum.DateTime - case 'TIME': - return ColumnTypeEnum.Time - case 'DATE': - return ColumnTypeEnum.Date - case 'TEXT': - case 'CLOB': - case 'CHARACTER': - case 'VARCHAR': - case 'VARYING CHARACTER': - case 'NCHAR': - case 'NATIVE CHARACTER': - case 'NVARCHAR': - return ColumnTypeEnum.Text - case 'BLOB': - return ColumnTypeEnum.Bytes - case 'BOOLEAN': - return ColumnTypeEnum.Boolean - default: - debug('unknown decltype:', declType) - return null - } -} - -function mapDeclaredColumnTypes(columntTypes: string[]): [out: Array, empty: Set] { - const emptyIndices = new Set() - const result = columntTypes.map((typeName, index) => { - const mappedType = mapDeclType(typeName) - if (mappedType === null) { - emptyIndices.add(index) - } - return mappedType - }) - return [result, emptyIndices] -} - -export function getColumnTypes(declaredTypes: string[], rows: Row[]): ColumnType[] { - const [columnTypes, emptyIndices] = mapDeclaredColumnTypes(declaredTypes) - - if (emptyIndices.size === 0) { - return columnTypes as ColumnType[] - } - - columnLoop: for (const columnIndex of emptyIndices) { - // No declared column type in db schema, infer using first non-null value - for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { - const candidateValue = rows[rowIndex][columnIndex] - if (candidateValue 
!== null) { - columnTypes[columnIndex] = inferColumnType(candidateValue) - continue columnLoop - } - } - - // No non-null value found for this column, fall back to int32 to mimic what quaint does - columnTypes[columnIndex] = ColumnTypeEnum.Int32 - } - - return columnTypes as ColumnType[] -} - -function inferColumnType(value: NonNullable): ColumnType { - switch (typeof value) { - case 'string': - return ColumnTypeEnum.Text - case 'bigint': - return ColumnTypeEnum.Int64 - case 'boolean': - return ColumnTypeEnum.Boolean - case 'number': - return ColumnTypeEnum.UnknownNumber - case 'object': - return inferObjectType(value) - default: - throw new UnexpectedTypeError(value) - } -} - -function inferObjectType(value: {}): ColumnType { - if (isArrayBuffer(value)) { - return ColumnTypeEnum.Bytes - } - throw new UnexpectedTypeError(value) -} - -class UnexpectedTypeError extends Error { - name = 'UnexpectedTypeError' - constructor(value: unknown) { - const type = typeof value - const repr = type === 'object' ? JSON.stringify(value) : String(value) - super(`unexpected value of type ${type}: ${repr}`) - } -} - -export function mapRow(row: Row, columnTypes: ColumnType[]): unknown[] { - // `Row` doesn't have map, so we copy the array once and modify it in-place - // to avoid allocating and copying twice if we used `Array.from(row).map(...)`. - const result: unknown[] = Array.from(row) - - for (let i = 0; i < result.length; i++) { - const value = result[i] - - // Convert bigint to string as we can only use JSON-encodable types here - if (typeof value === 'bigint') { - result[i] = value.toString() - } - - // Convert array buffers to arrays of bytes. - // Base64 would've been more efficient but would collide with the existing - // logic that treats string values of type Bytes as raw UTF-8 bytes that was - // implemented for other adapters. - if (isArrayBuffer(value)) { - result[i] = Array.from(new Uint8Array(value)) - } - - // If an integer is required and the current number isn't one, - // discard the fractional part. 
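-    // (SQLite's flexible typing means a column declared INTEGER can still
-    // hold e.g. 1.5, while quaint expects whole numbers for Int32/Int64.)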
- if ( - typeof value === 'number' && - (columnTypes[i] === ColumnTypeEnum.Int32 || columnTypes[i] === ColumnTypeEnum.Int64) && - !Number.isInteger(value) - ) { - result[i] = Math.trunc(value) - } - } - - return result -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts deleted file mode 100644 index 04a95cc4cfcd..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaLibSQL } from './libsql' diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts deleted file mode 100644 index 6528c8f44a8a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import type { - InStatement, - Client as LibSqlClientRaw, - Transaction as LibSqlTransactionRaw, - ResultSet as LibSqlResultSet, -} from '@libsql/client' -import { Mutex } from 'async-mutex' -import { getColumnTypes, mapRow } from './conversion' - -const debug = Debug('prisma:driver-adapter:libsql') - -type StdClient = LibSqlClientRaw -type TransactionClient = LibSqlTransactionRaw - -const LOCK_TAG = Symbol() - -class LibSqlQueryable implements Queryable { - readonly flavour = 'sqlite'; - - [LOCK_TAG] = new Mutex() - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - - return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { - const columnTypes = getColumnTypes(declaredColumnTypes, rows) - - return { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise> { - const release = await this[LOCK_TAG].acquire() - try { - const result = await this.client.execute(query as InStatement) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - const rawCode = error['rawCode'] ?? 
e.cause?.['rawCode'] - if (typeof rawCode === 'number') { - return err({ - kind: 'Sqlite', - extendedCode: rawCode, - message: error.message, - }) - } - throw error - } finally { - release() - } - } -} - -class LibSqlTransaction extends LibSqlQueryable implements Transaction { - finished = false - - constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - - try { - await this.client.commit() - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - - try { - await this.client.rollback() - } catch (error) { - debug('error in rollback:', error) - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - dispose(): Result { - if (!this.finished) { - this.finished = true - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaLibSQL extends LibSqlQueryable implements DriverAdapter { - constructor(client: StdClient) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const release = await this[LOCK_TAG].acquire() - - try { - const tx = await this.client.transaction('deferred') - return ok(new LibSqlTransaction(tx, options, release)) - } catch (e) { - // note: we only release the lock if creating the transaction fails, it must stay locked otherwise, - // hence `catch` and rethrowing the error and not `finally`. - release() - throw e - } - } - - async close(): Promise> { - await this[LOCK_TAG].acquire() - this.client.close() - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts deleted file mode 100644 index f7f1b474a300..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts +++ /dev/null @@ -1,151 +0,0 @@ -import assert from 'node:assert/strict' -import { describe, it } from 'node:test' -import { Config, createClient } from '@libsql/client' -import { PrismaLibSQL } from '../dist/index.js' -import { ColumnTypeEnum } from '@jkomyno/prisma-driver-adapter-utils' - -function connect(config?: Partial): PrismaLibSQL { - const client = createClient({ url: 'file:test.db', ...config }) - return new PrismaLibSQL(client) -} - -it('checks declared types', async () => { - const client = connect() - - await client.executeRaw({ - sql: ` - DROP TABLE IF EXISTS types; - `, - args: [], - }) - - await client.executeRaw({ - sql: ` - CREATE TABLE types ( - id INTEGER PRIMARY KEY, - real REAL, - bigint BIGINT, - date DATETIME, - text TEXT, - blob BLOB - ) - `, - args: [], - }) - - const result = await client.queryRaw({ - sql: ` - SELECT * FROM types - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ - ColumnTypeEnum.Int32, - ColumnTypeEnum.Double, - ColumnTypeEnum.Int64, - ColumnTypeEnum.DateTime, - ColumnTypeEnum.Text, - ColumnTypeEnum.Bytes, - ]) -}) - -it('infers types when sqlite decltype is not available', async () => { - const client = connect() - - const result = await client.queryRaw({ - sql: ` - SELECT 1 as first, 'test' as second - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ColumnTypeEnum.Int64, 
ColumnTypeEnum.Text]) -}) - -describe('int64 with different intMode', () => { - const N = 2n ** 63n - 1n - - it('correctly infers int64 with intMode=number for safe JS integers', async () => { - const client = connect({ intMode: 'number' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [Number.MAX_SAFE_INTEGER], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], Number.MAX_SAFE_INTEGER) - }) - - it("doesn't support very big int64 with intMode=number", async () => { - const client = connect({ intMode: 'number' }) - - assert.rejects( - client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }), - ) - }) - - it('correctly infers int64 with intMode=bigint', async () => { - const client = connect({ intMode: 'bigint' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - - // bigints are converted to strings because we can't currently pass a bigint - // to rust due to a napi.rs limitation - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it('correctly infers int64 with intMode=string when we have decltype', async () => { - const client = connect({ intMode: 'string' }) - - await client.executeRaw({ - sql: `DROP TABLE IF EXISTS test`, - args: [], - }) - - await client.executeRaw({ - sql: `CREATE TABLE test (int64 BIGINT)`, - args: [], - }) - - await client.executeRaw({ - sql: `INSERT INTO test (int64) VALUES (?)`, - args: [N], - }) - - const result = await client.queryRaw({ - sql: `SELECT int64 FROM test`, - args: [], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it("can't infer int64 with intMode=string without schema", async () => { - const client = connect({ intMode: 'string' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Text) - assert.equal(result.value.rows[0][0], N.toString()) - }) -}) diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md deleted file mode 100644 index f36f44c6bca4..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-neon - -Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. 
- -## How to install - -After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. - -```sh -npm install @prisma/adapter-neon -npm install @neondatabase/serverless -npm install ws -``` - -Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. The connection string will start with `postgres://`. - -```env -# .env -DATABASE_URL="postgres://..." -``` - -Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgresql" - url = env("DATABASE_URL") -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the Neon serverless database driver -3. Instantiate the Prisma Neon adapter with the Neon serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { Pool, neonConfig } from '@neondatabase/serverless'; -import { PrismaNeon } from '@prisma/adapter-neon'; -import { PrismaClient } from '@prisma/client'; -import ws from 'ws'; - -// Setup -neonConfig.webSocketConstructor = ws; -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const pool = new Pool({ connectionString }); -const adapter = new PrismaNeon(pool); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety. If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development. 
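-
-One addition worth illustrating (a hypothetical usage sketch, not from the original README): this package also exports `PrismaNeonHTTP`, which drives Neon's one-shot HTTP mode via the `neon()` query function from `@neondatabase/serverless`. As the adapter source below shows, `startTransaction` is rejected in this mode, so it only suits simple single-statement workloads.
-
-```typescript
-import { neon } from '@neondatabase/serverless';
-import { PrismaNeonHTTP } from '@prisma/adapter-neon';
-import { PrismaClient } from '@prisma/client';
-
-// One-shot HTTP queries: no WebSocket setup, but also no transaction support.
-const sql = neon(`${process.env.DATABASE_URL}`);
-const adapter = new PrismaNeonHTTP(sql);
-const prisma = new PrismaClient({ adapter });
-```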
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json deleted file mode 100644 index 02005a13572f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@prisma/adapter-neon", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "@neondatabase/serverless": "^0.6.0" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts deleted file mode 100644 index 78f285240599..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from '@neondatabase/serverless' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
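- * OIDs at or above 10000 are assumed to be non-built-in (custom) types and
- * are mapped to Enum in the default arm below.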
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function 
normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
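- * Each byte becomes a plain JS number (0-255), so the array survives JSON
- * encoding on its way to the engine.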
- */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-neon/src/index.ts b/query-engine/driver-adapters/js/adapter-neon/src/index.ts deleted file mode 100644 index f160d413ade0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaNeon, PrismaNeonHTTP } from './neon' diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts deleted file mode 100644 index e8fe40ada22f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type neon from '@neondatabase/serverless' -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:neon') - -type ARRAY_MODE_ENABLED = true - -type PerformIOResult = neon.QueryResult | neon.FullQueryResults - -/** - * Base class for http client, ws client and ws transaction - */ -abstract class NeonQueryable implements Queryable { - readonly flavour = 'postgres' - - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map((r) => r.rowCount ?? 
0) - } - - abstract performIO(query: Query): Promise> -} - -/** - * Base class for WS-based queryables: top-level client and transaction - */ -class NeonWsQueryable extends NeonQueryable { - constructor(protected client: ClientT) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) - } catch (e) { - debug('Error in performIO: %O', e) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw e - } - } -} - -class NeonTransaction extends NeonWsQueryable implements Transaction { - finished = false - - constructor(client: neon.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaNeon extends NeonWsQueryable implements DriverAdapter { - private isRunning = true - - constructor(pool: neon.Pool) { - super(pool) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new NeonTransaction(connection, options)) - } - - async close() { - if (this.isRunning) { - await this.client.end() - this.isRunning = false - } - return ok(undefined) - } -} - -export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - return ok( - await this.client(sql, values, { - arrayMode: true, - fullResults: true, - }), - ) - } - - startTransaction(): Promise> { - return Promise.reject(new Error('Transactions are not supported in HTTP mode')) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-pg/README.md b/query-engine/driver-adapters/js/adapter-pg/README.md deleted file mode 100644 index b8463742e25c..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-pg - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json 
b/query-engine/driver-adapters/js/adapter-pg/package.json deleted file mode 100644 index 7514569c562a..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-pg", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"pg\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Tom Houlé ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "pg": "^8.11.3", - "@types/pg": "^8.10.2" - }, - "peerDependencies": { - "pg": "^8.11.3" - } -} diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts deleted file mode 100644 index c26b13877927..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from 'pg' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function 
normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
- */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-pg/src/index.ts b/query-engine/driver-adapters/js/adapter-pg/src/index.ts deleted file mode 100644 index f8e51ac2685b..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPg } from './pg' diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts deleted file mode 100644 index c34050778c39..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ /dev/null @@ -1,138 +0,0 @@ -import type pg from 'pg' -import { Debug, err, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:pg') - -type StdClient = pg.Pool -type TransactionClient = pg.PoolClient - -class PgQueryable implements Queryable { - readonly flavour = 'postgres' - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - return ioResult.map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map(({ rowCount: rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. 
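-   * Errors that carry a Postgres error `code` are returned as structured
-   * `Postgres` error results; anything else is re-thrown.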
- */ - private async performIO(query: Query): Promise>> { - const { sql, args: values } = query - - try { - const result = await this.client.query({ text: sql, values, rowMode: 'array' }) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw error - } - } -} - -class PgTransaction extends PgQueryable implements Transaction { - finished = false - - constructor(client: pg.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaPg extends PgQueryable implements DriverAdapter { - constructor(client: pg.Pool) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new PgTransaction(connection, options)) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md deleted file mode 100644 index a4cdc132036a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-planetscale - -Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with PlanetScale](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver. - -```sh -npm install @prisma/adapter-planetscale -npm install @planetscale/database -npm install undici -``` - -Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`. - -```env -# .env -DATABASE_URL="mysql://..." -``` - -You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") - relationMode = "prisma" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the PlanetScale serverless database driver -3. 
Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { connect } from '@planetscale/database'; -import { PrismaPlanetScale } from '@prisma/adapter-planetscale'; -import { PrismaClient } from '@prisma/client'; -import { fetch as undiciFetch } from 'undici'; - -// Setup -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const connection = connect({ url: connectionString, fetch: undiciFetch }); -const adapter = new PrismaPlanetScale(connection); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [`connection reliability and performance`](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma’s connection pool, but Prisma will continue to handle error handling and type safety. If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json deleted file mode 100644 index 59d59704ab50..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@prisma/adapter-planetscale", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@planetscale/database\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@planetscale/database": "^1.11.0" - }, - "peerDependencies": { - "@planetscale/database": "^1.11.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts deleted file mode 100644 index f6cf8563dc24..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' - -// See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 -export type PlanetScaleColumnType - = 'NULL' - | 'INT8' - | 'UINT8' - | 'INT16' - | 'UINT16' - | 'INT24' - | 'UINT24' - | 'INT32' - | 'UINT32' - | 'INT64' - | 'UINT64' - | 'FLOAT32' - | 'FLOAT64' - | 'TIMESTAMP' - | 'DATE' - | 'TIME' - | 'DATETIME' - | 'YEAR' - | 'DECIMAL' - | 'TEXT' - | 'BLOB' - | 'VARCHAR' - | 'VARBINARY' - | 'CHAR' - | 'BINARY' - | 'BIT' - | 'ENUM' - | 'SET' // unsupported - | 'TUPLE' // unsupported - | 'GEOMETRY' - | 'JSON' - | 'EXPRESSION' // unsupported - | 'HEXNUM' - | 'HEXVAL' - | 'BITNUM' - -/** - * This is a simplification of quaint's value inference logic. 
Take a look at quaint's conversion.rs
- * module to see how other attributes of the field packet such as the field length are used to infer
- * the correct quaint::Value variant.
- */
-export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType {
-  switch (field) {
-    case 'INT8':
-    case 'UINT8':
-    case 'INT16':
-    case 'UINT16':
-    case 'INT24':
-    case 'UINT24':
-    case 'INT32':
-    case 'UINT32':
-    case 'YEAR':
-      return ColumnTypeEnum.Int32
-    case 'INT64':
-    case 'UINT64':
-      return ColumnTypeEnum.Int64
-    case 'FLOAT32':
-      return ColumnTypeEnum.Float
-    case 'FLOAT64':
-      return ColumnTypeEnum.Double
-    case 'TIMESTAMP':
-    case 'DATETIME':
-      return ColumnTypeEnum.DateTime
-    case 'DATE':
-      return ColumnTypeEnum.Date
-    case 'TIME':
-      return ColumnTypeEnum.Time
-    case 'DECIMAL':
-      return ColumnTypeEnum.Numeric
-    case 'CHAR':
-      return ColumnTypeEnum.Char
-    case 'TEXT':
-    case 'VARCHAR':
-      return ColumnTypeEnum.Text
-    case 'ENUM':
-      return ColumnTypeEnum.Enum
-    case 'JSON':
-      return ColumnTypeEnum.Json
-    case 'BLOB':
-    case 'BINARY':
-    case 'VARBINARY':
-    case 'BIT':
-    case 'BITNUM':
-    case 'HEXNUM':
-    case 'HEXVAL':
-    case 'GEOMETRY':
-      return ColumnTypeEnum.Bytes
-    case 'NULL':
-      // Fall back to Int32 for consistency with quaint.
-      return ColumnTypeEnum.Int32
-    default:
-      throw new Error(`Unsupported column type: ${field}`)
-  }
-}
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts
deleted file mode 100644
index 013409c8424f..000000000000
--- a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-export type Deferred<T> = {
-  resolve(value: T | PromiseLike<T>): void;
-  reject(reason: unknown): void;
-}
-
-
-export function createDeferred<T>(): [Deferred<T>, Promise<T>] {
-  const deferred = {} as Deferred<T>
-  return [deferred, new Promise<T>((resolve, reject) => {
-    deferred.resolve = resolve
-    deferred.reject = reject
-  })]
-}
\ No newline at end of file
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts
deleted file mode 100644
index 5e8add856fbb..000000000000
--- a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { PrismaPlanetScale } from './planetscale'
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts
deleted file mode 100644
index 5a52851112b2..000000000000
--- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts
+++ /dev/null
@@ -1,181 +0,0 @@
-import type planetScale from '@planetscale/database'
-import { Debug, err, ok } from '@prisma/driver-adapter-utils'
-import type {
-  DriverAdapter,
-  ResultSet,
-  Query,
-  Queryable,
-  Transaction,
-  Result,
-  TransactionOptions,
-} from '@prisma/driver-adapter-utils'
-import { type PlanetScaleColumnType, fieldToColumnType } from './conversion'
-import { createDeferred, Deferred } from './deferred'
-
-const debug = Debug('prisma:driver-adapter:planetscale')
-
-class RollbackError extends Error {
-  constructor() {
-    super('ROLLBACK')
-    this.name = 'RollbackError'
-
-    if (Error.captureStackTrace) {
-      Error.captureStackTrace(this, RollbackError)
-    }
-  }
-}
-
-class PlanetScaleQueryable<ClientT extends planetScale.Connection | planetScale.Transaction> implements Queryable {
-  readonly flavour = 'mysql'
-  constructor(protected client: ClientT) {}
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters.
-   */
-  async queryRaw(query: Query): Promise<Result<ResultSet>> {
-    const tag = '[js::query_raw]'
-    debug(`${tag} %O`, query)
-
-    const ioResult = await this.performIO(query)
-    return ioResult.map(({ fields, insertId: lastInsertId, rows }) => {
-      const columns = fields.map((field) => field.name)
-      return {
-        columnNames: columns,
-        columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)),
-        rows: rows as ResultSet['rows'],
-        lastInsertId,
-      }
-    })
-  }
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters and
-   * returning the number of affected rows.
-   * Note: Queryable expects a u64, but napi.rs only supports u32.
-   */
-  async executeRaw(query: Query): Promise<Result<number>> {
-    const tag = '[js::execute_raw]'
-    debug(`${tag} %O`, query)
-
-    return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected)
-  }
-
-  /**
-   * Run a query against the database, returning the result set.
-   * Should the query fail due to a connection error, the connection is
-   * marked as unhealthy.
-   */
-  private async performIO(query: Query): Promise<Result<planetScale.ExecutedQuery>> {
-    const { sql, args: values } = query
-
-    try {
-      const result = await this.client.execute(sql, values, {
-        as: 'array',
-      })
-      return ok(result)
-    } catch (e) {
-      const error = e as Error
-      if (error.name === 'DatabaseError') {
-        const parsed = parseErrorMessage(error.message)
-        if (parsed) {
-          return err({
-            kind: 'Mysql',
-            ...parsed,
-          })
-        }
-      }
-      debug('Error in performIO: %O', error)
-      throw error
-    }
-  }
-}
-
-function parseErrorMessage(message: string) {
-  const match = message.match(
-    /target: (?:.+?) vttablet: (?<message>.+?) \(errno (?<code>\d+)\) \(sqlstate (?<state>.+?)\)/,
-  )
-
-  if (!match || !match.groups) {
-    return undefined
-  }
-  return {
-    code: Number(match.groups.code),
-    message: match.groups.message,
-    state: match.groups.state,
-  }
-}
-
-class PlanetScaleTransaction extends PlanetScaleQueryable<planetScale.Transaction> implements Transaction {
-  finished = false
-
-  constructor(
-    tx: planetScale.Transaction,
-    readonly options: TransactionOptions,
-    private txDeferred: Deferred<void>,
-    private txResultPromise: Promise<void>,
-  ) {
-    super(tx)
-  }
-
-  async commit(): Promise<Result<void>> {
-    debug(`[js::commit]`)
-
-    this.finished = true
-    this.txDeferred.resolve()
-    return Promise.resolve(ok(await this.txResultPromise))
-  }
-
-  async rollback(): Promise<Result<void>> {
-    debug(`[js::rollback]`)
-
-    this.finished = true
-    this.txDeferred.reject(new RollbackError())
-    return Promise.resolve(ok(await this.txResultPromise))
-  }
-
-  dispose(): Result<void> {
-    if (!this.finished) {
-      this.rollback().catch(console.error)
-    }
-    return ok(undefined)
-  }
-}
-
-export class PrismaPlanetScale extends PlanetScaleQueryable<planetScale.Connection> implements DriverAdapter {
-  constructor(client: planetScale.Connection) {
-    super(client)
-  }
-
-  async startTransaction() {
-    const options: TransactionOptions = {
-      usePhantomQuery: true,
-    }
-
-    const tag = '[js::startTransaction]'
-    debug(`${tag} options: %O`, options)
-
-    return new Promise<Result<Transaction>>((resolve, reject) => {
-      const txResultPromise = this.client
-        .transaction(async (tx) => {
-          const [txDeferred, deferredPromise] = createDeferred<void>()
-          const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise)
-
-          resolve(ok(txWrapper))
-          return deferredPromise
-        })
-        .catch((error) => {
-          // Rollback error is ignored (so that tx.rollback() won't crash)
-          // any other error is legit and is re-thrown
-          if (!(error instanceof RollbackError)) {
-            return reject(error)
-          }
-
-          return undefined
-        })
-    })
-  }
-
-  async close() {
-    return ok(undefined)
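    // Nothing to release here: database-js talks to PlanetScale over stateless HTTP,
    // so closing the adapter only needs to report success.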
} -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json deleted file mode 100644 index 2a0d16bd4ccf..000000000000 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "connector-test-kit-executor", - "version": "5.4.0", - "description": "", - "main": "dist/index.js", - "private": true, - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "keywords": [], - "author": "", - "sideEffects": false, - "license": "Apache-2.0", - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", - "@types/pg": "^8.10.2", - "pg": "^8.11.3", - "undici": "^5.26.2" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/README.md b/query-engine/driver-adapters/js/driver-adapter-utils/README.md deleted file mode 100644 index 78938e802bd3..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/driver-adapters-utils - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json deleted file mode 100644 index 64301a7a5533..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@prisma/driver-adapter-utils", - "version": "0.0.0", - "description": "Internal set of utilities and types for Prisma's driver adapters.", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "debug": "^4.3.4" - }, - "devDependencies": { - "@types/debug": "^4.1.8" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts deleted file mode 100644 index 1e3aa36210cf..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { Result, err, ok } from './result' -import type { ErrorCapturingDriverAdapter, DriverAdapter, 
Transaction, ErrorRegistry, ErrorRecord } from './types'
-
-class ErrorRegistryInternal implements ErrorRegistry {
-  private registeredErrors: ErrorRecord[] = []
-
-  consumeError(id: number): ErrorRecord | undefined {
-    return this.registeredErrors[id]
-  }
-
-  registerNewError(error: unknown) {
-    let i = 0
-    while (this.registeredErrors[i] !== undefined) {
-      i++
-    }
-    this.registeredErrors[i] = { error }
-    return i
-  }
-}
-
-// *.bind(adapter) is required to preserve the `this` context of functions whose
-// execution is delegated to napi.rs.
-export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => {
-  const errorRegistry = new ErrorRegistryInternal()
-
-  const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter))
-  return {
-    errorRegistry,
-    queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)),
-    executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)),
-    flavour: adapter.flavour,
-    startTransaction: async (...args) => {
-      const result = await startTransaction(...args)
-      return result.map((tx) => bindTransaction(errorRegistry, tx))
-    },
-    close: wrapAsync(errorRegistry, adapter.close.bind(adapter)),
-  }
-}
-
-// *.bind(transaction) is required to preserve the `this` context of functions whose
-// execution is delegated to napi.rs.
-const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => {
-  return {
-    flavour: transaction.flavour,
-    options: transaction.options,
-    queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)),
-    executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)),
-    commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)),
-    rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)),
-    dispose: wrapSync(errorRegistry, transaction.dispose.bind(transaction)),
-  }
-}
-
-function wrapAsync<A extends unknown[], R>(
-  registry: ErrorRegistryInternal,
-  fn: (...args: A) => Promise<Result<R>>,
-): (...args: A) => Promise<Result<R>> {
-  return async (...args) => {
-    try {
-      return await fn(...args)
-    } catch (error) {
-      const id = registry.registerNewError(error)
-      return err({ kind: 'GenericJs', id })
-    }
-  }
-}
-
-function wrapSync<A extends unknown[], R>(
-  registry: ErrorRegistryInternal,
-  fn: (...args: A) => Result<R>,
-): (...args: A) => Result<R> {
-  return (...args) => {
-    try {
-      return fn(...args)
    } catch (error) {
-      const id = registry.registerNewError(error)
-      return err({ kind: 'GenericJs', id })
-    }
-  }
-}
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
deleted file mode 100644
index 5ddc7f20b390..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-// Same order as in rust driver-adapters' `ColumnType`.
-// Note: exporting const enums causes lots of problems with bundlers, so we emulate
-// them via regular dictionaries.
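As the comment above says, `const enum` declarations break under many bundlers, so `const.ts` ships a plain object typed with `as const` and derives the union type from it. A minimal standalone sketch of that pattern (the `Color` names are illustrative only, not part of the package):

```typescript
// The object is the runtime value...
const Color = {
  Red: 0,
  Green: 1,
  Blue: 2,
} as const

// ...and this derived union (0 | 1 | 2) replaces the enum type,
// mirroring how `ColumnType` is derived from `ColumnTypeEnum` in types.ts.
type Color = (typeof Color)[keyof typeof Color]

function describe(c: Color): string {
  return `color code ${c}`
}

console.log(describe(Color.Green)) // -> "color code 1"
```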
-// See: https://hackmd.io/@dzearing/Sk3xV0cLs
-export const ColumnTypeEnum = {
-  // Scalars
-  Int32: 0,
-  Int64: 1,
-  Float: 2,
-  Double: 3,
-  Numeric: 4,
-  Boolean: 5,
-  Char: 6,
-  Text: 7,
-  Date: 8,
-  Time: 9,
-  DateTime: 10,
-  Json: 11,
-  Enum: 12,
-  Bytes: 13,
-  Set: 14,
-  Uuid: 15,
-
-  // Arrays
-  Int32Array: 64,
-  Int64Array: 65,
-  FloatArray: 66,
-  DoubleArray: 67,
-  NumericArray: 68,
-  BooleanArray: 69,
-  CharArray: 70,
-  TextArray: 71,
-  DateArray: 72,
-  TimeArray: 73,
-  DateTimeArray: 74,
-  JsonArray: 75,
-  EnumArray: 76,
-  BytesArray: 77,
-  UuidArray: 78,
-
-  // Custom
-  UnknownNumber: 128,
-} as const
-
-// This string value paired with `ColumnType.Json` will be treated as JSON `null`
-// when converting to a quaint value. This is to work around JS/JSON null values
-// already being used to represent database NULLs.
-export const JsonNullMarker = '$__prisma_null'
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts
deleted file mode 100644
index e0a1fe380fa2..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import { debug as Debug } from 'debug'
-
-export { Debug }
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts
deleted file mode 100644
index e7c13be99966..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export { bindAdapter } from './binder'
-export { ColumnTypeEnum, JsonNullMarker } from './const'
-export { Debug } from './debug'
-export { ok, err, type Result } from './result'
-export type * from './types'
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts
deleted file mode 100644
index 5af95db68671..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import { Error } from './types'
-export type Result<T> = {
-  // common methods
-  map<U>(fn: (value: T) => U): Result<U>
-  flatMap<U>(fn: (value: T) => Result<U>): Result<U>
-} & (
-  | {
-      readonly ok: true
-      readonly value: T
-    }
-  | {
-      readonly ok: false
-      readonly error: Error
-    }
-)
-
-export function ok<T>(value: T): Result<T> {
-  return {
-    ok: true,
-    value,
-    map(fn) {
-      return ok(fn(value))
-    },
-    flatMap(fn) {
-      return fn(value)
-    },
-  }
-}
-
-export function err<T>(error: Error): Result<T> {
-  return {
-    ok: false,
-    error,
-    map() {
-      return err(error)
-    },
-    flatMap() {
-      return err(error)
-    },
-  }
-}
diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
deleted file mode 100644
index 92019f81824b..000000000000
--- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts
+++ /dev/null
@@ -1,132 +0,0 @@
-import { ColumnTypeEnum } from './const'
-import { Result } from './result'
-
-export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum]
-
-export interface ResultSet {
-  /**
-   * List of column types appearing in a database query, in the same order as `columnNames`.
-   * They are used within the Query Engine to convert values from JS to Quaint values.
-   */
-  columnTypes: Array<ColumnType>
-
-  /**
-   * List of column names appearing in a database query, in the same order as `columnTypes`.
-   */
-  columnNames: Array<string>
-
-  /**
-   * List of rows retrieved from a database query.
-   * Each row is a list of values, whose length matches `columnNames` and `columnTypes`.
-   */
-  rows: Array<Array<unknown>>
-
-  /**
-   * The last ID of an `INSERT` statement, if any.
-   * This is required for `AUTO_INCREMENT` columns in MySQL and SQLite-flavoured databases.
-   */
-  lastInsertId?: string
-}
-
-export type Query = {
-  sql: string
-  args: Array<unknown>
-}
-
-export type Error =
-  | {
-      kind: 'GenericJs'
-      id: number
-    }
-  | {
-      kind: 'Postgres'
-      code: string
-      severity: string
-      message: string
-      detail: string | undefined
-      column: string | undefined
-      hint: string | undefined
-    }
-  | {
-      kind: 'Mysql'
-      code: number
-      message: string
-      state: string
-    }
-  | {
-      kind: 'Sqlite'
-      /**
-       * Sqlite extended error code: https://www.sqlite.org/rescode.html
-       */
-      extendedCode: number
-      message: string
-    }
-
-export interface Queryable {
-  readonly flavour: 'mysql' | 'postgres' | 'sqlite'
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters,
-   * and returning the type-aware result set of the query.
-   *
-   * This is the preferred way of executing `SELECT` queries.
-   */
-  queryRaw(params: Query): Promise<Result<ResultSet>>
-
-  /**
-   * Execute a query given as SQL, interpolating the given parameters,
-   * and returning the number of affected rows.
-   *
-   * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries,
-   * as well as transactional queries.
-   */
-  executeRaw(params: Query): Promise<Result<number>>
-}
-
-export interface DriverAdapter extends Queryable {
-  /**
-   * Starts a new transaction.
-   */
-  startTransaction(): Promise<Result<Transaction>>
-
-  /**
-   * Closes the connection to the database, if any.
-   */
-  close: () => Promise<Result<void>>
-}
-
-export type TransactionOptions = {
-  usePhantomQuery: boolean
-}
-
-export interface Transaction extends Queryable {
-  /**
-   * Transaction options.
-   */
-  readonly options: TransactionOptions
-  /**
-   * Commit the transaction.
-   */
-  commit(): Promise<Result<void>>
-  /**
-   * Rolls back the transaction.
-   */
-  rollback(): Promise<Result<void>>
-  /**
-   * Discards and closes the transaction which may or may not have been committed or rolled back.
-   * This operation must be synchronous. If the implementation requires creating new
-   * asynchronous tasks on the event loop, the driver is responsible for handling the errors
-   * appropriately to ensure they don't crash the application.
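   * (PlanetScaleTransaction.dispose() earlier in this patch is one example: it kicks off
   * rollback() and attaches a .catch(console.error) handler instead of awaiting the promise.)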
- */ - dispose(): Result -} - -export interface ErrorCapturingDriverAdapter extends DriverAdapter { - readonly errorRegistry: ErrorRegistry -} - -export interface ErrorRegistry { - consumeError(id: number): ErrorRecord | undefined -} - -export type ErrorRecord = { error: unknown } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json deleted file mode 100644 index 2c2e266bdb3b..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration", - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json deleted file mode 100644 index 2036794f8c02..000000000000 --- a/query-engine/driver-adapters/js/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "private": true, - "name": "js", - "version": "0.0.2", - "description": "", - "engines": { - "node": ">=16.13", - "pnpm": ">=8.6.6 <9" - }, - "license": "Apache-2.0", - "scripts": { - "build": "pnpm -r run build", - "lint": "pnpm -r run lint" - }, - "keywords": [], - "author": "", - "devDependencies": { - "@types/node": "^20.5.1", - "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" - } -} diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml deleted file mode 100644 index f9e70da7ee5a..000000000000 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ /dev/null @@ -1,8 +0,0 @@ -packages: - - './adapter-libsql' - - './adapter-neon' - - './adapter-pg' - - './adapter-planetscale' - - './connector-test-kit-executor' - - './driver-adapter-utils' - - './smoke-test-js' diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example deleted file mode 100644 index 15a286787cbd..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example +++ /dev/null @@ -1,26 +0,0 @@ -# Uncomment "source_up" if you need to load the .envrc at the root of the -# `prisma-engines` repository before loading this one (for example, if you -# are using Nix). -# -# source_up - -export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict" -export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10" - -# Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable` -export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438" - -# Set this to a `file:` URL when using a local sqlite database (either -# standalone or as an embedded replica). Otherwise, when using a remote Turso -# (or sqld) database in HTTP mode directly without an embedded replica, set its -# URL here. -export JS_LIBSQL_DATABASE_URL="file:${PWD}/libsql.db" - -# # Set this to the URL of remote Turso database when using an embedded replica. -# export JS_LIBSQL_SYNC_URL="" - -# # Provide an auth token when using a remote Turso database. 
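For reference, the libsql smoke tests later in this patch consume these variables roughly as follows; this is a sketch condensed from `src/client/libsql.test.ts`:

```typescript
import { createClient, type IntMode } from '@libsql/client'
import { PrismaLibSQL } from '@prisma/adapter-libsql'

const client = createClient({
  url: process.env.JS_LIBSQL_DATABASE_URL as string,
  syncUrl: process.env.JS_LIBSQL_SYNC_URL,     // set only when using an embedded replica
  authToken: process.env.JS_LIBSQL_AUTH_TOKEN, // needed only for remote Turso databases
  intMode: process.env.JS_LIBSQL_INT_MODE as IntMode | undefined,
})

// With an embedded replica, pull the remote state down before querying.
if (process.env.JS_LIBSQL_SYNC_URL) {
  await client.sync()
}

const adapter = new PrismaLibSQL(client)
```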
-# export JS_LIBSQL_AUTH_TOKEN="" - -# Can be one of "number" (the default when nothing is specified), "bigint" or "string". "bigint" works best with Prisma. -export JS_LIBSQL_INT_MODE="bigint" diff --git a/query-engine/driver-adapters/js/smoke-test-js/.gitignore b/query-engine/driver-adapters/js/smoke-test-js/.gitignore deleted file mode 100644 index be550f99317f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -libsql.db -libsql.db-journal -libsql.db-shm -libsql.db-wal diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md deleted file mode 100644 index f1b81df5d268..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# @prisma/driver-adapters-smoke-tests-js - -This is a playground for testing the `libquery` client with the experimental Node.js drivers. -It contains a subset of `@prisma/client`, plus some handy executable smoke tests: -- [`./src/libquery`](./src/libquery): it contains smoke tests using a local `libquery`, the Query Engine library. -- [`./src/client`](./src/client): it contains smoke tests using `@prisma/client`. - -## How to setup - -We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory. -It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon requires either Node.js `v18`+ or a custom `fetch` function. - -In the parent directory (`cd ..`): -- Build the driver adapters via `pnpm i && pnpm build` - -In the current directoy: -- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template -- Install Node.js dependencies via - ```bash - pnpm i - ``` - -(or run `sh ./setup.sh`) - -Anywhere in the repository: -- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine - -### PlanetScale - -If you don't have a connection string yet: - -- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/) -- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). -- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`. - -In the current directory: -- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data. -- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database. - For more fine-grained control: - - Run `pnpm planetscale:libquery` to test using `libquery` - - Run `pnpm planetscale:client` to test using `@prisma/client` - -### Neon - -If you don't have a connection string yet: - -- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). -- Paste the connection string to `JS_NEON_DATABASE_URL`. - -In the current directory: -- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data. -- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection. 
- For more fine-grained control: - - Run `pnpm neon:ws:libquery` to test using `libquery` - - Run `pnpm neon:ws:client` to test using `@prisma/client` -- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. - For more fine-grained control: - - Run `pnpm neon:http:libquery` to test using `libquery` - - Run `pnpm neon:http:client` to test using `@prisma/client` - -### Pg - -Start database via `docker compose up postgres15` in `/docker`. - -In the current directory: -- Run `pnpm prisma:pg` to push the Prisma schema and insert the test data. -- Run `pnpm pg` to run smoke tests using `libquery` against the PostgreSQL database, using `pg` - For more fine-grained control: - - Run `pnpm pg:libquery` to test using `libquery` - - Run `pnpm pg:client` to test using `@prisma/client` - -### Libsql - -In the current directory: -- Run `pnpm prisma:libsql` to push the Prisma schema and insert the test data. -- Run `pnpm libsql` to run smoke tests using `libquery` against the SQLite database, using `libSQL` - For more fine-grained control: - - Run `pnpm libsql:libquery` to test using `libquery` - - Run `pnpm libsql:client` to test using `@prisma/client` \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json deleted file mode 100644 index 31362c1cc873..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "@prisma/driver-adapters-smoke-tests-js", - "private": true, - "type": "module", - "version": "5.4.0", - "description": "", - "scripts": { - "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", - "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", - "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", - "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", - "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", - "prisma:db:push:sqlite": "prisma db push --schema ./prisma/sqlite/schema.prisma --force-reset", - "prisma:db:execute:sqlite": "prisma db execute --schema ./prisma/sqlite/schema.prisma --file ./prisma/sqlite/commands/type_test/insert.sql", - "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", - "prisma:neon:ws": "pnpm prisma:neon", - "prisma:neon:http": "pnpm prisma:neon", - "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", - "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", - 
"neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", - "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", - "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", - "pg": "pnpm pg:libquery && pnpm pg:client", - "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/errors.test.ts", - "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", - "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", - "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client", - "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"", - "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/libsql.test.ts", - "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/libsql.test.ts", - "libsql": "pnpm libsql:libquery && pnpm libsql:client" - }, - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": true, - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.2", - "@prisma/driver-adapter-utils": "workspace:*", - "pg": "^8.11.3", - "superjson": "^1.13.1", - "undici": "^5.26.2" - }, - "devDependencies": { - "@types/node": "^20.5.1", - "@types/pg": "^8.10.2", - "cross-env": "^7.0.3", - "prisma": "5.4.2", - "tsx": "^3.12.7" - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql deleted file mode 100644 index 6641eff216b2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql +++ /dev/null @@ -1,51 +0,0 @@ -INSERT INTO type_test ( - tinyint_column, - smallint_column, - mediumint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - bit_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - year_column, - datetime_column, - timestamp_column, - json_column, - enum_column, - binary_column, - varbinary_column, - blob_column, - set_column -) VALUES ( - 127, -- tinyint - 32767, -- smallint - 
8388607, -- mediumint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 1, -- bit - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - 2023, -- year - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3', -- enum - 0x4D7953514C, -- binary - 0x48656C6C6F20, -- varbinary - _binary 'binary', -- blob - 'option1,option3' -- set -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma deleted file mode 100644 index 59efb33a5594..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ /dev/null @@ -1,125 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - tinyint_column Int @db.TinyInt - tinyint_column_null Int? @db.TinyInt - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - mediumint_column Int @db.MediumInt - mediumint_column_null Int? @db.MediumInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Float - float_column_null Float? @db.Float - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - bit_column Boolean @db.Bit(1) - bit_column_null Boolean? @db.Bit(1) - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String @db.Text - text_column_null String? @db.Text - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - year_column Int @db.Year - year_column_null Int? @db.Year - datetime_column DateTime @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? - binary_column Bytes @db.Binary(64) - binary_column_null Bytes? @db.Binary(64) - varbinary_column Bytes @db.VarBinary(128) - varbinary_column_null Bytes? @db.VarBinary(128) - blob_column Bytes @db.Blob - blob_null Bytes? @db.Blob - set_column String - set_column_null String? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? 
- id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? -} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql deleted file mode 100644 index 170bafb9d810..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql +++ /dev/null @@ -1,35 +0,0 @@ -INSERT INTO type_test ( - smallint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - datetime_column, - timestamp_column, - json_column, - enum_column -) VALUES ( - 32767, -- smallint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3' -- enum -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma deleted file mode 100644 index 7cd31f406b9d..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ /dev/null @@ -1,117 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgres" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Real - float_column_null Float? @db.Real - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String - text_column_null String? - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - datetime_column DateTime @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? 
- id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? - users User[] -} - -model User { - id String @id @default(uuid()) - email String - favoriteProduct Product? @relation(fields: [productId], references: [id]) - productId String? -} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql deleted file mode 100644 index 014592d2fa2c..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql +++ /dev/null @@ -1,17 +0,0 @@ -INSERT INTO type_test ( - int_column, - bigint_column, - double_column, - decimal_column, - boolean_column, - text_column, - datetime_column -) VALUES ( - 2147483647, -- int - 9223372036854775807, -- bigint - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'This is a long text...', -- text - '2023-07-24 23:59:59.415' -- datetime -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql deleted file mode 100644 index 31c63d423e22..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql +++ /dev/null @@ -1,85 +0,0 @@ --- CreateTable -CREATE TABLE "type_test" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "int_column" INTEGER NOT NULL, - "int_column_null" INTEGER, - "bigint_column" BIGINT NOT NULL, - "bigint_column_null" BIGINT, - "double_column" REAL NOT NULL, - "double_column_null" REAL, - "decimal_column" DECIMAL NOT NULL, - "decimal_column_null" DECIMAL, - "boolean_column" BOOLEAN NOT NULL, - "boolean_column_null" BOOLEAN, - "text_column" TEXT NOT NULL, - "text_column_null" TEXT, - "datetime_column" DATETIME NOT NULL, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_2" ( - "id" TEXT NOT NULL PRIMARY KEY, - "datetime_column" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_3" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "bytes" BLOB NOT NULL -); - --- CreateTable -CREATE TABLE "Child" ( - "c" TEXT NOT NULL, - "c_1" TEXT NOT NULL, - "c_2" TEXT NOT NULL, - "parentId" TEXT, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "Parent" ( - "p" TEXT NOT NULL, - "p_1" TEXT NOT NULL, - "p_2" TEXT NOT NULL, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "authors" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "firstName" TEXT NOT NULL, - "lastName" TEXT NOT NULL, - "age" INTEGER NOT NULL -); - --- CreateTable -CREATE TABLE "Product" ( - "id" TEXT NOT NULL PRIMARY KEY, - "properties" TEXT NOT NULL, - "properties_null" TEXT -); - --- CreateTable -CREATE TABLE "Unique" ( - "email" TEXT NOT NULL PRIMARY KEY, -); - --- 
CreateIndex -CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_parentId_key" ON "Child"("parentId"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_1_c_2_key" ON "Child"("c_1", "c_2"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_key" ON "Parent"("p"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_1_p_2_key" ON "Parent"("p_1", "p_2"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml deleted file mode 100644 index e5e5c4705ab0..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml +++ /dev/null @@ -1,3 +0,0 @@ -# Please do not edit this file manually -# It should be added in your version-control system (i.e. Git) -provider = "sqlite" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma deleted file mode 100644 index bde23dee66ac..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma +++ /dev/null @@ -1,79 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - double_column Float - double_column_null Float? - decimal_column Decimal - decimal_column_null Decimal? - boolean_column Boolean - boolean_column_null Boolean? - text_column String - text_column_null String? - datetime_column DateTime - datetime_column_null DateTime? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) - datetime_column_null DateTime? -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties String - properties_null String? -} - -model Unique { - email String @id -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/setup.sh b/query-engine/driver-adapters/js/smoke-test-js/setup.sh deleted file mode 100644 index 7654679db14e..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/setup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -cd .. 
|| return -pnpm i && pnpm build -cargo build -p query-engine-node-api -cd smoke-test-js || exit -pnpm i \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts deleted file mode 100644 index b23cf2d97fb8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { describe, it } from 'node:test' -import path from 'node:path' -import assert from 'node:assert' -import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@prisma/driver-adapter-utils' -import { getLibQueryEnginePath } from '../libquery/util' - -export async function smokeTestClient(driverAdapter: DriverAdapter) { - const provider = driverAdapter.flavour - - const log = [ - { - emit: 'event', - level: 'query', - } as const, - ] - - const dirname = path.dirname(new URL(import.meta.url).pathname) - process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) - - // Run twice, once with adapter and once fully without - for (const adapter of [driverAdapter, null]) { - const isUsingDriverAdapters = adapter !== null - describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { - - it('expected error (on duplicate insert) as exception thrown / promise rejected', async () => { - const prisma = new PrismaClient({ adapter, log }) - - await assert.rejects( - async () => { - const result = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - const result2 = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - }, - (err) => { - assert.match(err.message, /unique/i); - return true; - }, - ); - - }) - - it('batch queries', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.$queryRawUnsafe('SELECT 1'), - prisma.$queryRawUnsafe('SELECT 2'), - prisma.$queryRawUnsafe('SELECT 3'), - ]) - - const defaultExpectedQueries = [ - 'BEGIN', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - const driverAdapterExpectedQueries = [ - '-- Implicit "BEGIN" query via underlying driver', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - '-- Implicit "COMMIT" query via underlying driver', - ] - - // TODO: sqlite should be here too but it's too flaky the way the test is currently written, - // only a subset of logs arrives on time (from 2 to 4 out of 5) - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, driverAdapterExpectedQueries) - } else { - assert.deepEqual(queries, defaultExpectedQueries) - } - } else if (['postgres'].includes(provider)) { - // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. 
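        // The filter below drops that optional "DEALLOCATE ALL" entry, so the assertion
        // holds both with and without pgbouncer in front of the database.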
- assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) - assert.deepEqual( - queries.filter((q) => q !== 'DEALLOCATE ALL'), - defaultExpectedQueries, - ) - } - }) - - if (provider !== 'sqlite') { - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }) - - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', '-- Implicit "BEGIN" query via underlying driver']) - } else { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) - } - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) - } else { - describe('isolation levels with sqlite', () => { - it('accepts Serializable as a no-op', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'Serializable', - }) - - console.log("queries", queries) - - if (isUsingDriverAdapters) { - assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') - assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') - } else { - assert.equal(queries.at(0), 'BEGIN') - assert.equal(queries.at(-1), 'COMMIT') - } - - assert(!queries.find((q) => q.includes('SET TRANSACTION ISOLATION LEVEL'))) - }) - - it('throws on unsupported isolation levels', async () => { - const prisma = new PrismaClient({ adapter }) - - assert.rejects( - prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }), - ) - }) - - }) - - } - - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) - - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) - - }) - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts deleted file mode 100644 index f216b2a02ac7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('libsql with @prisma/client', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - - if (syncUrl) { - await client.sync() - } - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts 
b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts deleted file mode 100644 index 53156ac56249..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { smokeTestClient } from './client' - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const connection = neon(connectionString) - const adapter = new PrismaNeonHTTP(connection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts deleted file mode 100644 index 37b0a9088bb7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@prisma/adapter-neon' -import { WebSocket } from 'undici' -import { smokeTestClient } from './client' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts deleted file mode 100644 index 99048ad3d95f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { smokeTestClient } from './client' - -describe('pg with @prisma/client', async () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts deleted file mode 100644 index 3c22b7aa3062..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('planetscale with @prisma/client', async () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? 
'' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts deleted file mode 100644 index bd491db289a3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts +++ /dev/null @@ -1,78 +0,0 @@ -import * as Transaction from './Transaction' - -export type JsonQuery = { - modelName?: string - action: JsonQueryAction - query: JsonFieldSelection -} - -export type JsonBatchQuery = { - batch: JsonQuery[] - transaction?: { isolationLevel?: Transaction.IsolationLevel } -} - -export type JsonQueryAction = - | 'findUnique' - | 'findUniqueOrThrow' - | 'findFirst' - | 'findFirstOrThrow' - | 'findMany' - | 'createOne' - | 'createMany' - | 'updateOne' - | 'updateMany' - | 'deleteOne' - | 'deleteMany' - | 'upsertOne' - | 'aggregate' - | 'groupBy' - | 'executeRaw' - | 'queryRaw' - | 'runCommandRaw' - | 'findRaw' - | 'aggregateRaw' - -export type JsonFieldSelection = { - arguments?: Record - selection: JsonSelectionSet -} - -export type JsonSelectionSet = { - $scalars?: boolean - $composites?: boolean -} & { - [fieldName: string]: boolean | JsonFieldSelection -} - -export type JsonArgumentValue = - | number - | string - | boolean - | null - | JsonTaggedValue - | JsonArgumentValue[] - | { [key: string]: JsonArgumentValue } - -export type DateTaggedValue = { $type: 'DateTime'; value: string } -export type DecimalTaggedValue = { $type: 'Decimal'; value: string } -export type BytesTaggedValue = { $type: 'Bytes'; value: string } -export type BigIntTaggedValue = { $type: 'BigInt'; value: string } -export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } } -export type EnumTaggedValue = { $type: 'Enum'; value: string } -export type JsonTaggedValue = { $type: 'Json'; value: string } - -export type JsonInputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | FieldRefTaggedValue - | JsonTaggedValue - | EnumTaggedValue - -export type JsonOutputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | JsonTaggedValue diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts deleted file mode 100644 index a25b3dd26728..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineConfig } from './QueryEngine' - -export type QueryEngineInstance = { - connect(headers: string): Promise - disconnect(headers: string): Promise - /** - * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest` - * @param headersStr JSON.stringified `QueryEngineRequestHeaders` - */ - query(requestStr: string, headersStr: string, transactionId?: string): Promise - sdlSchema(): Promise - dmmf(traceparent: string): Promise - startTransaction(options: string, traceHeaders: string): Promise - commitTransaction(id: string, traceHeaders: string): Promise - rollbackTransaction(id: string, traceHeaders: string): Promise - metrics(options: string): Promise -} - -export interface QueryEngineConstructor { - new( - config: QueryEngineConfig, - 
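    // `config` carries the inlined datamodel (see QueryEngineConfig below), `logger`
    // receives the engine's JSON-encoded log lines, and `driverAdapter`, when provided,
    // routes queries through the JS adapter instead of the built-in Rust drivers.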
logger: (log: string) => void, - driverAdapter?: ErrorCapturingDriverAdapter, - ): QueryEngineInstance -} - -export interface LibraryLoader { - loadLibrary(): Promise<Library> -} - -// Main -export type Library = { - QueryEngine: QueryEngineConstructor - - version: () => { - // The commit hash of the engine - commit: string - // Currently 0.1.0 (Set in Cargo.toml) - version: string - } - /** - * This returns a string representation of `DMMF.Document` - */ - dmmf: (datamodel: string) => Promise<string> -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts deleted file mode 100644 index 5bab74493dee..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { JsonBatchQuery, JsonQuery } from './JsonProtocol' -import * as Transaction from './Transaction' - -// Events -export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent - -export type QueryEngineLogEvent = { - level: string - module_path: string - message: string - span?: boolean -} - -export type QueryEngineQueryEvent = { - level: 'info' - module_path: string - query: string - item_type: 'query' - params: string - duration_ms: string - result: string -} - -export type QueryEnginePanicEvent = { - level: 'error' - module_path: string - message: 'PANIC' - reason: string - file: string - line: string - column: string -} - -// Configuration -export type QueryEngineLogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'off' - -export type QueryEngineTelemetry = { - enabled: Boolean - endpoint: string -} - -export type GraphQLQuery = { - query: string - variables: object -} - -export type EngineProtocol = 'graphql' | 'json' -export type EngineQuery = GraphQLQuery | JsonQuery - -export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] - -export type QueryEngineConfig = { - // TODO rename datamodel here and other places - datamodel: string - configDir: string - logQueries: boolean - ignoreEnvVarErrors: boolean - datasourceOverrides?: Record<string, string> - env: Record<string, string> - logLevel: QueryEngineLogLevel - telemetry?: QueryEngineTelemetry - engineProtocol: EngineProtocol -} - -// Errors -export type SyncRustError = { - is_panic: boolean - message: string - meta: { - full_error: string - } - error_code: string -} - -export type RustRequestError = { - is_panic: boolean - message: string - backtrace: string -} - -export type QueryEngineResult<T> = { - data: T - elapsed: number -} - -export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery - -export type QueryEngineBatchGraphQLRequest = { - batch: QueryEngineRequest[] - transaction?: boolean - isolationLevel?: Transaction.IsolationLevel -} - -export type QueryEngineRequest = { - query: string - variables: Object -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts deleted file mode 100644 index 1c5786cc66da..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts +++ /dev/null @@ -1,35 +0,0 @@ -export enum IsolationLevel { - ReadUncommitted = 'ReadUncommitted', - ReadCommitted = 'ReadCommitted', - RepeatableRead = 'RepeatableRead', - Snapshot = 'Snapshot', - Serializable = 'Serializable', -} - -/** - * maxWait ?= 2000 - * timeout ?= 5000 - */ -export type Options = { - maxWait?: number - timeout?:
number - isolationLevel?: IsolationLevel -} - -export type InteractiveTransactionInfo<Payload = unknown> = { - /** - * Transaction ID returned by the query engine. - */ - id: string - - /** - * Arbitrary payload the meaning of which depends on the `Engine` implementation. - * For example, `DataProxyEngine` needs to associate different API endpoints with transactions. - * In `LibraryEngine` and `BinaryEngine` it is currently not used. - */ - payload: Payload -} - -export type TransactionHeaders = { - traceparent?: string -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts deleted file mode 100644 index 13ac5cd9ec81..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { bindAdapter } from '@prisma/driver-adapter-utils' -import test, { after, before, describe } from 'node:test' -import { createQueryFn, initQueryEngine, throwAdapterError } from './util' -import assert from 'node:assert' - -const fakeAdapter = bindAdapter({ - flavour: 'postgres', - startTransaction() { - throw new Error('Error in startTransaction') - }, - - queryRaw() { - throw new Error('Error in queryRaw') - }, - - executeRaw() { - throw new Error('Error in executeRaw') - }, - close() { - return Promise.resolve({ ok: true, value: undefined }) - }, -}) - -const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma') -const doQuery = createQueryFn(engine, fakeAdapter) - -const startTransaction = async () => { - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}')) - if (res['error_code']) { - throwAdapterError(res, fakeAdapter) - } -} - -describe('errors propagation', () => { - before(async () => { - await engine.connect('{}') - }) - after(async () => { - await engine.disconnect('{}') - }) - - test('works for queries', async () => { - await assert.rejects( - doQuery({ - modelName: 'Product', - action: 'findMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }), - /Error in queryRaw/, - ) - }) - - test('works for executeRaw', async () => { - await assert.rejects( - doQuery({ - action: 'executeRaw', - query: { - arguments: { - query: 'SELECT 1', - parameters: '[]', - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in executeRaw/, - ) - }) - - test('works with implicit transaction', async () => { - await assert.rejects( - doQuery({ - modelName: 'User', - action: 'createOne', - query: { - arguments: { - data: { - email: 'user@example.com', - favoriteProduct: { - create: { - properties: {}, - }, - }, - }, - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in startTransaction/, - ) - }) - - test('works with explicit transaction', async () => { - await assert.rejects(startTransaction(), /Error in startTransaction/) - }) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts deleted file mode 100644 index c50ad3e257ab..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ /dev/null @@ -1,722 +0,0 @@ -import { describe, it, before, after } from 'node:test' -import assert from 'node:assert' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineInstance } from '../engines/types/Library'
-import { createQueryFn, initQueryEngine } from './util' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function smokeTestLibquery( - adapter: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, - supportsTransactions = true, -) { - const engine = initQueryEngine(adapter, prismaSchemaRelativePath) - const flavour = adapter.flavour - - const doQuery = createQueryFn(engine, adapter) - - describe('using libquery with Driver Adapters', () => { - before(async () => { - await engine.connect('trace') - }) - - after(async () => { - await engine.disconnect('trace') - await adapter.close() - }) - - it('create JSON values', async () => { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, - }) - - const created = await doQuery({ - action: 'createOne', - modelName: 'Product', - query: { - arguments: { - data: { - properties: json, - properties_null: null, - }, - }, - selection: { - properties: true, - }, - }, - }) - - if (flavour !== 'sqlite') { - assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') - } - - console.log('[nodejs] created', JSON.stringify(created, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'Product', - query: { - selection: { - id: true, - properties: true, - properties_null: true, - }, - }, - }) - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'Product', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create with autoincrement', async () => { - await doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - }) - - it('create non scalar types', async () => { - const create = await doQuery({ - action: 'createOne', - modelName: 'type_test_2', - query: { - arguments: { - data: {}, - }, - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - }, - }) - - console.log('[nodejs] create', JSON.stringify(create, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_2', - query: { - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - arguments: { - where: {}, - }, - }, - }) - - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'type_test_2', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create/delete parent and child', async () => { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create explicit transaction', async () => { - if (!supportsTransactions) return - - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') - const tx_id = JSON.parse(startResponse).id - console.log('[nodejs] transaction id', tx_id) - assert.notStrictEqual(tx_id, undefined) - - await doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - }) - - it('expected error (on duplicate insert) as json result (not throwing error)', async () => { - await doQuery({ - modelName: 'Unique', - action: 'deleteMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }) - - await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const promise = doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const result = await promise - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') - }) - - describe('read scalar and non scalar types', () => { - if (['mysql'].includes(flavour)) { - it('mysql', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['postgres'].includes(flavour)) { - it('postgres', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['sqlite'].includes(flavour)) { - it('sqlite', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - int_column: true, - bigint_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - text_column: true, - datetime_column: true, - }, - }, - }) - console.log('[nodejs] findMany 
resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else { - throw new Error(`Missing test for flavour ${flavour}`) - } - }) - - it('write and read back bytes', async () => { - const createResultSet = await doQuery({ - action: 'createOne', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - arguments: { - data: { - bytes: { - $type: 'Bytes', - value: 'AQID', - }, - }, - }, - }, - }) - console.log('[nodejs] createOne resultSet:') - console.dir(createResultSet, { depth: Infinity }) - - const findResultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet:') - console.dir(findResultSet, { depth: Infinity }) - }) - }) -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } - - async testFindManyTypeTest() { - await this.testFindManyTypeTestMySQL() - await this.testFindManyTypeTestPostgres() - } - - private async testFindManyTypeTestMySQL() { - if (this.flavour !== 'mysql') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - private async testFindManyTypeTestPostgres() { - if (this.flavour !== 'postgres') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - async createAutoIncrement() { - await this.doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await this.doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - } - - async testCreateAndDeleteChildParent() { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' - // ] - const resultDeleteMany = await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) - } - - async testTransaction() { - const startResponse = await this.engine.startTransaction( - JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), - 'trace', - ) - - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await this.doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - } - - private async doQuery(query: JsonQuery, tx_id?: string) { - const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } - } - return parsedResult - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts deleted file mode 100644 index 7f0a1038ec74..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('libsql', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - const driverAdapter = bindAdapter(adapter) - - if (syncUrl) { - await client.sync() - } - - smokeTestLibquery(driverAdapter, '../../prisma/sqlite/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts deleted file mode 100644 index 02872b885fe3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('neon (HTTP)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const neonConnection = neon(connectionString) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma', false) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts deleted file mode 100644 index 54765f5961ba..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { PrismaNeon } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon (WebSocket)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts deleted file mode 100644 index 9b79e7284be8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('pg', () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts deleted file mode 100644 index bb7c81805adc..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('planetscale', () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' - - const connection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts deleted file mode 100644 index 783eb76759d2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ /dev/null @@ -1,71 +0,0 @@ -import path from 'node:path' -import os from 'node:os' -import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import { Library, QueryEngineInstance } from '../engines/types/Library' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function initQueryEngine( - driver: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, -): QueryEngineInstance { - const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = getLibQueryEnginePath(dirname) - - const schemaPath = path.join(dirname, prismaSchemaRelativePath) - - console.log('[nodejs] read Prisma schema from', schemaPath) - - const libqueryEngine = { exports: {} as unknown as Library } - // @ts-ignore - process.dlopen(libqueryEngine, libQueryEnginePath) - - const QueryEngine = libqueryEngine.exports.QueryEngine - - const queryEngineOptions = { - datamodel: fs.readFileSync(schemaPath, 'utf-8'), - configDir: '.', - engineProtocol: 'json' as const, - logLevel: 'info' as const, - logQueries: false, - env: process.env, - ignoreEnvVarErrors: false, - } - - const logCallback = (...args) => { - console.log(args) - } - - const engine = new QueryEngine(queryEngineOptions,
logCallback, driver) - - return engine -} - -export function getLibQueryEnginePath(dirname: String) { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' - return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) -} - -export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { - return async function doQuery(query: JsonQuery, tx_id?: string) { - const result = await engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter) - } - return parsedResult - } -} - -export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) { - if (error.error_code === 'P2036') { - const jsError = adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json b/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/version.sh b/query-engine/driver-adapters/js/version.sh deleted file mode 100755 index 8f592c0e197c..000000000000 --- a/query-engine/driver-adapters/js/version.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Usage: `./version.sh x.y.z` will set the version `x.y.z` on every package in the monorepo. - -target_version=$1 -package_dirs=$(pnpm -r list -r --depth -1 --json | jq -r '.[] | .path' | tail -n +2) - -# Iterate through each package directory -for package_dir in $package_dirs; do - # Check if the directory exists - if [ -d "$package_dir" ]; then - # Set the target version using pnpm - (cd "$package_dir" && pnpm version "$target_version" --no-git-tag-version --allow-same-version) - fi -done diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index c43f66a81e72..53133e037b6f 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -31,7 +31,6 @@ pub struct SqliteErrorDef { #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception GenericJs { @@ -64,7 +63,6 @@ impl From<DriverAdapterError> for QuaintError { } /// Wrapper for JS-side result type -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum JsResult<T> where T: FromNapiValue, From a9694da0ea5535048ec82f30d4a7e393b77b935e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 15:43:24 +0200 Subject: [PATCH 123/128] ci: schema wasm, revert action to Node v14 for lack of a better idea (#4393) --- .github/workflows/publish-prisma-schema-wasm.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 78d139f80772..684576065796 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -36,16 +36,16 @@ jobs: - uses:
actions/setup-node@v3 with: - node-version: '20.x' - + node-version: '14.x' + # This is needed to be done manually because of `PACKAGE_DIR` used later - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Update version in package.json & Publish @prisma/prisma-schema-wasm - run: + run: # Update version in package.json and return directory for later usage - PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) + PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} env: # Required for publishing From 685d9bcef717766514f83f91d65c356fdc645a56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Fri, 27 Oct 2023 15:58:33 +0200 Subject: [PATCH 124/128] ci: finally fix schema wasm publish (#4394) --- .github/workflows/publish-prisma-schema-wasm.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 684576065796..30ecd68a2152 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -36,20 +36,17 @@ jobs: - uses: actions/setup-node@v3 with: - node-version: '14.x' + node-version: '20.x' # This is needed to be done manually because of `PACKAGE_DIR` used later - name: Set up NPM token for publishing later run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - name: Update version in package.json & Publish @prisma/prisma-schema-wasm - run: + run: | # Update version in package.json and return directory for later usage PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - # Required for publishing - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # # Failure handlers # From 582b416e6b0ed6f95140cdea85b3a49e2dc3e52b Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 30 Oct 2023 10:50:38 +0100 Subject: [PATCH 125/128] feat(core): add fork of "cuid" with wasm32-unknown-unknown support (#4231) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miguel Fernández --- Cargo.lock | 14 +++++++------- .../connectors/mongodb-query-connector/Cargo.toml | 4 +--- .../connectors/sql-query-connector/Cargo.toml | 4 +--- query-engine/core/Cargo.toml | 2 +- query-engine/dmmf/Cargo.toml | 2 +- query-engine/prisma-models/Cargo.toml | 11 +++++++---- 6 files changed, 18 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8166394f8c89..35eff530999a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -870,29 +870,26 @@ dependencies = [ [[package]] name = "cuid" version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51294db11d38eb763c92936c5c88425d0090e27dce21dd15748134af9e53e739" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "base36", "cuid-util", "cuid2", - "hostname", "num", "once_cell", "rand 0.8.5", + "sha3", ] [[package]] name = "cuid-util" version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5ea2bfe0336ff1b7ca74819b2df8dfae9afea358aff6b1688baa5c181d8c3713" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" [[package]] name = "cuid2" version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47d99cacd52fd67db7490ad051c8c1973fb75520174d69aabbae08c534c9d0e8" +source = "git+https://github.com/prisma/cuid-rust?branch=wasm32-support#81309f9a11f70d178bb545971d51ceb7da692c52" dependencies = [ "cuid-util", "num", @@ -1557,8 +1554,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -3353,6 +3352,7 @@ dependencies = [ "bigdecimal", "chrono", "cuid", + "getrandom 0.2.10", "itertools", "nanoid", "prisma-value", diff --git a/query-engine/connectors/mongodb-query-connector/Cargo.toml b/query-engine/connectors/mongodb-query-connector/Cargo.toml index d41210342107..c4a02eaa8643 100644 --- a/query-engine/connectors/mongodb-query-connector/Cargo.toml +++ b/query-engine/connectors/mongodb-query-connector/Cargo.toml @@ -22,6 +22,7 @@ tracing-futures = "0.2" uuid.workspace = true indexmap = "1.7" query-engine-metrics = {path = "../../metrics"} +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } [dependencies.prisma-models] path = "../../prisma-models" @@ -46,9 +47,6 @@ workspace = true [dependencies.serde] workspace = true -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] workspace = true diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 5fe3052f2e8d..62d0be640761 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -25,6 +25,7 @@ uuid.workspace = true opentelemetry = { version = "0.17", features = ["tokio"] } tracing-opentelemetry = "0.17.3" quaint.workspace = true +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } [dependencies.connector-interface] package = "query-connector" @@ -44,9 +45,6 @@ version = "0.4" features = ["derive"] version = "1.0" -[dependencies.cuid] -version = "1.2" - [dependencies.user-facing-errors] features = ["sql"] path = "../../../libs/user-facing-errors" diff --git a/query-engine/core/Cargo.toml b/query-engine/core/Cargo.toml index c9700bb85f19..caadf6cdba00 100644 --- a/query-engine/core/Cargo.toml +++ b/query-engine/core/Cargo.toml @@ -29,7 +29,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } tracing-opentelemetry = "0.17.4" user-facing-errors = { path = "../../libs/user-facing-errors" } uuid = "1" -cuid = "1.2" +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } schema = { path = "../schema" } lru = "0.7.7" enumflags2 = "0.7" diff --git a/query-engine/dmmf/Cargo.toml b/query-engine/dmmf/Cargo.toml index f4a8bfb6e6f2..cc92c914d4e6 100644 --- a/query-engine/dmmf/Cargo.toml +++ b/query-engine/dmmf/Cargo.toml @@ -10,7 +10,7 @@ serde.workspace = true serde_json.workspace = true schema = { path = "../schema" } indexmap = { version = "1.7", features = ["serde-1"] } -prisma-models = { path = "../prisma-models" } +prisma-models = { path = "../prisma-models", features = ["default_generators"] } [dev-dependencies] expect-test = "1.2.2" diff --git 
a/query-engine/prisma-models/Cargo.toml b/query-engine/prisma-models/Cargo.toml index c7e012afebfb..0becd1fdea70 100644 --- a/query-engine/prisma-models/Cargo.toml +++ b/query-engine/prisma-models/Cargo.toml @@ -10,13 +10,16 @@ prisma-value = { path = "../../libs/prisma-value" } bigdecimal = "0.3" thiserror = "1.0" +getrandom = { version = "0.2" } uuid = { workspace = true, optional = true } -cuid = { version = "1.2", optional = true } +cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support", optional = true } nanoid = { version = "0.4.0", optional = true } chrono = { version = "0.4.6", features = ["serde"] } +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.2" +features = ["js"] + [features] -# Support for generating default UUID, CUID, nanoid and datetime values. This -# implies random number generation works, so it won't compile on targets like -# wasm32. +# Support for generating default UUID, CUID, nanoid and datetime values. default_generators = ["uuid/v4", "cuid", "nanoid"] From 79b5ee004f2141588596af36cf709e39094df7f1 Mon Sep 17 00:00:00 2001 From: Lucian Buzzo Date: Mon, 30 Oct 2023 11:19:28 +0000 Subject: [PATCH 126/128] fix: add missing periods in quaint README (#4399) [skip-ci] --- quaint/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/quaint/README.md b/quaint/README.md index e27c56972390..92033db269b1 100644 --- a/quaint/README.md +++ b/quaint/README.md @@ -41,7 +41,7 @@ choice. ```sh > cargo build --features all - ``` +``` ### Testing @@ -70,8 +70,8 @@ This requires the rust nightly channel: > cargo rustdoc --all-features ``` -Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html` +Documentation index would be created at `$CARGO_TARGET_DIR/doc/quaint/index.html`. ## Security -If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint) +If you have a security issue to report, please contact us at [security@prisma.io](mailto:security@prisma.io?subject=[GitHub]%20Prisma%202%20Security%20Report%20Quaint). 
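A note on the wasm32 groundwork in [PATCH 125/128] above: `getrandom` 0.2 has no entropy source on `wasm32-unknown-unknown` unless its `js` feature is enabled, which routes randomness requests to the JavaScript host. That is what the target-specific dependency block added to `prisma-models`' Cargo.toml provides, and it is why the forked cuid crate (which swaps the native-only `hostname` dependency for `sha3`, per the Cargo.lock hunk) can keep generating default id values in the browser. The sketch below is illustrative only (it is not part of any patch; the function name and error handling are invented for the example) and shows the two kinds of calls that rely on that entropy source:

    // Illustrative sketch, not patch content. Both calls below need an entropy
    // source at runtime; on wasm32-unknown-unknown, getrandom 0.2 only has one
    // when compiled with `features = ["js"]`, as the Cargo.toml change enables.
    fn generate_default_ids() -> Result<(String, [u8; 16]), Box<dyn std::error::Error>> {
        // cuid 1.x API, provided here by the wasm32-support fork.
        let id = cuid::cuid()?;
        // Direct getrandom call, the same primitive that rand and cuid build on.
        let mut seed = [0u8; 16];
        getrandom::getrandom(&mut seed)?;
        Ok((id, seed))
    }
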
From 49b44c541132ae3c617fa9a61ad7c048cce871f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 30 Oct 2023 17:23:38 +0100 Subject: [PATCH 127/128] Fix driver adapters tests after migration of driver adapters to prisma (#4398) * Fix tests * Make * Remove symlinks and use instead parent directory * DRIVER_ADAPTERS_BRANCH=chore/client/adapter-porting-nits add debug to workspace * Build packages in the old directory otherwise @esbuild-register is not found despite being defined as a devDependency within driver adapters @millsp --- .gitignore | 5 ++-- Makefile | 17 +++---------- .../connector-test-kit-executor/package.json | 16 ++++++------ .../pnpm-lock.yaml | 3 --- query-engine/driver-adapters/js/.nvmrc | 1 - query-engine/driver-adapters/package.json | 25 +++++++++++++++++++ .../driver-adapters/pnpm-workspace.yaml | 8 ++++++ 7 files changed, 46 insertions(+), 29 deletions(-) delete mode 100644 query-engine/driver-adapters/js/.nvmrc create mode 100644 query-engine/driver-adapters/package.json create mode 100644 query-engine/driver-adapters/pnpm-workspace.yaml diff --git a/.gitignore b/.gitignore index be185b0f7afc..75c06e9ce68b 100644 --- a/.gitignore +++ b/.gitignore @@ -47,5 +47,6 @@ graph.dot prisma-schema-wasm/nodejs -# This symlink looks orphan here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory -tsconfig.build.adapter.json +# Ignore pnpm-lock.yaml +query-engine/driver-adapters/pnpm-lock.yaml +package-lock.json diff --git a/Makefile b/Makefile index 541738c35d95..a30a32ca1871 100644 --- a/Makefile +++ b/Makefile @@ -285,25 +285,14 @@ test-driver-adapter-planetscale: test-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: build-driver-adapters symlink-driver-adapters - cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build +build-connector-kit-js: build-driver-adapters + cd query-engine/driver-adapters && pnpm i && pnpm build build-driver-adapters: ensure-prisma-present @echo "Building driver adapters..." - @cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build + @cd ../prisma && pnpm --filter "*adapter*" i @echo "Driver adapters build completed."; -symlink-driver-adapters: ensure-prisma-present - @echo "Creating symbolic links for driver adapters..." 
- @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ - if [ -d "$$dir" ]; then \ - dir_name=$$(basename "$$dir"); \ - ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ - echo "Created symbolic link for $$dir_name"; \ - fi; \ - done; - echo "Symbolic links creation completed."; - ensure-prisma-present: @if [ -d ../prisma ]; then \ cd "$(realpath ../prisma)" && git fetch origin main; \ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index b63694bb4459..153b833df1e1 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -10,8 +10,7 @@ "module": "dist/index.mjs", "private": true, "scripts": { - "build": "tsup ./src/index.ts --format esm --dts", - "lint": "tsc -p ./tsconfig.build.json" + "build": "tsup ./src/index.ts --format esm --dts" }, "keywords": [], "author": "", @@ -21,11 +20,11 @@ "@libsql/client": "0.3.5", "@neondatabase/serverless": "^0.6.0", "@planetscale/database": "1.11.0", - "@prisma/adapter-libsql": "../adapter-libsql", - "@prisma/adapter-neon": "../adapter-neon", - "@prisma/adapter-pg": "../adapter-pg", - "@prisma/adapter-planetscale": "../adapter-planetscale", - "@prisma/driver-adapter-utils": "../driver-adapter-utils", + "@prisma/adapter-libsql": "workspace:*", + "@prisma/adapter-neon": "workspace:*", + "@prisma/adapter-pg": "workspace:*", + "@prisma/adapter-planetscale": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", "@types/pg": "^8.10.2", "pg": "^8.11.3", "undici": "^5.26.5", @@ -34,7 +33,6 @@ "devDependencies": { "@types/node": "^20.5.1", "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" + "typescript": "5.2.2" } } \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index d140be7b516c..d4f9fa09277d 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -38,9 +38,6 @@ dependencies: undici: specifier: ^5.26.5 version: 5.26.5 - ws: - specifier: ^8.14.2 - version: 8.14.2 devDependencies: '@types/node': diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc deleted file mode 100644 index 6569dfa4f323..000000000000 --- a/query-engine/driver-adapters/js/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -20.8.1 diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json new file mode 100644 index 000000000000..1362da87700d --- /dev/null +++ b/query-engine/driver-adapters/package.json @@ -0,0 +1,25 @@ +{ + "private": true, + "name": "js", + "version": "0.0.2", + "description": "", + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "license": "Apache-2.0", + "scripts": { + "build": "pnpm -r run build", + "lint": "pnpm -r run lint", + "clean": "git clean -nXd -e !query-engine/driver-adapters" + }, + "keywords": [], + "author": "", + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "typescript": "5.2.2", + "esbuild": "0.19.5", + "esbuild-register": "3.5.0" + } +} diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml new file mode 100644 index 000000000000..d37910ea5ae6 --- /dev/null +++ 
b/query-engine/driver-adapters/pnpm-workspace.yaml @@ -0,0 +1,8 @@ +packages: + - '../../../prisma/packages/adapter-libsql' + - '../../../prisma/packages/adapter-neon' + - '../../../prisma/packages/adapter-pg' + - '../../../prisma/packages/adapter-planetscale' + - '../../../prisma/packages/driver-adapter-utils' + - '../../../prisma/packages/debug' + - './connector-test-kit-executor' \ No newline at end of file From 82dc77df83bf91471cbfcd4d418e06dd04567400 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Wed, 1 Nov 2023 10:24:45 +0100 Subject: [PATCH 128/128] driver-adapters: Rename ColumnType::Char to Character (#4402) * driver-adapters: Rename ColumnType::Char to Character To avoid confusion with SQL's CHAR type, which is a fixed-length string, not a single character. * Rename CharArray too --- query-engine/driver-adapters/src/proxy.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/query-engine/driver-adapters/src/proxy.rs b/query-engine/driver-adapters/src/proxy.rs index 14bfd46e62e0..da03336bdf53 100644 --- a/query-engine/driver-adapters/src/proxy.rs +++ b/query-engine/driver-adapters/src/proxy.rs @@ -117,9 +117,7 @@ pub enum ColumnType { /// - BOOLEAN (BOOLEAN) -> e.g. `1` Boolean = 5, - /// The following PlanetScale type IDs are mapped into Char: - /// - CHAR (CHAR) -> e.g. `"c"` (String-encoded) - Char = 6, + Character = 6, /// The following PlanetScale type IDs are mapped into Text: /// - TEXT (TEXT) -> e.g. `"foo"` (String-encoded) @@ -184,7 +182,7 @@ pub enum ColumnType { BooleanArray = 69, /// Char array (CHAR_ARRAY in PostgreSQL) - CharArray = 70, + CharacterArray = 70, /// Text array (TEXT_ARRAY in PostgreSQL) TextArray = 71, @@ -346,7 +344,7 @@ fn js_value_to_quaint( "expected a boolean in column '{column_name}', found {mismatch}" )), }, - ColumnType::Char => match json_value { + ColumnType::Character => match json_value { serde_json::Value::String(s) => match s.chars().next() { Some(c) => Ok(QuaintValue::character(c)), None => Ok(QuaintValue::null_character()), @@ -452,7 +450,7 @@ fn js_value_to_quaint( ColumnType::DoubleArray => js_array_to_quaint(ColumnType::Double, json_value, column_name), ColumnType::NumericArray => js_array_to_quaint(ColumnType::Numeric, json_value, column_name), ColumnType::BooleanArray => js_array_to_quaint(ColumnType::Boolean, json_value, column_name), - ColumnType::CharArray => js_array_to_quaint(ColumnType::Char, json_value, column_name), + ColumnType::CharacterArray => js_array_to_quaint(ColumnType::Character, json_value, column_name), ColumnType::TextArray => js_array_to_quaint(ColumnType::Text, json_value, column_name), ColumnType::DateArray => js_array_to_quaint(ColumnType::Date, json_value, column_name), ColumnType::TimeArray => js_array_to_quaint(ColumnType::Time, json_value, column_name), @@ -790,7 +788,7 @@ mod proxy_test { #[test] fn js_value_char_to_quaint() { - let column_type = ColumnType::Char; + let column_type = ColumnType::Character; // null test_null(QuaintValue::null_character(), column_type);
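To make the rationale in [PATCH 128/128] concrete: `ColumnType::Character` carries a single character, whereas SQL's CHAR type is a fixed-length string, so the old name `Char` invited confusion. A simplified sketch of the conversion rule visible in the hunk above (illustrative only, with a stand-in function name; the real `js_value_to_quaint` also covers nulls, arrays, and error reporting):

    // Sketch of the Character rule from the diff above: a JSON string is only
    // a carrier for its first character, and an empty string becomes NULL.
    fn character_from_json(s: &str) -> Option<char> {
        // Some(c) corresponds to QuaintValue::character(c),
        // None corresponds to QuaintValue::null_character().
        s.chars().next()
    }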