From eb7b15e8a7314fea5a34e2ca3d5715d15f50bcc9 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Fri, 8 Dec 2023 17:30:33 +0100 Subject: [PATCH 1/4] fix(qe): coerce nulls to empty lists in nested relations with joins (#4547) Currently we coerce database NULL values to empty scalar lists in the regular code path that converts quaint values to `PrismaValue`s, but this was missing in the new code path that converts the results of JSON aggregation to `PrismaValue`s when using `relationJoins` preview feature. Fixes: https://github.com/prisma/prisma/issues/22303 --- nix/shell.nix | 1 + .../tests/queries/data_types/through_relation.rs | 11 +++++++---- .../src/database/operations/coerce.rs | 8 +++++++- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/nix/shell.nix b/nix/shell.nix index 792f9a6540f5..71e5cd9e7aa4 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -14,6 +14,7 @@ in nodejs.pkgs.typescript-language-server nodejs.pkgs.pnpm + cargo-insta jq graphviz wasm-bindgen-cli diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs index b2af72ab955e..9747740e076d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs @@ -224,6 +224,8 @@ mod scalar_relations { bytes Bytes[] bool Boolean[] dt DateTime[] + empty Int[] + unset Int[] } "# }; @@ -250,19 +252,20 @@ mod scalar_relations { bytes: ["AQID", "Qk9OSk9VUg=="], bool: [false, true], dt: ["1900-10-10T01:10:10.001Z", "1999-12-12T21:12:12.121Z"], + empty: [] }"#, ) .await?; create_parent(&runner, r#"{ id: 1, children: { connect: [{ childId: 1 }] } }"#).await?; insta::assert_snapshot!( - run_query!(&runner, r#"{ findManyParent { id children { childId string int bInt float bytes bool dt } } }"#), - @r###"{"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"string":["abc","def"],"int":[1,-1,1234567],"bInt":["1","-1","9223372036854775807","-9223372036854775807"],"float":[1.5,-1.5,1.234567],"bytes":["AQID","Qk9OSk9VUg=="],"bool":[false,true],"dt":["1900-10-10T01:10:10.001Z","1999-12-12T21:12:12.121Z"]}]}]}}"### + run_query!(&runner, r#"{ findManyParent { id children { childId string int bInt float bytes bool dt empty unset } } }"#), + @r###"{"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"string":["abc","def"],"int":[1,-1,1234567],"bInt":["1","-1","9223372036854775807","-9223372036854775807"],"float":[1.5,-1.5,1.234567],"bytes":["AQID","Qk9OSk9VUg=="],"bool":[false,true],"dt":["1900-10-10T01:10:10.001Z","1999-12-12T21:12:12.121Z"],"empty":[],"unset":[]}]}]}}"### ); insta::assert_snapshot!( - run_query!(&runner, r#"{ findUniqueParent(where: { id: 1 }) { id children { childId string int bInt float bytes bool dt } } }"#), - @r###"{"data":{"findUniqueParent":{"id":1,"children":[{"childId":1,"string":["abc","def"],"int":[1,-1,1234567],"bInt":["1","-1","9223372036854775807","-9223372036854775807"],"float":[1.5,-1.5,1.234567],"bytes":["AQID","Qk9OSk9VUg=="],"bool":[false,true],"dt":["1900-10-10T01:10:10.001Z","1999-12-12T21:12:12.121Z"]}]}}}"### + run_query!(&runner, r#"{ findUniqueParent(where: { id: 1 }) { id children { childId string int bInt float bytes bool dt empty unset } } }"#), + 
@r###"{"data":{"findUniqueParent":{"id":1,"children":[{"childId":1,"string":["abc","def"],"int":[1,-1,1234567],"bInt":["1","-1","9223372036854775807","-9223372036854775807"],"float":[1.5,-1.5,1.234567],"bytes":["AQID","Qk9OSk9VUg=="],"bool":[false,true],"dt":["1900-10-10T01:10:10.001Z","1999-12-12T21:12:12.121Z"],"empty":[],"unset":[]}]}}}"### ); Ok(()) diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs b/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs index 61390bc5fa05..daf947e2ffea 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs @@ -96,7 +96,13 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel } match value { - serde_json::Value::Null => Ok(PrismaValue::Null), + serde_json::Value::Null => { + if sf.is_list() { + Ok(PrismaValue::List(vec![])) + } else { + Ok(PrismaValue::Null) + } + } serde_json::Value::Bool(b) => Ok(PrismaValue::Boolean(b)), serde_json::Value::Number(n) => match sf.type_identifier() { TypeIdentifier::Int => Ok(PrismaValue::Int(n.as_i64().ok_or_else(|| { From 2c8656439603f2c216f4d8eb731580c04aecd37a Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Mon, 11 Dec 2023 10:30:12 +0100 Subject: [PATCH 2/4] fix: m2m filtering on joined queries (#4549) --- .../tests/queries/simple/m2m.rs | 137 +++++++++++++++++- .../src/query_builder/select.rs | 31 ++-- 2 files changed, 150 insertions(+), 18 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/m2m.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/m2m.rs index bf8da606a812..34c0e3078965 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/m2m.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/m2m.rs @@ -1,9 +1,40 @@ use query_engine_tests::*; -#[test_suite(schema(schemas::posts_categories))] +#[test_suite(schema(schema))] mod m2m { use query_engine_tests::assert_query; + fn schema() -> String { + let schema = indoc! 
{ + r#"model Post { + #id(id, Int, @id) + title String + content String @default("Wip") + #m2m(categories, Category[], id, Int) + } + + model Category { + #id(id, Int, @id) + name String + + #m2m(posts, Post[], id, Int) + + tags Tag[] + } + + model Tag { + #id(id, Int, @id) + name String + + categoryId Int + category Category @relation(fields: [categoryId], references: [id]) + } + "# + }; + + schema.to_owned() + } + #[connector_test] async fn fetch_only_associated(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -25,6 +56,100 @@ mod m2m { Ok(()) } + #[connector_test] + async fn filtering_ordering(runner: Runner) -> TestResult<()> { + test_data(&runner).await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + categories( + where: { + OR: [ + { id: { in: [1] } }, + { tags: { some: { name: "Cinema" } } } + ] + }, + orderBy: { name: asc } + ) { + id + name + } + } + }"#), + @r###"{"data":{"findUniquePost":{"categories":[{"id":2,"name":"Fiction"},{"id":1,"name":"Marketing"}]}}}"### + ); + + Ok(()) + } + + #[connector_test] + async fn basic_pagination(runner: Runner) -> TestResult<()> { + test_data(&runner).await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + categories( + take: 1, + orderBy: { name: desc } + ) { + id + name + } + } + }"#), + @r###"{"data":{"findUniquePost":{"categories":[{"id":1,"name":"Marketing"}]}}}"### + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + categories( + take: 1, + orderBy: { name: asc } + ) { + id + name + } + } + }"#), + @r###"{"data":{"findUniquePost":{"categories":[{"id":2,"name":"Fiction"}]}}}"### + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + categories( + skip: 1, + orderBy: { name: desc } + ) { + id + name + } + } + }"#), + @r###"{"data":{"findUniquePost":{"categories":[{"id":2,"name":"Fiction"}]}}}"### + ); + + insta::assert_snapshot!( + run_query!(&runner, r#"{ + findUniquePost(where: { id: 1 }) { + categories( + skip: 1, + orderBy: { name: asc } + ) { + id + name + } + } + }"#), + @r###"{"data":{"findUniquePost":{"categories":[{"id":1,"name":"Marketing"}]}}}"### + ); + + Ok(()) + } + fn m2m_sharing_same_row_schema() -> String { let schema = indoc! { r#"model User { @@ -69,7 +194,7 @@ mod m2m { Ok(()) } - fn schema() -> String { + fn schema_16390() -> String { let schema = indoc! 
{ r#"model Item { id Int @id @default(autoincrement()) @@ -90,7 +215,7 @@ mod m2m { } // https://github.com/prisma/prisma/issues/16390 - #[connector_test(schema(schema), relation_mode = "prisma", only(Postgres))] + #[connector_test(schema(schema_16390), relation_mode = "prisma", only(Postgres))] async fn repro_16390(runner: Runner) -> TestResult<()> { run_query!(&runner, r#"mutation { createOneCategory(data: {}) { id } }"#); run_query!( @@ -124,11 +249,13 @@ mod m2m { create: [ { id: 1, - name: "Marketing" + name: "Marketing", + tags: { create: { id: 1, name: "Business" } } }, { id: 2, - name: "Fiction" + name: "Fiction", + tags: { create: { id: 2, name: "Cinema" } } } ] } diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select.rs b/query-engine/connectors/sql-query-connector/src/query_builder/select.rs index d2ef3e62b344..d6f30fe413f8 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/select.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/select.rs @@ -110,11 +110,12 @@ impl SelectBuilder { let linking_fields = rs.field.related_field().linking_fields(); if rs.field.relation().is_many_to_many() { - let selection: Vec> = FieldSelection::union(vec![order_by_selection(rs), linking_fields]) - .into_projection() - .as_columns(ctx) - .map(|c| c.table(root_alias.to_table_string())) - .collect(); + let selection: Vec> = + FieldSelection::union(vec![order_by_selection(rs), linking_fields, filtering_selection(rs)]) + .into_projection() + .as_columns(ctx) + .map(|c| c.table(root_alias.to_table_string())) + .collect(); // SELECT , inner.with_columns(selection.into()) @@ -152,8 +153,9 @@ impl SelectBuilder { fn build_m2m_join<'a>(&mut self, rs: &RelationSelection, parent_alias: Alias, ctx: &Context<'_>) -> JoinData<'a> { let rf = rs.field.clone(); - let m2m_alias = m2m_join_alias_name(&rf); let m2m_table_alias = self.next_alias(); + let m2m_join_alias = self.next_alias(); + let outer_alias = self.next_alias(); let left_columns = rf.related_field().m2m_columns(ctx); let right_columns = ModelProjection::from(rf.model().primary_identifier()).as_columns(ctx); @@ -174,24 +176,27 @@ impl SelectBuilder { .unwrap(); let m2m_join_data = Table::from(self.build_related_query_select(rs, m2m_table_alias, ctx)) - .alias(join_alias_name(&rf)) + .alias(m2m_join_alias.to_table_string()) .on(ConditionTree::single(true.raw())) .lateral(); let child_table = rf.as_table(ctx).alias(m2m_table_alias.to_table_string()); let inner = Select::from_table(child_table) - .value(Column::from((join_alias_name(&rf), JSON_AGG_IDENT))) + .value(Column::from((m2m_join_alias.to_table_string(), JSON_AGG_IDENT))) .left_join(m2m_join_data) // join m2m table .and_where(join_conditions) // adds join condition to the child table - .with_ordering(&rs.args, Some(join_alias_name(&rs.field)), ctx) // adds ordering stmts - .with_filters(rs.args.filter.clone(), None, ctx) // adds query filters // TODO: avoid clone filter - .with_pagination(rs.args.take_abs(), rs.args.skip); // adds pagination + .with_ordering(&rs.args, Some(m2m_join_alias.to_table_string()), ctx) // adds ordering stmts + .with_filters(rs.args.filter.clone(), Some(m2m_join_alias), ctx) // adds query filters // TODO: avoid clone filter + .with_pagination(rs.args.take_abs(), rs.args.skip) + .comment("inner"); // adds pagination - let outer = Select::from_table(Table::from(inner).alias(format!("{}_1", m2m_alias))).value(json_agg()); + let outer = 
Select::from_table(Table::from(inner).alias(outer_alias.to_table_string()))
+            .value(json_agg())
+            .comment("outer");
 
         Table::from(outer)
-            .alias(m2m_alias)
+            .alias(m2m_join_alias_name(&rf))
             .on(ConditionTree::single(true.raw()))
             .lateral()
     }

From 2c8656439603f2c216f4d8eb731580c04aecd37a Mon Sep 17 00:00:00 2001
From: Alexey Orlenko
Date: Wed, 13 Dec 2023 13:56:56 +0100
Subject: [PATCH 3/4] fix: chunk and merge json objects on postgres (#4555)

Functions in PostgreSQL can only accept up to 100 arguments, which means
that we can't build an object with more than 50 fields using
`JSON_BUILD_OBJECT`. To work around that, we chunk the fields into
subsets of 50 fields or fewer, build one or more JSONB objects using one
or more `JSONB_BUILD_OBJECT` invocations, and merge them together using
the `||` operator (which is not possible with plain JSON).

Another alternative that was considered and prototyped first was using
`ROW_TO_JSON`, but it turned out not to be a suitable replacement for
several reasons, the final deal breaker being [the limit on the length
of field names](https://github.com/hasura/graphql-engine/issues/4004#issuecomment-593831051)
(63 characters).

Other problems included the lack of support for `ROW_TO_JSON` on MySQL,
which would have forced conditional logic into the query builder at the
`sql-query-connector` level, making an inappropriate abstraction layer
depend on connector capabilities. Building a `ROW_TO_JSON`-compatible
query without overfetching was also difficult: we would have had to
select additional fields (e.g. for filtering and order by) and forward
them to the outer query, with no easy way to exclude them from the
resulting JSON object.

The workaround with JSONB suffers from none of these issues and is
completely isolated on the quaint level, without leaking into the query
engine.
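
For illustration, the merge step relies on JSONB's `||` concatenation
operator; a minimal sketch (keys and values here are placeholders, not
actual query output):

    SELECT JSONB_BUILD_OBJECT('a', 1, 'b', 2) || JSONB_BUILD_OBJECT('c', 3);
    -- => {"a": 1, "b": 2, "c": 3}
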
Fixes: https://github.com/prisma/prisma/issues/22298 Closes: https://github.com/prisma/prisma-engines/pull/4550 --- Cargo.lock | 1 + quaint/Cargo.toml | 1 + quaint/src/ast/function.rs | 2 + quaint/src/visitor.rs | 27 +-- quaint/src/visitor/mssql.rs | 12 +- quaint/src/visitor/mysql.rs | 10 + quaint/src/visitor/postgres.rs | 89 +++++++ quaint/src/visitor/sqlite.rs | 10 + .../tests/new/regressions/mod.rs | 1 + .../tests/new/regressions/prisma_22298.rs | 218 ++++++++++++++++++ 10 files changed, 353 insertions(+), 18 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_22298.rs diff --git a/Cargo.lock b/Cargo.lock index 93d70a3bae4a..8c48c0dcc3dd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3558,6 +3558,7 @@ dependencies = [ "getrandom 0.2.10", "hex", "indoc 0.3.6", + "itertools", "lru-cache", "metrics 0.18.1", "mobc", diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index 52a7edf72aca..02f1d80ab3e3 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -87,6 +87,7 @@ metrics = "0.18" futures = "0.3" url = "2.1" hex = "0.4" +itertools = "0.10" either = { version = "1.6" } base64 = { version = "0.12.3" } diff --git a/quaint/src/ast/function.rs b/quaint/src/ast/function.rs index 3bcc24c4b072..659cf03bfac3 100644 --- a/quaint/src/ast/function.rs +++ b/quaint/src/ast/function.rs @@ -102,7 +102,9 @@ pub(crate) enum FunctionType<'a> { JsonExtractFirstArrayElem(JsonExtractFirstArrayElem<'a>), #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonUnquote(JsonUnquote<'a>), + #[cfg(feature = "postgresql")] JsonArrayAgg(JsonArrayAgg<'a>), + #[cfg(feature = "postgresql")] JsonBuildObject(JsonBuildObject<'a>), #[cfg(any(feature = "postgresql", feature = "mysql"))] TextSearch(TextSearch<'a>), diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs index 57159bd1e9c1..58baa09a791f 100644 --- a/quaint/src/visitor.rs +++ b/quaint/src/visitor.rs @@ -139,6 +139,12 @@ pub trait Visitor<'a> { #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> Result; + #[cfg(feature = "postgresql")] + fn visit_json_array_agg(&mut self, array_agg: JsonArrayAgg<'a>) -> Result; + + #[cfg(feature = "postgresql")] + fn visit_json_build_object(&mut self, build_obj: JsonBuildObject<'a>) -> Result; + #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_text_search(&mut self, text_search: TextSearch<'a>) -> Result; @@ -1132,26 +1138,13 @@ pub trait Visitor<'a> { FunctionType::Concat(concat) => { self.visit_concat(concat)?; } + #[cfg(feature = "postgresql")] FunctionType::JsonArrayAgg(array_agg) => { - self.write("JSON_AGG")?; - self.surround_with("(", ")", |s| s.visit_expression(*array_agg.expr))?; + self.visit_json_array_agg(array_agg)?; } + #[cfg(feature = "postgresql")] FunctionType::JsonBuildObject(build_obj) => { - let len = build_obj.exprs.len(); - - self.write("JSON_BUILD_OBJECT")?; - self.surround_with("(", ")", |s| { - for (i, (name, expr)) in build_obj.exprs.into_iter().enumerate() { - s.visit_raw_value(Value::text(name))?; - s.write(", ")?; - s.visit_expression(expr)?; - if i < (len - 1) { - s.write(", ")?; - } - } - - Ok(()) - })?; + self.visit_json_build_object(build_obj)?; } }; diff --git a/quaint/src/visitor/mssql.rs b/quaint/src/visitor/mssql.rs index bf1550b96c31..6f259218ca77 100644 --- a/quaint/src/visitor/mssql.rs +++ b/quaint/src/visitor/mssql.rs @@ -1,6 +1,6 @@ use super::Visitor; #[cfg(any(feature = "postgresql", feature = "mysql"))] -use 
crate::prelude::{JsonExtract, JsonType, JsonUnquote}; +use crate::prelude::{JsonArrayAgg, JsonBuildObject, JsonExtract, JsonType, JsonUnquote}; use crate::{ ast::{ Column, Comparable, Expression, ExpressionKind, Insert, IntoRaw, Join, JoinData, Joinable, Merge, OnConflict, @@ -656,6 +656,16 @@ impl<'a> Visitor<'a> for Mssql<'a> { unimplemented!("JSON filtering is not yet supported on MSSQL") } + #[cfg(feature = "postgresql")] + fn visit_json_array_agg(&mut self, _array_agg: JsonArrayAgg<'a>) -> visitor::Result { + unimplemented!("JSON_AGG is not yet supported on MSSQL") + } + + #[cfg(feature = "postgresql")] + fn visit_json_build_object(&mut self, _build_obj: JsonBuildObject<'a>) -> visitor::Result { + unimplemented!("JSON_BUILD_OBJECT is not yet supported on MSSQL") + } + #[cfg(feature = "postgresql")] fn visit_text_search(&mut self, _text_search: crate::prelude::TextSearch<'a>) -> visitor::Result { unimplemented!("Full-text search is not yet supported on MSSQL") diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index 26d0f0d5fd65..aa57db799a34 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -562,6 +562,16 @@ impl<'a> Visitor<'a> for Mysql<'a> { Ok(()) } + #[cfg(feature = "postgresql")] + fn visit_json_array_agg(&mut self, _array_agg: JsonArrayAgg<'a>) -> visitor::Result { + unimplemented!("JSON_ARRAYAGG is not yet supported on MySQL") + } + + #[cfg(feature = "postgresql")] + fn visit_json_build_object(&mut self, _build_obj: JsonBuildObject<'a>) -> visitor::Result { + unimplemented!("JSON_OBJECT is not yet supported on MySQL") + } + fn visit_ordering(&mut self, ordering: Ordering<'a>) -> visitor::Result { let len = ordering.0.len(); diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 749b752709d4..da02c26c3353 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -2,6 +2,7 @@ use crate::{ ast::*, visitor::{self, Visitor}, }; +use itertools::Itertools; use std::{ fmt::{self, Write}, ops::Deref, @@ -499,6 +500,57 @@ impl<'a> Visitor<'a> for Postgres<'a> { } } + #[cfg(feature = "postgresql")] + fn visit_json_array_agg(&mut self, array_agg: JsonArrayAgg<'a>) -> visitor::Result { + self.write("JSONB_AGG")?; + self.surround_with("(", ")", |s| s.visit_expression(*array_agg.expr))?; + + Ok(()) + } + + #[cfg(feature = "postgresql")] + fn visit_json_build_object(&mut self, build_obj: JsonBuildObject<'a>) -> visitor::Result { + // Functions in PostgreSQL can only accept up to 100 arguments, which means that we can't + // build an object with more than 50 fields using `JSON_BUILD_OBJECT`. To work around + // that, we chunk the fields into subsets of 50 fields or less, build one or more JSONB + // objects using one or more `JSONB_BUILD_OBJECT` invocations, and merge them together + // using the `||` operator (which is not possible with plain JSON). + // + // See . + // + // Another alternative that was considered for the specific use case of loading relations + // in Query Engine was using `ROW_TO_JSON` but it turned out to not be a suitable + // replacement for several reasons, the main one being the limit of the length of field + // names (63 characters). 
+ const MAX_FIELDS: usize = 50; + let num_chunks = build_obj.exprs.len().div_ceil(MAX_FIELDS); + + for (i, chunk) in build_obj.exprs.into_iter().chunks(MAX_FIELDS).into_iter().enumerate() { + let mut chunk = chunk.peekable(); + + self.write("JSONB_BUILD_OBJECT")?; + + self.surround_with("(", ")", |s| { + while let Some((name, expr)) = chunk.next() { + s.visit_raw_value(Value::text(name))?; + s.write(", ")?; + s.visit_expression(expr)?; + if chunk.peek().is_some() { + s.write(", ")?; + } + } + + Ok(()) + })?; + + if i < num_chunks - 1 { + self.write(" || ")?; + } + } + + Ok(()) + } + fn visit_text_search(&mut self, text_search: crate::prelude::TextSearch<'a>) -> visitor::Result { let len = text_search.exprs.len(); self.surround_with("to_tsvector(concat_ws(' ', ", "))", |s| { @@ -1209,4 +1261,41 @@ mod tests { assert_eq!("SELECT MIN(\"enum\")::text, MAX(\"enum\")::text FROM \"User\"", sql); } + + mod test_json_build_object { + use super::*; + + #[test] + fn simple() { + let build_json = build_json_object(3); + let query = Select::default().value(build_json); + let (sql, _) = Postgres::build(query).unwrap(); + + assert_eq!("SELECT JSONB_BUILD_OBJECT('f1', $1, 'f2', $2, 'f3', $3)", sql); + } + + #[test] + fn chunked() { + let build_json = build_json_object(110); + let query = Select::default().value(build_json); + let (sql, _) = Postgres::build(query).unwrap(); + + assert_eq!( + concat!( + "SELECT JSONB_BUILD_OBJECT('f1', $1, 'f2', $2, 'f3', $3, 'f4', $4, 'f5', $5, 'f6', $6, 'f7', $7, 'f8', $8, 'f9', $9, 'f10', $10, 'f11', $11, 'f12', $12, 'f13', $13, 'f14', $14, 'f15', $15, 'f16', $16, 'f17', $17, 'f18', $18, 'f19', $19, 'f20', $20, 'f21', $21, 'f22', $22, 'f23', $23, 'f24', $24, 'f25', $25, 'f26', $26, 'f27', $27, 'f28', $28, 'f29', $29, 'f30', $30, 'f31', $31, 'f32', $32, 'f33', $33, 'f34', $34, 'f35', $35, 'f36', $36, 'f37', $37, 'f38', $38, 'f39', $39, 'f40', $40, 'f41', $41, 'f42', $42, 'f43', $43, 'f44', $44, 'f45', $45, 'f46', $46, 'f47', $47, 'f48', $48, 'f49', $49, 'f50', $50)", + " || JSONB_BUILD_OBJECT('f51', $51, 'f52', $52, 'f53', $53, 'f54', $54, 'f55', $55, 'f56', $56, 'f57', $57, 'f58', $58, 'f59', $59, 'f60', $60, 'f61', $61, 'f62', $62, 'f63', $63, 'f64', $64, 'f65', $65, 'f66', $66, 'f67', $67, 'f68', $68, 'f69', $69, 'f70', $70, 'f71', $71, 'f72', $72, 'f73', $73, 'f74', $74, 'f75', $75, 'f76', $76, 'f77', $77, 'f78', $78, 'f79', $79, 'f80', $80, 'f81', $81, 'f82', $82, 'f83', $83, 'f84', $84, 'f85', $85, 'f86', $86, 'f87', $87, 'f88', $88, 'f89', $89, 'f90', $90, 'f91', $91, 'f92', $92, 'f93', $93, 'f94', $94, 'f95', $95, 'f96', $96, 'f97', $97, 'f98', $98, 'f99', $99, 'f100', $100)", + " || JSONB_BUILD_OBJECT('f101', $101, 'f102', $102, 'f103', $103, 'f104', $104, 'f105', $105, 'f106', $106, 'f107', $107, 'f108', $108, 'f109', $109, 'f110', $110)" + ), + sql + ); + } + + fn build_json_object(num_fields: u32) -> JsonBuildObject<'static> { + let fields = (1..=num_fields) + .map(|i| (format!("f{i}").into(), Expression::from(i as i64))) + .collect(); + + JsonBuildObject { exprs: fields } + } + } } diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index 9c15ef651694..5e30bc54c78e 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -329,6 +329,16 @@ impl<'a> Visitor<'a> for Sqlite<'a> { unimplemented!("JSON filtering is not yet supported on SQLite") } + #[cfg(feature = "postgresql")] + fn visit_json_array_agg(&mut self, _array_agg: JsonArrayAgg<'a>) -> visitor::Result { + unimplemented!("JSON_AGG is not yet supported on 
SQLite") + } + + #[cfg(feature = "postgresql")] + fn visit_json_build_object(&mut self, _build_obj: JsonBuildObject<'a>) -> visitor::Result { + unimplemented!("JSON_BUILD_OBJECT is not yet supported on SQLite") + } + fn visit_ordering(&mut self, ordering: Ordering<'a>) -> visitor::Result { let len = ordering.0.len(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs index 2cd2938f916a..deaaa7e84313 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/mod.rs @@ -22,6 +22,7 @@ mod prisma_20799; mod prisma_21182; mod prisma_21369; mod prisma_21901; +mod prisma_22298; mod prisma_5952; mod prisma_6173; mod prisma_7010; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_22298.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_22298.rs new file mode 100644 index 000000000000..4e7c589d14f5 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_22298.rs @@ -0,0 +1,218 @@ +use query_engine_tests::*; + +#[test_suite(schema(schema))] +mod many_fields_in_related_table { + use indoc::indoc; + + fn schema() -> String { + indoc! {r#" + model A { + #id(id, Int, @id) + field1 Int + field2 Int + field3 Int + field4 Int + field5 Int + field6 Int + field7 Int + field8 Int + field9 Int + field10 Int + field11 Int + field12 Int + field13 Int + field14 Int + field15 Int + field16 Int + field17 Int + field18 Int + field19 Int + field20 Int + field21 Int + field22 Int + field23 Int + field24 Int + field25 Int + field26 Int + field27 Int + field28 Int + field29 Int + field30 Int + field31 Int + field32 Int + field33 Int + field34 Int + field35 Int + field36 Int + field37 Int + field38 Int + field39 Int + field40 Int + field41 Int + field42 Int + field43 Int + field44 Int + field45 Int + field46 Int + field47 Int + field48 Int + field49 Int + field50 Int + field51 Int + b_id Int + b B @relation(fields: [b_id], references: [id]) + c C[] + } + + model B { + #id(id, Int, @id) + a A[] + } + + model C { + #id(id, Int, @id) + a_id Int + a A @relation(fields: [a_id], references: [id]) + } + "#} + .to_owned() + } + + #[connector_test] + async fn query_53_fields_through_relation(runner: Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#" + mutation { + createOneB( + data: { + id: 1, + a: { + create: { + id: 1, + field1: 0, + field2: 0, + field3: 0, + field4: 0, + field5: 0, + field6: 0, + field7: 0, + field8: 0, + field9: 0, + field10: 0, + field11: 0, + field12: 0, + field13: 0, + field14: 0, + field15: 0, + field16: 0, + field17: 0, + field18: 0, + field19: 0, + field20: 0, + field21: 0, + field22: 0, + field23: 0, + field24: 0, + field25: 0, + field26: 0, + field27: 0, + field28: 0, + field29: 0, + field30: 0, + field31: 0, + field32: 0, + field33: 0, + field34: 0, + field35: 0, + field36: 0, + field37: 0, + field38: 0, + field39: 0, + field40: 0, + field41: 0, + field42: 0, + field43: 0, + field44: 0, + field45: 0, + field46: 0, + field47: 0, + field48: 0, + field49: 0, + field50: 0, + field51: 0, + c: { + create: { + id: 1 + } + } + } + } + } + ) { + id + a { + id + field1 + field2 + field3 + field4 + field5 + field6 + field7 + field8 + field9 + field10 + field11 + field12 + field13 + field14 + field15 + field16 + 
field17 + field18 + field19 + field20 + field21 + field22 + field23 + field24 + field25 + field26 + field27 + field28 + field29 + field30 + field31 + field32 + field33 + field34 + field35 + field36 + field37 + field38 + field39 + field40 + field41 + field42 + field43 + field44 + field45 + field46 + field47 + field48 + field49 + field50 + field51 + c { + id + } + } + } + } + "#), + @r###"{"data":{"createOneB":{"id":1,"a":[{"id":1,"field1":0,"field2":0,"field3":0,"field4":0,"field5":0,"field6":0,"field7":0,"field8":0,"field9":0,"field10":0,"field11":0,"field12":0,"field13":0,"field14":0,"field15":0,"field16":0,"field17":0,"field18":0,"field19":0,"field20":0,"field21":0,"field22":0,"field23":0,"field24":0,"field25":0,"field26":0,"field27":0,"field28":0,"field29":0,"field30":0,"field31":0,"field32":0,"field33":0,"field34":0,"field35":0,"field36":0,"field37":0,"field38":0,"field39":0,"field40":0,"field41":0,"field42":0,"field43":0,"field44":0,"field45":0,"field46":0,"field47":0,"field48":0,"field49":0,"field50":0,"field51":0,"c":[{"id":1}]}]}}}"### + ); + + Ok(()) + } +} From 0ca5ccbcfa6bdc81c003cf549abe4269f59c41e5 Mon Sep 17 00:00:00 2001 From: Flavian Desverne Date: Mon, 18 Dec 2023 13:21:52 +0100 Subject: [PATCH 4/4] fix: handle native types for joined queries (#4546) --- Cargo.lock | 1 + psl/builtin-connectors/Cargo.toml | 2 + .../src/cockroach_datamodel_connector.rs | 24 ++ psl/builtin-connectors/src/lib.rs | 1 + .../src/postgres_datamodel_connector.rs | 24 ++ psl/builtin-connectors/src/utils.rs | 37 ++ psl/psl-core/src/datamodel_connector.rs | 9 + quaint/src/ast/column.rs | 9 +- quaint/src/visitor/postgres.rs | 34 +- .../tests/queries/data_types/mod.rs | 1 + .../tests/queries/data_types/native/mod.rs | 1 + .../queries/data_types/native/postgres.rs | 325 ++++++++++++++++++ .../src/database/operations/coerce.rs | 58 +++- .../src/model_extensions/column.rs | 1 + .../query-structure/src/field/scalar.rs | 8 + 15 files changed, 518 insertions(+), 17 deletions(-) create mode 100644 psl/builtin-connectors/src/utils.rs create mode 100644 query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/mod.rs create mode 100644 query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/postgres.rs diff --git a/Cargo.lock b/Cargo.lock index 8c48c0dcc3dd..ec1df3dd76b3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -431,6 +431,7 @@ dependencies = [ name = "builtin-psl-connectors" version = "0.1.0" dependencies = [ + "chrono", "connection-string", "either", "enumflags2", diff --git a/psl/builtin-connectors/Cargo.toml b/psl/builtin-connectors/Cargo.toml index 218fcf8b81a5..ef9e810b8aba 100644 --- a/psl/builtin-connectors/Cargo.toml +++ b/psl/builtin-connectors/Cargo.toml @@ -13,3 +13,5 @@ indoc.workspace = true lsp-types = "0.91.1" once_cell = "1.3" regex = "1" +chrono = { version = "0.4.6", default-features = false } + diff --git a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs index 4ab77cf45639..99f94c953971 100644 --- a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs +++ b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs @@ -3,6 +3,7 @@ mod validations; pub use native_types::CockroachType; +use chrono::*; use enumflags2::BitFlags; use lsp_types::{CompletionItem, CompletionItemKind, CompletionList}; use psl_core::{ @@ -307,6 +308,29 @@ impl Connector for CockroachDatamodelConnector { fn flavour(&self) -> Flavour { Flavour::Cockroach } + + fn 
parse_json_datetime(
+        &self,
+        str: &str,
+        nt: Option<NativeTypeInstance>,
+    ) -> chrono::ParseResult<DateTime<FixedOffset>> {
+        let native_type: Option<&CockroachType> = nt.as_ref().map(|nt| nt.downcast_ref());
+
+        match native_type {
+            Some(ct) => match ct {
+                CockroachType::Timestamptz(_) => crate::utils::parse_timestamptz(str),
+                CockroachType::Timestamp(_) => crate::utils::parse_timestamp(str),
+                CockroachType::Date => crate::utils::parse_date(str),
+                CockroachType::Time(_) => crate::utils::parse_time(str),
+                CockroachType::Timetz(_) => crate::utils::parse_timetz(str),
+                _ => unreachable!(),
+            },
+            None => self.parse_json_datetime(
+                str,
+                Some(self.default_native_type_for_scalar_type(&ScalarType::DateTime)),
+            ),
+        }
+    }
 }
 
 /// An `@default(sequence())` function.
diff --git a/psl/builtin-connectors/src/lib.rs b/psl/builtin-connectors/src/lib.rs
index c477386a23ed..4f8d26801213 100644
--- a/psl/builtin-connectors/src/lib.rs
+++ b/psl/builtin-connectors/src/lib.rs
@@ -16,6 +16,7 @@ mod mysql_datamodel_connector;
 mod native_type_definition;
 mod postgres_datamodel_connector;
 mod sqlite_datamodel_connector;
+mod utils;
 
 use psl_core::{datamodel_connector::Connector, ConnectorRegistry};
 
diff --git a/psl/builtin-connectors/src/postgres_datamodel_connector.rs b/psl/builtin-connectors/src/postgres_datamodel_connector.rs
index 697b4b9c12bb..fa3325d1403f 100644
--- a/psl/builtin-connectors/src/postgres_datamodel_connector.rs
+++ b/psl/builtin-connectors/src/postgres_datamodel_connector.rs
@@ -4,6 +4,7 @@ mod validations;
 
 pub use native_types::PostgresType;
 
+use chrono::*;
 use enumflags2::BitFlags;
 use lsp_types::{CompletionItem, CompletionItemKind, CompletionList, InsertTextFormat};
 use psl_core::{
@@ -567,6 +568,29 @@ impl Connector for PostgresDatamodelConnector {
     fn flavour(&self) -> Flavour {
         Flavour::Postgres
     }
+
+    fn parse_json_datetime(
+        &self,
+        str: &str,
+        nt: Option<NativeTypeInstance>,
+    ) -> chrono::ParseResult<DateTime<FixedOffset>> {
+        let native_type: Option<&PostgresType> = nt.as_ref().map(|nt| nt.downcast_ref());
+
+        match native_type {
+            Some(pt) => match pt {
+                Timestamptz(_) => crate::utils::parse_timestamptz(str),
+                Timestamp(_) => crate::utils::parse_timestamp(str),
+                Date => crate::utils::parse_date(str),
+                Time(_) => crate::utils::parse_time(str),
+                Timetz(_) => crate::utils::parse_timetz(str),
+                _ => unreachable!(),
+            },
+            None => self.parse_json_datetime(
+                str,
+                Some(self.default_native_type_for_scalar_type(&ScalarType::DateTime)),
+            ),
+        }
+    }
 }
 
 fn allowed_index_operator_classes(algo: IndexAlgorithm, field: walkers::ScalarFieldWalker<'_>) -> Vec<OperatorClass> {
diff --git a/psl/builtin-connectors/src/utils.rs b/psl/builtin-connectors/src/utils.rs
new file mode 100644
index 000000000000..3ef9f55cd80a
--- /dev/null
+++ b/psl/builtin-connectors/src/utils.rs
@@ -0,0 +1,37 @@
+use chrono::*;
+
+pub(crate) fn parse_date(str: &str) -> Result<DateTime<FixedOffset>, chrono::ParseError> {
+    chrono::NaiveDate::parse_from_str(str, "%Y-%m-%d")
+        .map(|date| DateTime::<Utc>::from_utc(date.and_hms_opt(0, 0, 0).unwrap(), Utc))
+        .map(DateTime::<FixedOffset>::from)
+}
+
+pub(crate) fn parse_timestamptz(str: &str) -> Result<DateTime<FixedOffset>, chrono::ParseError> {
+    DateTime::parse_from_rfc3339(str)
+}
+
+pub(crate) fn parse_timestamp(str: &str) -> Result<DateTime<FixedOffset>, chrono::ParseError> {
+    NaiveDateTime::parse_from_str(str, "%Y-%m-%dT%H:%M:%S%.f")
+        .map(|dt| DateTime::from_utc(dt, Utc))
+        .or_else(|_| DateTime::parse_from_rfc3339(str).map(DateTime::<Utc>::from))
+        .map(DateTime::<FixedOffset>::from)
+}
+
+pub(crate) fn parse_time(str: &str) -> Result<DateTime<FixedOffset>, chrono::ParseError> {
+    chrono::NaiveTime::parse_from_str(str, "%H:%M:%S%.f")
+        .map(|time| {
+            let base_date = chrono::NaiveDate::from_ymd_opt(1970, 1, 1).unwrap();
+
+            DateTime::<Utc>::from_utc(base_date.and_time(time), Utc)
+        })
+        .map(DateTime::<FixedOffset>::from)
+}
+
+pub(crate) fn parse_timetz(str: &str) -> Result<DateTime<FixedOffset>, chrono::ParseError> {
+    // We currently don't support time with timezone.
+    // We strip the timezone information and parse it as a time.
+    // This is in line with what Quaint does already.
+    let time_without_tz = str.split('+').next().unwrap();
+
+    parse_time(time_without_tz)
+}
diff --git a/psl/psl-core/src/datamodel_connector.rs b/psl/psl-core/src/datamodel_connector.rs
index 72671e06688f..751b03ac9da1 100644
--- a/psl/psl-core/src/datamodel_connector.rs
+++ b/psl/psl-core/src/datamodel_connector.rs
@@ -25,6 +25,7 @@ pub use self::{
 };
 
 use crate::{configuration::DatasourceConnectorData, Configuration, Datasource, PreviewFeature};
+use chrono::{DateTime, FixedOffset};
 use diagnostics::{DatamodelError, Diagnostics, NativeTypeErrorFactory, Span};
 use enumflags2::BitFlags;
 use lsp_types::CompletionList;
@@ -359,6 +360,14 @@ pub trait Connector: Send + Sync {
     ) -> DatasourceConnectorData {
         Default::default()
     }
+
+    fn parse_json_datetime(
+        &self,
+        _str: &str,
+        _nt: Option<NativeTypeInstance>,
+    ) -> chrono::ParseResult<DateTime<FixedOffset>> {
+        unreachable!("This method is only implemented on connectors with lateral join support.")
+    }
 }
 
 #[derive(Copy, Clone, Debug, PartialEq)]
diff --git a/quaint/src/ast/column.rs b/quaint/src/ast/column.rs
index 836b4ce96527..cf2d157be085 100644
--- a/quaint/src/ast/column.rs
+++ b/quaint/src/ast/column.rs
@@ -1,4 +1,4 @@
-use super::Aliasable;
+use super::{values::NativeColumnType, Aliasable};
 use crate::{
     ast::{Expression, ExpressionKind, Table},
     Value,
@@ -32,6 +32,8 @@ pub struct Column<'a> {
     pub(crate) alias: Option<Cow<'a, str>>,
     pub(crate) default: Option<DefaultValue<'a>>,
     pub(crate) type_family: Option<TypeFamily>,
+    /// The underlying native type of the column.
+    pub(crate) native_type: Option<NativeColumnType<'a>>,
     /// Whether the column is an enum.
     pub(crate) is_enum: bool,
     /// Whether the column is a (scalar) list.
@@ -130,6 +132,11 @@ impl<'a> Column<'a> {
             .map(|d| d == &DefaultValue::Generated)
             .unwrap_or(false)
     }
+
+    pub fn native_column_type<T: Into<NativeColumnType<'a>>>(mut self, native_type: Option<T>) -> Column<'a> {
+        self.native_type = native_type.map(|nt| nt.into());
+        self
+    }
 }
 
 impl<'a> From<Column<'a>> for Expression<'a> {
diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs
index da02c26c3353..40c80d330c14 100644
--- a/quaint/src/visitor/postgres.rs
+++ b/quaint/src/visitor/postgres.rs
@@ -17,6 +17,23 @@ pub struct Postgres<'a> {
     parameters: Vec<Value<'a>>,
 }
 
+impl<'a> Postgres<'a> {
+    fn visit_json_build_obj_expr(&mut self, expr: Expression<'a>) -> crate::Result<()> {
+        match expr.kind() {
+            ExpressionKind::Column(col) => match (col.type_family.as_ref(), col.native_type.as_deref()) {
+                (Some(TypeFamily::Decimal(_)), Some("MONEY")) => {
+                    self.visit_expression(expr)?;
+                    self.write("::numeric")?;
+
+                    Ok(())
+                }
+                _ => self.visit_expression(expr),
+            },
+            _ => self.visit_expression(expr),
+        }
+    }
+}
+
 impl<'a> Visitor<'a> for Postgres<'a> {
     const C_BACKTICK_OPEN: &'static str = "\"";
     const C_BACKTICK_CLOSE: &'static str = "\"";
@@ -534,7 +551,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
             while let Some((name, expr)) = chunk.next() {
                 s.visit_raw_value(Value::text(name))?;
                 s.write(", ")?;
-                s.visit_expression(expr)?;
+                s.visit_json_build_obj_expr(expr)?;
                 if chunk.peek().is_some() {
                     s.write(", ")?;
                 }
@@ -1290,6 +1307,21 @@ mod tests {
         );
     }
 
+    #[test]
+    fn money() {
+        let build_json = json_build_object(vec![(
+            "money".into(),
+            Column::from("money")
+                .native_column_type(Some("money"))
+                .type_family(TypeFamily::Decimal(None))
+                .into(),
+        )]);
+        let query = Select::default().value(build_json);
+        let (sql, _) = Postgres::build(query).unwrap();
+
+        assert_eq!(sql, "SELECT JSONB_BUILD_OBJECT('money', \"money\"::numeric)");
+    }
+
     fn build_json_object(num_fields: u32) -> JsonBuildObject<'static> {
         let fields = (1..=num_fields)
             .map(|i| (format!("f{i}").into(), Expression::from(i as i64)))
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/mod.rs
index 09ed6668f619..127e5e23c29a 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/mod.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/mod.rs
@@ -7,5 +7,6 @@ mod enum_type;
 mod float;
 mod int;
 mod json;
+mod native;
 mod string;
 mod through_relation;
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/mod.rs
new file mode 100644
index 000000000000..70faf80832c5
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/mod.rs
@@ -0,0 +1 @@
+mod postgres;
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/postgres.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/postgres.rs
new file mode 100644
index 000000000000..e25d57e854de
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/native/postgres.rs
@@ -0,0 +1,325 @@
+use indoc::indoc;
+use query_engine_tests::*;
+
+#[test_suite(only(Postgres, CockroachDb))]
+mod datetime {
+    fn schema_date() -> String {
+        let schema = indoc! {
+            r#"model Parent {
+                #id(id, Int, @id)
+
+                childId Int? @unique
+                child Child? 
@relation(fields: [childId], references: [id]) + } + + model Child { + #id(id, Int, @id) + date DateTime @test.Date + date_2 DateTime @test.Date + time DateTime @test.Time(3) + time_2 DateTime @test.Time(3) + time_tz DateTime @test.Timetz(3) + time_tz_2 DateTime @test.Timetz(3) + ts DateTime @test.Timestamp(3) + ts_2 DateTime @test.Timestamp(3) + ts_tz DateTime @test.Timestamptz(3) + ts_tz_2 DateTime @test.Timestamptz(3) + + parent Parent? + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_date))] + async fn dt_native(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + child: { create: { + id: 1, + date: "2016-09-24T00:00:00.000Z" + date_2: "2016-09-24T00:00:00.000+03:00" + time: "1111-11-11T13:02:20.321Z" + time_2: "1111-11-11T13:02:20.321+03:00" + time_tz: "1111-11-11T13:02:20.321Z" + time_tz_2: "1111-11-11T13:02:20.321+03:00" + ts: "2016-09-24T14:01:30.213Z" + ts_2: "2016-09-24T14:01:30.213+03:00" + ts_tz: "2016-09-24T14:01:30.213Z" + ts_tz_2: "2016-09-24T14:01:30.213+03:00" + }} + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(runner, r#"{ findManyParent { id child { date date_2 time time_2 time_tz time_tz_2 ts ts_2 ts_tz ts_tz_2 } } }"#), + @r###"{"data":{"findManyParent":[{"id":1,"child":{"date":"2016-09-24T00:00:00.000Z","date_2":"2016-09-23T00:00:00.000Z","time":"1970-01-01T13:02:20.321Z","time_2":"1970-01-01T10:02:20.321Z","time_tz":"1970-01-01T13:02:20.321Z","time_tz_2":"1970-01-01T10:02:20.321Z","ts":"2016-09-24T14:01:30.213Z","ts_2":"2016-09-24T11:01:30.213Z","ts_tz":"2016-09-24T14:01:30.213Z","ts_tz_2":"2016-09-24T11:01:30.213Z"}}]}}"### + ); + + Ok(()) + } + + async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { + runner + .query(format!("mutation {{ createOneParent(data: {}) {{ id }} }}", data)) + .await? + .assert_success(); + Ok(()) + } +} + +#[test_suite(only(Postgres))] +mod decimal { + fn schema_decimal() -> String { + let schema = indoc! { + r#" + model Parent { + #id(id, Int, @id) + + childId Int? @unique + child Child? @relation(fields: [childId], references: [id]) + } + + model Child { + #id(id, Int, @id) + + float Float @test.Real + dfloat Float @test.DoublePrecision + decFloat Decimal @test.Decimal(2, 1) + money Decimal @test.Money + + parent Parent? + }"# + }; + + schema.to_owned() + } + + // "Postgres native decimal types" should "work" + #[connector_test(schema(schema_decimal))] + async fn native_decimal_types(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + child: { create: { + id: 1, + float: 1.1, + dfloat: 2.2, + decFloat: 3.1234, + money: 3.51, + }} + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManyParent { id child { float dfloat decFloat money } } }"#), + @r###"{"data":{"findManyParent":[{"id":1,"child":{"float":1.1,"dfloat":2.2,"decFloat":"3.1","money":"3.51"}}]}}"### + ); + + Ok(()) + } + + async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { + runner + .query(format!("mutation {{ createOneParent(data: {}) {{ id }} }}", data)) + .await? + .assert_success(); + Ok(()) + } +} + +#[test_suite(only(Postgres))] +mod string { + fn schema_string() -> String { + let schema = indoc! { + r#" + model Parent { + #id(id, Int, @id) + + childId Int? @unique + child Child? 
@relation(fields: [childId], references: [id]) + } + + model Child { + #id(id, Int, @id) + char String @test.Char(10) + vChar String @test.VarChar(11) + text String @test.Text + bit String @test.Bit(4) + vBit String @test.VarBit(5) + uuid String @test.Uuid + ip String @test.Inet + + parent Parent? + }"# + }; + + schema.to_owned() + } + + // "Postgres native string types" should "work" + #[connector_test(schema(schema_string))] + async fn native_string(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + child: { create: { + id: 1, + char: "1234567890" + vChar: "12345678910" + text: "text" + bit: "1010" + vBit: "00110" + uuid: "123e4567-e89b-12d3-a456-426614174000" + ip: "127.0.0.1" + }} + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManyParent { + id + child { + char + vChar + text + bit + vBit + uuid + ip + } + }}"#), + @r###"{"data":{"findManyParent":[{"id":1,"child":{"char":"1234567890","vChar":"12345678910","text":"text","bit":"1010","vBit":"00110","uuid":"123e4567-e89b-12d3-a456-426614174000","ip":"127.0.0.1"}}]}}"### + ); + + Ok(()) + } + + async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { + runner + .query(format!("mutation {{ createOneParent(data: {}) {{ id }} }}", data)) + .await? + .assert_success(); + Ok(()) + } +} + +// Napi & Wasm DAs excluded because of a bytes bug +#[test_suite( + schema(schema), + only(Postgres("9", "10", "11", "12", "13", "14", "15", "pg.js", "neon.js")) +)] +mod others { + fn schema_other_types() -> String { + let schema = indoc! { + r#" + model Parent { + #id(id, Int, @id) + + childId Int? @unique + child Child? @relation(fields: [childId], references: [id]) + } + + model Child { + #id(id, Int, @id) + bool Boolean @test.Boolean + byteA Bytes @test.ByteA + json Json @test.Json + jsonb Json @test.JsonB + + parent Parent? + }"# + }; + + schema.to_owned() + } + + // "Other Postgres native types" should "work" + #[connector_test(schema(schema_other_types))] + async fn native_other_types(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + child: { + create: { + id: 1, + bool: true + byteA: "dGVzdA==" + json: "{}" + jsonb: "{\"a\": \"b\"}" + } + } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManyParent { id child { id bool byteA json jsonb } } }"#), + @r###"{"data":{"findManyParent":[{"id":1,"child":{"id":1,"bool":true,"byteA":"dGVzdA==","json":"{}","jsonb":"{\"a\":\"b\"}"}}]}}"### + ); + + Ok(()) + } + + fn schema_xml() -> String { + let schema = indoc! { + r#" + model Parent { + #id(id, Int, @id) + + childId Int? @unique + child Child? @relation(fields: [childId], references: [id]) + } + + model Child { + #id(id, Int, @id) + xml String @test.Xml + + parent Parent? + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_xml), only(Postgres))] + async fn native_xml(runner: Runner) -> TestResult<()> { + create_row( + &runner, + r#"{ + id: 1, + child: { + create: { + id: 1, + xml: "wurst" + } + } + }"#, + ) + .await?; + + insta::assert_snapshot!( + run_query!(&runner, r#"{ findManyParent { id child { xml } } }"#), + @r###"{"data":{"findManyParent":[{"id":1,"child":{"xml":"wurst"}}]}}"### + ); + + Ok(()) + } + + async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { + runner + .query(format!("mutation {{ createOneParent(data: {}) {{ id }} }}", data)) + .await? 
+ .assert_success(); + Ok(()) + } +} diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs b/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs index daf947e2ffea..d42dc627bf62 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/coerce.rs @@ -1,8 +1,7 @@ -use std::io; - -use bigdecimal::{BigDecimal, FromPrimitive}; +use bigdecimal::{BigDecimal, FromPrimitive, ParseBigDecimalError}; use itertools::{Either, Itertools}; use query_structure::*; +use std::{io, str::FromStr}; use crate::{query_arguments_ext::QueryArgumentsExt, SqlError}; @@ -106,10 +105,10 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel serde_json::Value::Bool(b) => Ok(PrismaValue::Boolean(b)), serde_json::Value::Number(n) => match sf.type_identifier() { TypeIdentifier::Int => Ok(PrismaValue::Int(n.as_i64().ok_or_else(|| { - build_conversion_error(&format!("Number({n})"), &format!("{:?}", sf.type_identifier())) + build_conversion_error(sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier())) })?)), TypeIdentifier::BigInt => Ok(PrismaValue::BigInt(n.as_i64().ok_or_else(|| { - build_conversion_error(&format!("Number({n})"), &format!("{:?}", sf.type_identifier())) + build_conversion_error(sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier())) })?)), TypeIdentifier::Float | TypeIdentifier::Decimal => { let bd = n @@ -117,12 +116,13 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel .and_then(BigDecimal::from_f64) .map(|bd| bd.normalized()) .ok_or_else(|| { - build_conversion_error(&format!("Number({n})"), &format!("{:?}", sf.type_identifier())) + build_conversion_error(sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier())) })?; Ok(PrismaValue::Float(bd)) } _ => Err(build_conversion_error( + sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier()), )), @@ -130,26 +130,43 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel serde_json::Value::String(s) => match sf.type_identifier() { TypeIdentifier::String => Ok(PrismaValue::String(s)), TypeIdentifier::Enum(_) => Ok(PrismaValue::Enum(s)), - TypeIdentifier::DateTime => Ok(PrismaValue::DateTime(parse_datetime(&format!("{s}Z")).map_err( - |err| { + TypeIdentifier::DateTime => { + let res = sf.parse_json_datetime(&s).map_err(|err| { + build_conversion_error_with_reason( + sf, + &format!("String({s})"), + &format!("{:?}", sf.type_identifier()), + &err.to_string(), + ) + })?; + + Ok(PrismaValue::DateTime(res)) + } + TypeIdentifier::Decimal => { + let res = parse_decimal(&s).map_err(|err| { build_conversion_error_with_reason( + sf, &format!("String({s})"), &format!("{:?}", sf.type_identifier()), &err.to_string(), ) - }, - )?)), + })?; + + Ok(PrismaValue::Float(res)) + } TypeIdentifier::UUID => Ok(PrismaValue::Uuid(uuid::Uuid::parse_str(&s).map_err(|err| { build_conversion_error_with_reason( + sf, &format!("String({s})"), &format!("{:?}", sf.type_identifier()), &err.to_string(), ) })?)), TypeIdentifier::Bytes => { - // We skip the first two characters because they are the \x prefix. + // We skip the first two characters because there's the \x prefix. 
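                 // (PostgreSQL's JSON output encodes `bytea` values as hex
                 // strings of the form "\x...", so the prefix is not part of
                 // the data.)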
let bytes = hex::decode(&s[2..]).map_err(|err| {
                     build_conversion_error_with_reason(
+                        sf,
                         &format!("String({s})"),
                         &format!("{:?}", sf.type_identifier()),
                         &err.to_string(),
@@ -159,6 +176,7 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel
                 Ok(PrismaValue::Bytes(bytes))
             }
             _ => Err(build_conversion_error(
+                sf,
                 &format!("String({s})"),
                 &format!("{:?}", sf.type_identifier()),
             )),
@@ -173,20 +191,30 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel
     }
 }
 
-fn build_conversion_error(from: &str, to: &str) -> SqlError {
+fn build_conversion_error(sf: &ScalarField, from: &str, to: &str) -> SqlError {
+    let container_name = sf.container().name();
+    let field_name = sf.name();
+
     let error = io::Error::new(
         io::ErrorKind::InvalidData,
-        format!("Unexpected conversion failure from {from} to {to}."),
+        format!("Unexpected conversion failure for field {container_name}.{field_name} from {from} to {to}."),
     );
 
     SqlError::ConversionError(error.into())
 }
 
-fn build_conversion_error_with_reason(from: &str, to: &str, reason: &str) -> SqlError {
+fn build_conversion_error_with_reason(sf: &ScalarField, from: &str, to: &str, reason: &str) -> SqlError {
+    let container_name = sf.container().name();
+    let field_name = sf.name();
+
     let error = io::Error::new(
         io::ErrorKind::InvalidData,
-        format!("Unexpected conversion failure from {from} to {to}. Reason: ${reason}"),
+        format!("Unexpected conversion failure for field {container_name}.{field_name} from {from} to {to}. Reason: ${reason}"),
     );
 
     SqlError::ConversionError(error.into())
 }
+
+fn parse_decimal(str: &str) -> std::result::Result<BigDecimal, ParseBigDecimalError> {
+    BigDecimal::from_str(str).map(|bd| bd.normalized())
+}
diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs
index c2eb84435d5b..81b424ca5902 100644
--- a/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs
+++ b/query-engine/connectors/sql-query-connector/src/model_extensions/column.rs
@@ -107,6 +107,7 @@ impl AsColumn for ScalarField {
 
         Column::from((full_table_name, col))
             .type_family(self.type_family())
+            .native_column_type(self.native_type().map(|nt| nt.name()))
             .set_is_enum(self.type_identifier().is_enum())
             .set_is_list(self.is_list())
             .default(quaint::ast::DefaultValue::Generated)
diff --git a/query-engine/query-structure/src/field/scalar.rs b/query-engine/query-structure/src/field/scalar.rs
index b8ef8ab204e2..52d59686fdda 100644
--- a/query-engine/query-structure/src/field/scalar.rs
+++ b/query-engine/query-structure/src/field/scalar.rs
@@ -1,4 +1,5 @@
 use crate::{ast, parent_container::ParentContainer, prelude::*, DefaultKind, NativeTypeInstance, ValueGenerator};
+use chrono::{DateTime, FixedOffset};
 use psl::{
     parser_database::{walkers, ScalarFieldType, ScalarType},
     schema_ast::ast::FieldArity,
@@ -170,6 +171,13 @@ impl ScalarField {
         })
     }
 
+    pub fn parse_json_datetime(&self, value: &str) -> chrono::ParseResult<DateTime<FixedOffset>> {
+        let nt = self.native_type().map(|nt| nt.native_type);
+        let connector = self.dm.schema.connector;
+
+        connector.parse_json_datetime(value, nt)
+    }
+
     pub fn is_autoincrement(&self) -> bool {
         match self.id {
             ScalarFieldId::InModel(id) => self.dm.walk(id).is_autoincrement(),