diff --git a/.github/workflows/codspeed.yml b/.github/workflows/codspeed.yml index 6a0f8bb124ac..96263f590079 100644 --- a/.github/workflows/codspeed.yml +++ b/.github/workflows/codspeed.yml @@ -28,10 +28,10 @@ jobs: run: cargo install cargo-codspeed - name: "Build the benchmark targets: schema" - run: cargo codspeed build -p schema + run: cargo codspeed build -p schema --features all_connectors - name: "Build the benchmark targets: request-handlers" - run: cargo codspeed build -p request-handlers + run: cargo codspeed build -p request-handlers --features native - name: Run the benchmarks uses: CodSpeedHQ/action@v2 diff --git a/Cargo.lock b/Cargo.lock index b5de417b367f..b58037af9285 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3486,6 +3486,7 @@ name = "psl-core" version = "0.1.0" dependencies = [ "bigdecimal", + "cfg-if", "chrono", "connection-string", "diagnostics", diff --git a/Cargo.toml b/Cargo.toml index bf60948855a8..f14f7c508c8c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,16 +76,6 @@ url = { version = "2.5.0" } [workspace.dependencies.quaint] path = "quaint" -features = [ - "expose-drivers", - "fmt-sql", - "mssql", - "mysql", - "pooled", - "postgresql", - "sqlite", - "native", -] [profile.dev.package.backtrace] opt-level = 3 diff --git a/nix/shell.nix b/nix/shell.nix index 7fdda2d910e3..14d33f64abfc 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -7,7 +7,8 @@ in { devShells.default = pkgs.mkShell { packages = with pkgs; [ - devToolchain + # devToolchain + rustup llvmPackages_latest.bintools nodejs_20 diff --git a/psl/psl-core/Cargo.toml b/psl/psl-core/Cargo.toml index c7f6a7b22340..eb2e59f3d489 100644 --- a/psl/psl-core/Cargo.toml +++ b/psl/psl-core/Cargo.toml @@ -3,6 +3,14 @@ edition = "2021" name = "psl-core" version = "0.1.0" +[features] +postgresql = [] +sqlite = [] +mysql = [] +cockroachdb = [] +mssql = [] +mongodb = [] + [dependencies] diagnostics = { path = "../diagnostics" } parser-database = { path = "../parser-database" } @@ -25,3 +33,4 @@ 
hex = "0.4" # For the connector API. lsp-types = "0.91.1" url.workspace = true +cfg-if = "1.0.0" diff --git a/psl/psl-core/src/builtin_connectors/capabilities_support.rs b/psl/psl-core/src/builtin_connectors/capabilities_support.rs new file mode 100644 index 000000000000..aef2d22cd076 --- /dev/null +++ b/psl/psl-core/src/builtin_connectors/capabilities_support.rs @@ -0,0 +1,93 @@ +use crate::datamodel_connector::{Connector, ConnectorCapabilities, ConnectorCapability}; +use cfg_if::cfg_if; + +cfg_if! { + // if built only for mysql + if #[cfg(all(feature="mysql", not(any(feature = "postgresql", feature="sqlite", feature = "cockroachdb", feature="mssql", feature="mongodb"))))] { + #[inline(always)] + const fn can_have_capability_impl(capability: ConnectorCapability) -> bool { + check_comptime_capability(super::mysql_datamodel_connector::CAPABILITIES, capability) + } + + pub fn has_capability(_: &dyn Connector, capability: ConnectorCapability) -> bool { + can_have_capability_impl(capability) + } + // if built only for sqlite + } else if #[cfg(all(feature="sqlite", not(any(feature = "postgresql", feature="mysql", feature = "cockroachdb", feature="mssql", feature="mongodb"))))] { + #[inline(always)] + const fn can_have_capability_impl(capability: ConnectorCapability) -> bool { + check_comptime_capability(super::sqlite_datamodel_connector::CAPABILITIES, capability) + } + + #[inline(always)] + pub fn has_capability(_: &dyn Connector, capability: ConnectorCapability) -> bool { + can_have_capability_impl(capability) + } + // if built only for postgresql + } else if #[cfg(all(feature="postgresql", not(any(feature = "sqlite", feature="mysql", feature = "cockroachdb", feature="mssql", feature="mongodb"))))] { + #[inline(always)] + const fn can_have_capability_impl(capability: ConnectorCapability) -> bool { + check_comptime_capability(super::postgres_datamodel_connector::CAPABILITIES, capability) + } + + #[inline(always)] + pub fn has_capability(_: &dyn Connector, capability: 
ConnectorCapability) -> bool { + can_have_capability_impl(capability) + } + // any other build configuration + } else { + #[inline(always)] + const fn can_have_capability_impl(_: ConnectorCapability) -> bool { + true + } + + #[inline(always)] + pub fn has_capability(connector: &dyn Connector, capability: ConnectorCapability) -> bool { + connector.capabilities().contains(capability) + } + } +} + +/// Helper function for determining if engine, compiled with the current settings, +/// can potentially have provided capability on. Useful for single-connector builds and can +/// be used to exclude certain code that we know for sure can't be executed for current connector. +/// Has no effect on multi-connector builds +/// # Example +/// ```ignore +/// if !can_have_capability(ConnectorCapability::FullTextSearch) { +/// unreachable!() +/// } +/// ... // if compiled for a single connector, optimizer will exclude the following code if connector does not support full text search +/// ``` +#[inline(always)] +pub const fn can_have_capability(cap: ConnectorCapability) -> bool { + can_have_capability_impl(cap) +} + +/// Marks the code as reachable only by the connectors, +/// having the specific capability. +/// Optimizer usually can optimize the code away if none of the connectors +/// current build supports the capability. +/// +/// If we are within a single connector build that has no such capability, +/// and the code marked with this macro is reached, it will panic. +#[macro_export] +macro_rules! 
reachable_only_with_capability { + ($cap: expr) => { + if !$crate::builtin_connectors::can_have_capability($cap) { + core::unreachable!() + } + }; +} + +#[inline(always)] +#[allow(dead_code)] // not used if more than one connector is built +const fn check_comptime_capability(capabilities: ConnectorCapabilities, cap: ConnectorCapability) -> bool { + (capabilities.bits_c() & (cap as u64)) > 0 +} + +#[inline(always)] +pub const fn can_support_relation_load_strategy() -> bool { + can_have_capability(ConnectorCapability::LateralJoin) + || can_have_capability(ConnectorCapability::CorrelatedSubqueries) +} diff --git a/psl/psl-core/src/builtin_connectors/completions.rs b/psl/psl-core/src/builtin_connectors/completions.rs index 120bfae425f2..b8acd4dd97a6 100644 --- a/psl/psl-core/src/builtin_connectors/completions.rs +++ b/psl/psl-core/src/builtin_connectors/completions.rs @@ -1,9 +1,6 @@ -use crate::datamodel_connector::format_completion_docs; -use lsp_types::{ - CompletionItem, CompletionItemKind, CompletionList, Documentation, InsertTextFormat, MarkupContent, MarkupKind, -}; - -pub(crate) fn extensions_completion(completion_list: &mut CompletionList) { +#[cfg(feature = "postgresql")] +pub(crate) fn extensions_completion(completion_list: &mut lsp_types::CompletionList) { + use lsp_types::*; completion_list.items.push(CompletionItem { label: "extensions".to_owned(), insert_text: Some("extensions = [$0]".to_owned()), @@ -11,7 +8,7 @@ pub(crate) fn extensions_completion(completion_list: &mut CompletionList) { kind: Some(CompletionItemKind::FIELD), documentation: Some(Documentation::MarkupContent(MarkupContent { kind: MarkupKind::Markdown, - value: format_completion_docs( + value: crate::datamodel_connector::format_completion_docs( r#"extensions = [pg_trgm, postgis(version: "2.1")]"#, r#"Enable PostgreSQL extensions. 
[Learn more](https://pris.ly/d/postgresql-extensions)"#, None, @@ -21,7 +18,14 @@ pub(crate) fn extensions_completion(completion_list: &mut CompletionList) { }) } -pub(crate) fn schemas_completion(completion_list: &mut CompletionList) { +#[cfg(any( + feature = "postgresql", + feature = "cockroachdb", + feature = "mssql", + feature = "mysql" +))] +pub(crate) fn schemas_completion(completion_list: &mut lsp_types::CompletionList) { + use lsp_types::*; completion_list.items.push(CompletionItem { label: "schemas".to_owned(), insert_text: Some(r#"schemas = [$0]"#.to_owned()), @@ -29,7 +33,7 @@ pub(crate) fn schemas_completion(completion_list: &mut CompletionList) { kind: Some(CompletionItemKind::FIELD), documentation: Some(Documentation::MarkupContent(MarkupContent { kind: MarkupKind::Markdown, - value: format_completion_docs( + value: crate::datamodel_connector::format_completion_docs( r#"schemas = ["foo", "bar", "baz"]"#, "The list of database schemas. [Learn More](https://pris.ly/d/multi-schema-configuration)", None, diff --git a/psl/psl-core/src/builtin_connectors/mod.rs b/psl/psl-core/src/builtin_connectors/mod.rs index 65ee251fad57..db5f5eb0a153 100644 --- a/psl/psl-core/src/builtin_connectors/mod.rs +++ b/psl/psl-core/src/builtin_connectors/mod.rs @@ -1,27 +1,64 @@ +#[cfg(feature = "cockroachdb")] pub mod cockroach_datamodel_connector; pub mod completions; +#[cfg(feature = "cockroachdb")] pub use cockroach_datamodel_connector::CockroachType; +#[cfg(feature = "mongodb")] pub use mongodb::MongoDbType; +#[cfg(feature = "mssql")] pub use mssql_datamodel_connector::{MsSqlType, MsSqlTypeParameter}; +#[cfg(feature = "mysql")] pub use mysql_datamodel_connector::MySqlType; +#[cfg(feature = "postgresql")] pub use postgres_datamodel_connector::{PostgresDatasourceProperties, PostgresType}; +mod capabilities_support; +#[cfg(feature = "mongodb")] mod mongodb; +#[cfg(feature = "mssql")] mod mssql_datamodel_connector; +#[cfg(feature = "mysql")] mod mysql_datamodel_connector; mod 
native_type_definition; +#[cfg(feature = "postgresql")] mod postgres_datamodel_connector; +#[cfg(feature = "sqlite")] mod sqlite_datamodel_connector; mod utils; +pub use capabilities_support::{can_have_capability, can_support_relation_load_strategy, has_capability}; -use crate::{datamodel_connector::Connector, ConnectorRegistry}; +use crate::ConnectorRegistry; -pub const POSTGRES: &'static dyn Connector = &postgres_datamodel_connector::PostgresDatamodelConnector; -pub const COCKROACH: &'static dyn Connector = &cockroach_datamodel_connector::CockroachDatamodelConnector; -pub const MYSQL: &'static dyn Connector = &mysql_datamodel_connector::MySqlDatamodelConnector; -pub const SQLITE: &'static dyn Connector = &sqlite_datamodel_connector::SqliteDatamodelConnector; -pub const MSSQL: &'static dyn Connector = &mssql_datamodel_connector::MsSqlDatamodelConnector; -pub const MONGODB: &'static dyn Connector = &mongodb::MongoDbDatamodelConnector; +#[cfg(feature = "postgresql")] +pub const POSTGRES: &'static dyn crate::datamodel_connector::Connector = + &postgres_datamodel_connector::PostgresDatamodelConnector; +#[cfg(feature = "cockroachdb")] +pub const COCKROACH: &'static dyn crate::datamodel_connector::Connector = + &cockroach_datamodel_connector::CockroachDatamodelConnector; +#[cfg(feature = "mysql")] +pub const MYSQL: &'static dyn crate::datamodel_connector::Connector = + &mysql_datamodel_connector::MySqlDatamodelConnector; +#[cfg(feature = "sqlite")] +pub const SQLITE: &'static dyn crate::datamodel_connector::Connector = + &sqlite_datamodel_connector::SqliteDatamodelConnector; +#[cfg(feature = "mssql")] +pub const MSSQL: &'static dyn crate::datamodel_connector::Connector = + &mssql_datamodel_connector::MsSqlDatamodelConnector; +#[cfg(feature = "mongodb")] +pub const MONGODB: &'static dyn crate::datamodel_connector::Connector = &mongodb::MongoDbDatamodelConnector; -pub static BUILTIN_CONNECTORS: ConnectorRegistry<'static> = &[POSTGRES, MYSQL, SQLITE, MSSQL, COCKROACH, 
MONGODB]; +pub static BUILTIN_CONNECTORS: ConnectorRegistry<'static> = &[ + #[cfg(feature = "postgresql")] + POSTGRES, + #[cfg(feature = "mysql")] + MYSQL, + #[cfg(feature = "sqlite")] + SQLITE, + #[cfg(feature = "mssql")] + MSSQL, + #[cfg(feature = "cockroachdb")] + COCKROACH, + #[cfg(feature = "mongodb")] + MONGODB, +]; diff --git a/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs index 75db8d2c870b..4240525bc5e3 100644 --- a/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs @@ -28,7 +28,7 @@ const TEXT_TYPE_NAME: &str = "Text"; const MEDIUM_TEXT_TYPE_NAME: &str = "MediumText"; const LONG_TEXT_TYPE_NAME: &str = "LongText"; -const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(ConnectorCapability::{ +pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(ConnectorCapability::{ Enums | EnumArrayPush | Json | @@ -55,6 +55,7 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector MultiSchema | FullTextIndex | + FullTextSearch | FullTextSearchWithIndex | MultipleFullTextAttributesPerModel | ImplicitManyToManyRelation | diff --git a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs index 4da85fa89861..35bcc30d0244 100644 --- a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs @@ -26,7 +26,7 @@ const CONSTRAINT_SCOPES: &[ConstraintScope] = &[ ConstraintScope::ModelPrimaryKeyKeyIndexForeignKey, ]; -const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(ConnectorCapability::{ +pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(ConnectorCapability::{ AdvancedJsonNullability | AnyId | AutoIncrement | @@ -39,6 +39,7 @@ const 
CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector CreateSkipDuplicates | Enums | EnumArrayPush | + FullTextSearch | FullTextSearchWithoutIndex | InsensitiveFilters | Json | diff --git a/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs index 8c0756b97cc0..4d5febb74b51 100644 --- a/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs @@ -10,7 +10,7 @@ use enumflags2::BitFlags; const NATIVE_TYPE_CONSTRUCTORS: &[NativeTypeConstructor] = &[]; const CONSTRAINT_SCOPES: &[ConstraintScope] = &[ConstraintScope::GlobalKeyIndex]; -const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(ConnectorCapability::{ +pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(ConnectorCapability::{ AnyId | AutoIncrement | CompoundIds | diff --git a/psl/psl-core/src/builtin_connectors/utils.rs b/psl/psl-core/src/builtin_connectors/utils.rs index a8d5618f5d23..8c777d63e21b 100644 --- a/psl/psl-core/src/builtin_connectors/utils.rs +++ b/psl/psl-core/src/builtin_connectors/utils.rs @@ -1,3 +1,4 @@ +#[cfg(any(feature = "postgresql", feature = "mysql"))] pub(crate) mod common { use chrono::*; @@ -25,6 +26,7 @@ pub(crate) mod common { } } +#[cfg(feature = "postgresql")] pub(crate) mod postgres { use chrono::*; @@ -50,6 +52,7 @@ pub(crate) mod postgres { } } +#[cfg(feature = "mysql")] pub(crate) mod mysql { use chrono::*; diff --git a/psl/psl-core/src/datamodel_connector.rs b/psl/psl-core/src/datamodel_connector.rs index b2c539036a9f..cb4d0bc7acd8 100644 --- a/psl/psl-core/src/datamodel_connector.rs +++ b/psl/psl-core/src/datamodel_connector.rs @@ -59,11 +59,6 @@ pub trait Connector: Send + Sync { /// The static list of capabilities for the connector. fn capabilities(&self) -> ConnectorCapabilities; - /// Does the connector have this capability? 
- fn has_capability(&self, capability: ConnectorCapability) -> bool { - self.capabilities().contains(capability) - } - /// The maximum length of constraint names in bytes. Connectors without a /// limit should return usize::MAX. fn max_identifier_length(&self) -> usize; @@ -105,22 +100,6 @@ pub trait Connector: Send + Sync { false } - fn supports_composite_types(&self) -> bool { - self.has_capability(ConnectorCapability::CompositeTypes) - } - - fn supports_named_primary_keys(&self) -> bool { - self.has_capability(ConnectorCapability::NamedPrimaryKeys) - } - - fn supports_named_foreign_keys(&self) -> bool { - self.has_capability(ConnectorCapability::NamedForeignKeys) - } - - fn supports_named_default_values(&self) -> bool { - self.has_capability(ConnectorCapability::NamedDefaultValues) - } - fn supports_referential_action(&self, relation_mode: &RelationMode, action: ReferentialAction) -> bool { match relation_mode { RelationMode::ForeignKeys => self.referential_actions().contains(action), @@ -222,44 +201,18 @@ pub trait Connector: Send + Sync { diagnostics: &mut Diagnostics, ) -> Option; - fn supports_scalar_lists(&self) -> bool { - self.has_capability(ConnectorCapability::ScalarLists) - } - - fn supports_enums(&self) -> bool { - self.has_capability(ConnectorCapability::Enums) - } - - fn supports_json(&self) -> bool { - self.has_capability(ConnectorCapability::Json) - } - - fn supports_json_lists(&self) -> bool { - self.has_capability(ConnectorCapability::JsonLists) - } - - fn supports_auto_increment(&self) -> bool { - self.has_capability(ConnectorCapability::AutoIncrement) - } - - fn supports_non_id_auto_increment(&self) -> bool { - self.has_capability(ConnectorCapability::AutoIncrementAllowedOnNonId) - } - - fn supports_multiple_auto_increment(&self) -> bool { - self.has_capability(ConnectorCapability::AutoIncrementMultipleAllowed) - } - - fn supports_non_indexed_auto_increment(&self) -> bool { - 
self.has_capability(ConnectorCapability::AutoIncrementNonIndexedAllowed) - } - - fn supports_compound_ids(&self) -> bool { - self.has_capability(ConnectorCapability::CompoundIds) + fn static_join_strategy_support(&self) -> bool { + self.capabilities().contains(ConnectorCapability::LateralJoin) + || self.capabilities().contains(ConnectorCapability::CorrelatedSubqueries) } - fn supports_decimal(&self) -> bool { - self.has_capability(ConnectorCapability::DecimalType) + /// Returns whether the connector supports the `RelationLoadStrategy::Join`. + /// On some connectors, this might return `UnknownYet`. + fn runtime_join_strategy_support(&self) -> JoinStrategySupport { + match self.static_join_strategy_support() { + true => JoinStrategySupport::Yes, + false => JoinStrategySupport::No, + } } fn supported_index_types(&self) -> BitFlags { @@ -270,11 +223,6 @@ self.supported_index_types().contains(algo) } - fn allows_relation_fields_in_arbitrary_order(&self) -> bool { - self.has_capability(ConnectorCapability::RelationFieldsInArbitraryOrder) - } - - /// If true, the schema validator function checks whether the referencing fields in a `@relation` attribute /// are included in an index. fn should_suggest_missing_referencing_fields_indexes(&self) -> bool { true } @@ -329,20 +277,6 @@ ) -> prisma_value::PrismaValueResult> { unreachable!("This method is only implemented on connectors with lateral join support.") } - - fn static_join_strategy_support(&self) -> bool { - self.has_capability(ConnectorCapability::LateralJoin) - || self.has_capability(ConnectorCapability::CorrelatedSubqueries) - } - - /// Returns whether the connector supports the `RelationLoadStrategy::Join`. - /// On some connectors, this might return `UnknownYet`. 
- fn runtime_join_strategy_support(&self) -> JoinStrategySupport { - match self.static_join_strategy_support() { - true => JoinStrategySupport::Yes, - false => JoinStrategySupport::No, - } - } } #[derive(Copy, Clone, Debug, PartialEq)] diff --git a/psl/psl-core/src/datamodel_connector/capabilities.rs b/psl/psl-core/src/datamodel_connector/capabilities.rs index 912a67cb2755..b520e53841a2 100644 --- a/psl/psl-core/src/datamodel_connector/capabilities.rs +++ b/psl/psl-core/src/datamodel_connector/capabilities.rs @@ -86,6 +86,7 @@ capabilities!( AnyId, // Any (or combination of) uniques and not only id fields can constitute an id for a model. SqlQueryRaw, MongoDbQueryRaw, + FullTextSearch, FullTextSearchWithoutIndex, FullTextSearchWithIndex, AdvancedJsonNullability, // Connector distinguishes between their null type and JSON null. diff --git a/psl/psl-core/src/datamodel_connector/walker_ext_traits.rs b/psl/psl-core/src/datamodel_connector/walker_ext_traits.rs index 76ab44ef5012..9c44664f4fc3 100644 --- a/psl/psl-core/src/datamodel_connector/walker_ext_traits.rs +++ b/psl/psl-core/src/datamodel_connector/walker_ext_traits.rs @@ -1,5 +1,8 @@ -use crate::datamodel_connector::{ - constraint_names::ConstraintNames, Connector, NativeTypeInstance, ReferentialAction, RelationMode, +use crate::{ + builtin_connectors::has_capability, + datamodel_connector::{ + constraint_names::ConstraintNames, Connector, NativeTypeInstance, ReferentialAction, RelationMode, + }, }; use parser_database::{ ast::{self, WithSpan}, @@ -7,6 +10,8 @@ use parser_database::{ }; use std::borrow::Cow; +use super::ConnectorCapability; + pub trait IndexWalkerExt<'db> { fn constraint_name(self, connector: &dyn Connector) -> Cow<'db, str>; } @@ -60,7 +65,7 @@ pub trait PrimaryKeyWalkerExt<'db> { impl<'db> PrimaryKeyWalkerExt<'db> for PrimaryKeyWalker<'db> { fn constraint_name(self, connector: &dyn Connector) -> Option> { - if !connector.supports_named_primary_keys() { + if !has_capability(connector, 
ConnectorCapability::NamedPrimaryKeys) { return None; } diff --git a/psl/psl-core/src/validate/validation_pipeline/context.rs b/psl/psl-core/src/validate/validation_pipeline/context.rs index a7d89d69e3f4..c55d37d51b7e 100644 --- a/psl/psl-core/src/validate/validation_pipeline/context.rs +++ b/psl/psl-core/src/validate/validation_pipeline/context.rs @@ -1,5 +1,6 @@ use crate::{ - datamodel_connector::{Connector, RelationMode}, + builtin_connectors::has_capability, + datamodel_connector::{Connector, ConnectorCapability, RelationMode}, Datasource, PreviewFeature, }; use diagnostics::{DatamodelError, DatamodelWarning, Diagnostics}; @@ -29,4 +30,8 @@ impl Context<'_> { pub(super) fn push_warning(&mut self, warning: DatamodelWarning) { self.diagnostics.push_warning(warning); } + + pub(super) fn has_capability(&self, capability: ConnectorCapability) -> bool { + has_capability(self.connector, capability) + } } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations.rs b/psl/psl-core/src/validate/validation_pipeline/validations.rs index 90f8ec9fe79e..76e277be7394 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations.rs @@ -13,6 +13,8 @@ mod relation_fields; mod relations; mod views; +use crate::datamodel_connector::ConnectorCapability; + use super::context::Context; use names::Names; use parser_database::walkers::RefinedRelationWalker; @@ -134,7 +136,7 @@ pub(super) fn validate(ctx: &mut Context<'_>) { } } - if ctx.connector.supports_enums() { + if ctx.has_capability(ConnectorCapability::Enums) { enums::database_name_clashes(ctx); } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/autoincrement.rs b/psl/psl-core/src/validate/validation_pipeline/validations/autoincrement.rs index 2d97c4e334d9..cf5171c82852 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/autoincrement.rs +++ 
b/psl/psl-core/src/validate/validation_pipeline/validations/autoincrement.rs @@ -1,4 +1,5 @@ use crate::{ + datamodel_connector::ConnectorCapability, diagnostics::DatamodelError, parser_database::{ast::WithSpan, walkers::ModelWalker}, validate::validation_pipeline::context::Context, @@ -12,7 +13,7 @@ pub(super) fn validate_auto_increment(model: ModelWalker<'_>, ctx: &mut Context< } // First check if the provider supports autoincrement at all. If yes, proceed with the detailed checks. - if !ctx.connector.supports_auto_increment() { + if !ctx.has_capability(ConnectorCapability::AutoIncrement) { for field in autoincrement_fields() { let msg = "The `autoincrement()` default value is used with a datasource that does not support it."; @@ -27,7 +28,7 @@ pub(super) fn validate_auto_increment(model: ModelWalker<'_>, ctx: &mut Context< return; } - if !ctx.connector.supports_multiple_auto_increment() && autoincrement_fields().count() > 1 { + if !ctx.has_capability(ConnectorCapability::AutoIncrementMultipleAllowed) && autoincrement_fields().count() > 1 { let msg = "The `autoincrement()` default value is used multiple times on this model even though the underlying datasource only supports one instance per table."; ctx.push_error(DatamodelError::new_attribute_validation_error( @@ -41,7 +42,9 @@ pub(super) fn validate_auto_increment(model: ModelWalker<'_>, ctx: &mut Context< for field in autoincrement_fields() { let field_is_indexed = || model.field_is_indexed_for_autoincrement(field.field_id()); - if !ctx.connector.supports_non_id_auto_increment() && !model.field_is_single_pk(field.field_id()) { + if !ctx.has_capability(ConnectorCapability::AutoIncrementAllowedOnNonId) + && !model.field_is_single_pk(field.field_id()) + { let msg = "The `autoincrement()` default value is used on a non-id field even though the datasource does not support this."; ctx.push_error(DatamodelError::new_attribute_validation_error( @@ -51,7 +54,7 @@ pub(super) fn validate_auto_increment(model: 
ModelWalker<'_>, ctx: &mut Context< )) } - if !ctx.connector.supports_non_indexed_auto_increment() && !field_is_indexed() { + if !ctx.has_capability(ConnectorCapability::AutoIncrementNonIndexedAllowed) && !field_is_indexed() { let msg = "The `autoincrement()` default value is used on a non-indexed field even though the datasource does not support this."; ctx.push_error(DatamodelError::new_attribute_validation_error( diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs index 6a5c65dfdb3b..da0a3db3a515 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs @@ -1,5 +1,5 @@ use super::default_value; -use crate::validate::validation_pipeline::context::Context; +use crate::{datamodel_connector::ConnectorCapability, validate::validation_pipeline::context::Context}; use diagnostics::DatamodelError; use parser_database::{ ast::{self, WithSpan}, @@ -79,7 +79,7 @@ pub(super) fn detect_composite_cycles(ctx: &mut Context<'_>) { /// Does the connector support composite types. 
pub(crate) fn composite_types_support(composite_type: CompositeTypeWalker<'_>, ctx: &mut Context<'_>) { - if ctx.connector.supports_composite_types() { + if ctx.has_capability(ConnectorCapability::CompositeTypes) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/datasource.rs b/psl/psl-core/src/validate/validation_pipeline/validations/datasource.rs index 533dde03c533..31efdb7ff9b1 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/datasource.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/datasource.rs @@ -20,10 +20,7 @@ pub(super) fn schemas_property_with_no_connector_support(datasource: &Datasource return; } - if ctx - .connector - .has_capability(crate::datamodel_connector::ConnectorCapability::MultiSchema) - { + if ctx.has_capability(crate::datamodel_connector::ConnectorCapability::MultiSchema) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs b/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs index 0ac90e8c65d0..58581f4e5f81 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs @@ -6,7 +6,7 @@ use schema_ast::ast::{self, Expression}; /// Function `auto()` works for now only with MongoDB. 
pub(super) fn validate_auto_param(default_value: Option<&ast::Expression>, ctx: &mut Context<'_>) { - if ctx.connector.has_capability(ConnectorCapability::DefaultValueAuto) { + if ctx.has_capability(ConnectorCapability::DefaultValueAuto) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/enums.rs b/psl/psl-core/src/validate/validation_pipeline/validations/enums.rs index 905c99ad33af..a401e34aa86f 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/enums.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/enums.rs @@ -23,7 +23,7 @@ pub(super) fn schema_is_defined_in_the_datasource(r#enum: EnumWalker<'_>, ctx: & return; } - if !ctx.connector.has_capability(ConnectorCapability::MultiSchema) { + if !ctx.has_capability(ConnectorCapability::MultiSchema) { return; } @@ -52,7 +52,7 @@ pub(super) fn schema_attribute_supported_in_connector(r#enum: EnumWalker<'_>, ct return; } - if ctx.connector.has_capability(ConnectorCapability::MultiSchema) { + if ctx.has_capability(ConnectorCapability::MultiSchema) { return; } @@ -72,7 +72,7 @@ pub(super) fn schema_attribute_missing(r#enum: EnumWalker<'_>, ctx: &mut Context return; } - if !ctx.connector.has_capability(ConnectorCapability::MultiSchema) { + if !ctx.has_capability(ConnectorCapability::MultiSchema) { return; } @@ -113,7 +113,7 @@ pub(super) fn multischema_feature_flag_needed(r#enum: EnumWalker<'_>, ctx: &mut } pub(crate) fn connector_supports_enums(r#enum: EnumWalker<'_>, ctx: &mut Context<'_>) { - if ctx.connector.supports_enums() { + if ctx.has_capability(ConnectorCapability::Enums) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs index bdfb340f5191..0613fda2a48f 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs @@ -109,10 +109,7 @@ pub(crate) 
fn validate_length_used_with_correct_types( attribute: (&str, ast::Span), ctx: &mut Context<'_>, ) { - if !ctx - .connector - .has_capability(ConnectorCapability::IndexColumnLengthPrefixing) - { + if !ctx.has_capability(ConnectorCapability::IndexColumnLengthPrefixing) { return; } @@ -226,7 +223,7 @@ pub(super) fn validate_default_value(field: ScalarFieldWalker<'_>, ctx: &mut Con let default_attribute = field.default_attribute(); // Named defaults. - if default_mapped_name.is_some() && !ctx.connector.supports_named_default_values() { + if default_mapped_name.is_some() && !ctx.has_capability(ConnectorCapability::NamedDefaultValues) { let msg = "You defined a database name for the default value of a field on the model. This is not supported by the provider."; ctx.push_error(DatamodelError::new_attribute_validation_error( @@ -256,7 +253,7 @@ pub(super) fn validate_scalar_field_connector_specific(field: ScalarFieldWalker< match field.scalar_field_type() { ScalarFieldType::BuiltInScalar(ScalarType::Json) => { - if !ctx.connector.supports_json() { + if !ctx.has_capability(ConnectorCapability::Json) { ctx.push_error(DatamodelError::new_field_validation_error( &format!( "Field `{}` in {container} `{}` can't be of type Json. The current connector does not support the Json type.", @@ -270,7 +267,7 @@ pub(super) fn validate_scalar_field_connector_specific(field: ScalarFieldWalker< )); } - if field.ast_field().arity.is_list() && !ctx.connector.supports_json_lists() { + if field.ast_field().arity.is_list() && !ctx.has_capability(ConnectorCapability::JsonLists) { ctx.push_error(DatamodelError::new_field_validation_error( &format!( "Field `{}` in {container} `{}` can't be of type Json[]. 
The current connector does not support the Json List type.", @@ -286,7 +283,7 @@ pub(super) fn validate_scalar_field_connector_specific(field: ScalarFieldWalker< } ScalarFieldType::BuiltInScalar(ScalarType::Decimal) => { - if !ctx.connector.supports_decimal() { + if !ctx.has_capability(ConnectorCapability::DecimalType) { ctx.push_error(DatamodelError::new_field_validation_error( &format!( "Field `{}` in {container} `{}` can't be of type Decimal. The current connector does not support the Decimal type.", @@ -304,7 +301,7 @@ pub(super) fn validate_scalar_field_connector_specific(field: ScalarFieldWalker< _ => (), } - if field.ast_field().arity.is_list() && !ctx.connector.supports_scalar_lists() { + if field.ast_field().arity.is_list() && !ctx.has_capability(ConnectorCapability::ScalarLists) { ctx.push_error(DatamodelError::new_scalar_list_fields_are_not_supported( if field.model().ast_model().is_view() { "view" @@ -366,7 +363,7 @@ pub(super) fn validate_unsupported_field_type(field: ScalarFieldWalker<'_>, ctx: } pub(crate) fn id_supports_clustering_setting(pk: PrimaryKeyWalker<'_>, ctx: &mut Context<'_>) { - if ctx.connector.has_capability(ConnectorCapability::ClusteringSetting) { + if ctx.has_capability(ConnectorCapability::ClusteringSetting) { return; } @@ -385,7 +382,7 @@ pub(crate) fn id_supports_clustering_setting(pk: PrimaryKeyWalker<'_>, ctx: &mut /// /// Here we check the primary key. Another check in index validations. 
pub(crate) fn clustering_can_be_defined_only_once(pk: PrimaryKeyWalker<'_>, ctx: &mut Context<'_>) { - if !ctx.connector.has_capability(ConnectorCapability::ClusteringSetting) { + if !ctx.has_capability(ConnectorCapability::ClusteringSetting) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs index 7a7d0e1d105e..9a7ac919fff7 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs @@ -72,10 +72,7 @@ pub(super) fn unique_index_has_a_unique_custom_name_per_model( /// The database must support the index length prefix for it to be allowed in the data model. pub(crate) fn field_length_prefix_supported(index: IndexWalker<'_>, ctx: &mut Context<'_>) { - if ctx - .connector - .has_capability(ConnectorCapability::IndexColumnLengthPrefixing) - { + if ctx.has_capability(ConnectorCapability::IndexColumnLengthPrefixing) { return; } @@ -109,7 +106,7 @@ pub(crate) fn fulltext_index_preview_feature_enabled(index: IndexWalker<'_>, ctx /// `@@fulltext` should only be available if we support it in the database. 
pub(crate) fn fulltext_index_supported(index: IndexWalker<'_>, ctx: &mut Context<'_>) { - if ctx.connector.has_capability(ConnectorCapability::FullTextIndex) { + if ctx.has_capability(ConnectorCapability::FullTextIndex) { return; } @@ -130,7 +127,7 @@ pub(crate) fn fulltext_columns_should_not_define_length(index: IndexWalker<'_>, return; } - if !ctx.connector.has_capability(ConnectorCapability::FullTextIndex) { + if !ctx.has_capability(ConnectorCapability::FullTextIndex) { return; } @@ -155,7 +152,7 @@ pub(crate) fn fulltext_column_sort_is_supported(index: IndexWalker<'_>, ctx: &mu return; } - if !ctx.connector.has_capability(ConnectorCapability::FullTextIndex) { + if !ctx.has_capability(ConnectorCapability::FullTextIndex) { return; } @@ -163,10 +160,7 @@ pub(crate) fn fulltext_column_sort_is_supported(index: IndexWalker<'_>, ctx: &mu return; } - if ctx - .connector - .has_capability(ConnectorCapability::SortOrderInFullTextIndex) - { + if ctx.has_capability(ConnectorCapability::SortOrderInFullTextIndex) { return; } @@ -191,7 +185,7 @@ pub(crate) fn fulltext_text_columns_should_be_bundled_together(index: IndexWalke return; } - if !ctx.connector.has_capability(ConnectorCapability::FullTextIndex) { + if !ctx.has_capability(ConnectorCapability::FullTextIndex) { return; } @@ -199,10 +193,7 @@ pub(crate) fn fulltext_text_columns_should_be_bundled_together(index: IndexWalke return; } - if !ctx - .connector - .has_capability(ConnectorCapability::SortOrderInFullTextIndex) - { + if !ctx.has_capability(ConnectorCapability::SortOrderInFullTextIndex) { return; } @@ -285,7 +276,7 @@ pub(super) fn has_fields(index: IndexWalker<'_>, ctx: &mut Context<'_>) { } pub(crate) fn supports_clustering_setting(index: IndexWalker<'_>, ctx: &mut Context<'_>) { - if ctx.connector.has_capability(ConnectorCapability::ClusteringSetting) { + if ctx.has_capability(ConnectorCapability::ClusteringSetting) { return; } @@ -301,7 +292,7 @@ pub(crate) fn supports_clustering_setting(index: 
IndexWalker<'_>, ctx: &mut Cont } pub(crate) fn clustering_can_be_defined_only_once(index: IndexWalker<'_>, ctx: &mut Context<'_>) { - if !ctx.connector.has_capability(ConnectorCapability::ClusteringSetting) { + if !ctx.has_capability(ConnectorCapability::ClusteringSetting) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs index bc03a848a2b6..a8c222c91600 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs @@ -137,10 +137,7 @@ pub(super) fn has_a_unique_custom_primary_key_name_per_model( /// The database must support the primary key length prefix for it to be allowed in the data model. pub(crate) fn primary_key_length_prefix_supported(model: ModelWalker<'_>, ctx: &mut Context<'_>) { - if ctx - .connector - .has_capability(ConnectorCapability::IndexColumnLengthPrefixing) - { + if ctx.has_capability(ConnectorCapability::IndexColumnLengthPrefixing) { return; } @@ -160,10 +157,7 @@ pub(crate) fn primary_key_length_prefix_supported(model: ModelWalker<'_>, ctx: & /// Not every database is allowing sort definition in the primary key. 
pub(crate) fn primary_key_sort_order_supported(model: ModelWalker<'_>, ctx: &mut Context<'_>) { - if ctx - .connector - .has_capability(ConnectorCapability::PrimaryKeySortOrderDefinition) - { + if ctx.has_capability(ConnectorCapability::PrimaryKeySortOrderDefinition) { return; } @@ -186,14 +180,11 @@ pub(crate) fn only_one_fulltext_attribute_allowed(model: ModelWalker<'_>, ctx: & return; } - if !ctx.connector.has_capability(ConnectorCapability::FullTextIndex) { + if !ctx.has_capability(ConnectorCapability::FullTextIndex) { return; } - if ctx - .connector - .has_capability(ConnectorCapability::MultipleFullTextAttributesPerModel) - { + if ctx.has_capability(ConnectorCapability::MultipleFullTextAttributesPerModel) { return; } @@ -226,7 +217,7 @@ pub(crate) fn primary_key_connector_specific(model: ModelWalker<'_>, ctx: &mut C let container_type = if model.ast_model().is_view() { "view" } else { "model" }; - if primary_key.mapped_name().is_some() && !ctx.connector.supports_named_primary_keys() { + if primary_key.mapped_name().is_some() && !ctx.has_capability(ConnectorCapability::NamedPrimaryKeys) { ctx.push_error(DatamodelError::new_model_validation_error( "You defined a database name for the primary key on the model. 
This is not supported by the provider.", container_type, @@ -235,7 +226,7 @@ pub(crate) fn primary_key_connector_specific(model: ModelWalker<'_>, ctx: &mut C )); } - if primary_key.fields().len() > 1 && !ctx.connector.supports_compound_ids() { + if primary_key.fields().len() > 1 && !ctx.has_capability(ConnectorCapability::CompoundIds) { return ctx.push_error(DatamodelError::new_model_validation_error( "The current connector does not support compound ids.", container_type, @@ -289,7 +280,7 @@ pub(super) fn schema_is_defined_in_the_datasource(model: ModelWalker<'_>, ctx: & return; } - if !ctx.connector.has_capability(ConnectorCapability::MultiSchema) { + if !ctx.has_capability(ConnectorCapability::MultiSchema) { return; } @@ -318,7 +309,7 @@ pub(super) fn schema_attribute_supported_in_connector(model: ModelWalker<'_>, ct return; } - if ctx.connector.has_capability(ConnectorCapability::MultiSchema) { + if ctx.has_capability(ConnectorCapability::MultiSchema) { return; } @@ -338,7 +329,7 @@ pub(super) fn schema_attribute_missing(model: ModelWalker<'_>, ctx: &mut Context return; } - if !ctx.connector.has_capability(ConnectorCapability::MultiSchema) { + if !ctx.has_capability(ConnectorCapability::MultiSchema) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs index 71a07dacae5c..765b6b2bb39f 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs @@ -1,7 +1,7 @@ use super::{database_name::validate_db_name, names::Names}; use crate::{ ast::{self, WithName, WithSpan}, - datamodel_connector::RelationMode, + datamodel_connector::{ConnectorCapability, RelationMode}, diagnostics::DatamodelError, validate::validation_pipeline::context::Context, }; @@ -230,7 +230,7 @@ pub(super) fn map(field: RelationFieldWalker<'_>, ctx: &mut 
Context<'_>) { return; } - if !ctx.connector.supports_named_foreign_keys() { + if !ctx.has_capability(ConnectorCapability::NamedForeignKeys) { let span = field .ast_field() .span_for_attribute("relation") diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs index a960e7d59b5f..ec78b9a61a3f 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs @@ -175,7 +175,7 @@ fn referencing_fields_in_correct_order(relation: InlineRelationWalker<'_>, ctx: return; } - if ctx.connector.allows_relation_fields_in_arbitrary_order() { + if ctx.has_capability(ConnectorCapability::RelationFieldsInArbitraryOrder) { return; } @@ -222,9 +222,7 @@ fn referencing_fields_in_correct_order(relation: InlineRelationWalker<'_>, ctx: /// foreign key. Many to many relations we skip. The user must set one of the /// relation links to NoAction for both referential actions. pub(super) fn cycles(relation: CompleteInlineRelationWalker<'_>, ctx: &mut Context<'_>) { - if !ctx - .connector - .has_capability(ConnectorCapability::ReferenceCycleDetection) + if !ctx.has_capability(ConnectorCapability::ReferenceCycleDetection) && ctx .datasource .map(|ds| ds.relation_mode().uses_foreign_keys()) @@ -302,11 +300,7 @@ pub(super) fn cycles(relation: CompleteInlineRelationWalker<'_>, ctx: &mut Conte /// The user must set one of these relations to use NoAction for onUpdate and /// onDelete. 
pub(super) fn multiple_cascading_paths(relation: CompleteInlineRelationWalker<'_>, ctx: &mut Context<'_>) { - if !ctx - .connector - .has_capability(ConnectorCapability::ReferenceCycleDetection) - || ctx.relation_mode.is_prisma() - { + if !ctx.has_capability(ConnectorCapability::ReferenceCycleDetection) || ctx.relation_mode.is_prisma() { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs index eb4571914c04..32d45eb34645 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/embedded.rs @@ -4,10 +4,7 @@ use parser_database::{ast::WithSpan, walkers::TwoWayEmbeddedManyToManyRelationWa /// Only MongoDb should support embedded M:N relations. pub(crate) fn supports_embedded_relations(relation: TwoWayEmbeddedManyToManyRelationWalker<'_>, ctx: &mut Context<'_>) { - if ctx - .connector - .has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) - { + if ctx.has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) { return; } @@ -31,10 +28,7 @@ pub(crate) fn defines_references_on_both_sides( relation: TwoWayEmbeddedManyToManyRelationWalker<'_>, ctx: &mut Context<'_>, ) { - if !ctx - .connector - .has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) - { + if !ctx.has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) { return; } @@ -67,10 +61,7 @@ pub(crate) fn defines_fields_on_both_sides( relation: TwoWayEmbeddedManyToManyRelationWalker<'_>, ctx: &mut Context<'_>, ) { - if !ctx - .connector - .has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) - { + if !ctx.has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) { return; } @@ -102,10 +93,7 @@ pub(crate) fn references_id_from_both_sides( 
relation: TwoWayEmbeddedManyToManyRelationWalker<'_>, ctx: &mut Context<'_>, ) { - if !ctx - .connector - .has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) - { + if !ctx.has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) { return; } @@ -136,10 +124,7 @@ pub(crate) fn referencing_with_an_array_field_of_correct_type( relation: TwoWayEmbeddedManyToManyRelationWalker<'_>, ctx: &mut Context<'_>, ) { - if !ctx - .connector - .has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) - { + if !ctx.has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) { return; } @@ -183,10 +168,7 @@ pub(crate) fn validate_no_referential_actions( relation: TwoWayEmbeddedManyToManyRelationWalker<'_>, ctx: &mut Context<'_>, ) { - if !ctx - .connector - .has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) - { + if !ctx.has_capability(ConnectorCapability::TwoWayEmbeddedManyToManyRelation) { return; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs index d2fa7609876d..dd60ad6c8044 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations/many_to_many/implicit.rs @@ -60,10 +60,7 @@ pub(crate) fn validate_no_referential_actions(relation: ImplicitManyToManyRelati /// We do not support implicit m:n relations on MongoDb. 
pub(crate) fn supports_implicit_relations(relation: ImplicitManyToManyRelationWalker<'_>, ctx: &mut Context<'_>) { - if ctx - .connector - .has_capability(ConnectorCapability::ImplicitManyToManyRelation) - { + if ctx.has_capability(ConnectorCapability::ImplicitManyToManyRelation) { return; } diff --git a/psl/psl/Cargo.toml b/psl/psl/Cargo.toml index 148a4c3da5e7..e230ffe8c443 100644 --- a/psl/psl/Cargo.toml +++ b/psl/psl/Cargo.toml @@ -3,6 +3,15 @@ name = "psl" version = "0.1.0" edition = "2021" +[features] +postgresql = ["psl-core/postgresql"] +sqlite = ["psl-core/sqlite"] +mysql = ["psl-core/mysql"] +cockroachdb = ["psl-core/cockroachdb"] +mssql = ["psl-core/mssql"] +mongodb = ["psl-core/mongodb"] +all = ["postgresql", "sqlite", "mysql", "cockroachdb", "mssql", "mongodb"] + [dependencies] psl-core = { path = "../psl-core" } diff --git a/psl/psl/src/lib.rs b/psl/psl/src/lib.rs index a00400125c69..9d7fb8f26168 100644 --- a/psl/psl/src/lib.rs +++ b/psl/psl/src/lib.rs @@ -3,12 +3,14 @@ pub use psl_core::builtin_connectors; pub use psl_core::{ + builtin_connectors::{can_have_capability, can_support_relation_load_strategy, has_capability}, datamodel_connector, diagnostics::{self, Diagnostics}, is_reserved_type_name, mcf::config_to_mcf_json_value as get_config, mcf::{generators_to_json, render_sources_to_json}, // for tests parser_database::{self, SourceFile}, + reachable_only_with_capability, reformat, schema_ast, set_config_dir, diff --git a/quaint/Cargo.toml b/quaint/Cargo.toml index 018de045bc4c..eebd14e62775 100644 --- a/quaint/Cargo.toml +++ b/quaint/Cargo.toml @@ -23,7 +23,6 @@ resolver = "2" features = ["docs", "all"] [features] -default = ["mysql", "postgresql", "mssql", "sqlite"] docs = [] # Expose the underlying database drivers when a connector is enabled. This is a # way to access database-specific methods when you need extra control. 
diff --git a/quaint/src/ast/compare.rs b/quaint/src/ast/compare.rs index 9c7548303466..ef465cc62952 100644 --- a/quaint/src/ast/compare.rs +++ b/quaint/src/ast/compare.rs @@ -37,19 +37,14 @@ pub enum Compare<'a> { /// without visitor transformation in between. Raw(Box>, Cow<'a, str>, Box>), /// All json related comparators - #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonCompare(JsonCompare<'a>), /// `left` @@ to_tsquery(`value`) - #[cfg(feature = "postgresql")] Matches(Box>, Cow<'a, str>), /// (NOT `left` @@ to_tsquery(`value`)) - #[cfg(feature = "postgresql")] NotMatches(Box>, Cow<'a, str>), /// ANY (`left`) - #[cfg(feature = "postgresql")] Any(Box>), /// ALL (`left`) - #[cfg(feature = "postgresql")] All(Box>), } @@ -558,7 +553,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -578,7 +572,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>; @@ -608,7 +601,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -638,7 +630,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>; @@ -666,7 +657,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -694,7 +684,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>; @@ -713,7 +702,6 @@ pub trait 
Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -732,7 +720,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>; @@ -756,7 +743,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(feature = "postgresql")] fn matches(self, query: T) -> Compare<'a> where T: Into>; @@ -780,7 +766,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(feature = "postgresql")] fn not_matches(self, query: T) -> Compare<'a> where T: Into>; @@ -796,7 +781,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(feature = "postgresql")] fn any(self) -> Compare<'a>; /// Matches all elem of a list of values. @@ -810,7 +794,6 @@ pub trait Comparable<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(feature = "postgresql")] fn all(self) -> Compare<'a>; /// Compares two expressions with a custom operator. 
@@ -977,7 +960,6 @@ where left.compare_raw(raw_comparator.into(), right) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -988,7 +970,6 @@ where val.json_array_contains(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -999,7 +980,6 @@ where val.json_array_not_contains(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1010,7 +990,6 @@ where val.json_array_begins_with(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -1021,7 +1000,6 @@ where val.json_array_not_begins_with(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1032,7 +1010,6 @@ where val.json_array_ends_into(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -1043,7 +1020,6 @@ where val.json_array_not_ends_into(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -1054,7 +1030,6 @@ where val.json_type_equals(json_type) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -1065,7 +1040,6 @@ where val.json_type_not_equals(json_type) } - #[cfg(feature = "postgresql")] fn matches(self, query: T) -> Compare<'a> where T: Into>, @@ -1076,7 +1050,6 @@ where val.matches(query) } - #[cfg(feature = "postgresql")] fn not_matches(self, query: T) -> Compare<'a> where T: Into>, @@ -1087,7 +1060,6 @@ where val.not_matches(query) } - #[cfg(feature = "postgresql")] fn any(self) -> Compare<'a> { let col: Column<'a> = self.into(); 
let val: Expression<'a> = col.into(); @@ -1095,7 +1067,6 @@ where val.any() } - #[cfg(feature = "postgresql")] fn all(self) -> Compare<'a> { let col: Column<'a> = self.into(); let val: Expression<'a> = col.into(); diff --git a/quaint/src/ast/delete.rs b/quaint/src/ast/delete.rs index e0e7316e8e99..590b82f7930a 100644 --- a/quaint/src/ast/delete.rs +++ b/quaint/src/ast/delete.rs @@ -91,7 +91,6 @@ impl<'a> Delete<'a> { /// assert_eq!("DELETE FROM `users` RETURNING \"id\"", sql); /// # Ok(()) /// # } - #[cfg(any(feature = "postgresql", feature = "mssql", feature = "sqlite"))] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/ast/expression.rs b/quaint/src/ast/expression.rs index ea4c32a4fb61..1d1a653a2e3b 100644 --- a/quaint/src/ast/expression.rs +++ b/quaint/src/ast/expression.rs @@ -1,4 +1,3 @@ -#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::{JsonCompare, JsonType}; use crate::ast::*; use query::SelectQuery; @@ -43,6 +42,7 @@ impl<'a> Expression<'a> { } } + #[cfg(feature = "mysql")] pub(crate) fn is_json_expr(&self) -> bool { match &self.kind { ExpressionKind::Parameterized(Value { @@ -435,7 +435,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Raw(Box::new(self), raw_comparator.into(), Box::new(right.into())) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -443,7 +442,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayContains(Box::new(self), Box::new(item.into()))) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -451,7 +449,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::ArrayNotContains(Box::new(self), Box::new(item.into()))) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: 
Into>, @@ -461,7 +458,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Equals(Box::new(array_starts_with), Box::new(item.into())) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -471,7 +467,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotEquals(Box::new(array_starts_with), Box::new(item.into())) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -481,7 +476,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Equals(Box::new(array_ends_into), Box::new(item.into())) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -491,7 +485,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotEquals(Box::new(array_ends_into), Box::new(item.into())) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -499,7 +492,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::TypeEquals(Box::new(self), json_type.into())) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -507,7 +499,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::JsonCompare(JsonCompare::TypeNotEquals(Box::new(self), json_type.into())) } - #[cfg(feature = "postgresql")] fn matches(self, query: T) -> Compare<'a> where T: Into>, @@ -515,7 +506,6 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::Matches(Box::new(self), query.into()) } - #[cfg(feature = "postgresql")] fn not_matches(self, query: T) -> Compare<'a> where T: Into>, @@ -523,12 +513,10 @@ impl<'a> Comparable<'a> for Expression<'a> { Compare::NotMatches(Box::new(self), query.into()) } - #[cfg(feature = "postgresql")] fn any(self) -> Compare<'a> { 
Compare::Any(Box::new(self)) } - #[cfg(feature = "postgresql")] fn all(self) -> Compare<'a> { Compare::All(Box::new(self)) } diff --git a/quaint/src/ast/function.rs b/quaint/src/ast/function.rs index 246ea762b34e..82a86f773d2d 100644 --- a/quaint/src/ast/function.rs +++ b/quaint/src/ast/function.rs @@ -5,24 +5,18 @@ mod concat; mod count; mod json_array_agg; mod json_build_obj; -#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_extract; -#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_extract_array; -#[cfg(any(feature = "postgresql", feature = "mysql"))] mod json_unquote; mod lower; mod maximum; mod minimum; mod row_number; -#[cfg(feature = "postgresql")] mod row_to_json; -#[cfg(any(feature = "postgresql", feature = "mysql"))] mod search; mod sum; mod upper; -#[cfg(feature = "mysql")] mod uuid; pub use aggregate_to_string::*; @@ -32,24 +26,18 @@ pub use concat::*; pub use count::*; pub use json_array_agg::*; pub use json_build_obj::*; -#[cfg(any(feature = "postgresql", feature = "mysql"))] pub use json_extract::*; -#[cfg(any(feature = "postgresql", feature = "mysql"))] pub(crate) use json_extract_array::*; -#[cfg(any(feature = "postgresql", feature = "mysql"))] pub use json_unquote::*; pub use lower::*; pub use maximum::*; pub use minimum::*; pub use row_number::*; -#[cfg(feature = "postgresql")] pub use row_to_json::*; -#[cfg(any(feature = "mysql", feature = "postgresql"))] pub use search::*; pub use sum::*; pub use upper::*; -#[cfg(feature = "mysql")] pub use self::uuid::*; use super::{Aliasable, Expression}; @@ -64,24 +52,19 @@ pub struct Function<'a> { impl<'a> Function<'a> { pub fn returns_json(&self) -> bool { - match self.typ_ { - #[cfg(feature = "postgresql")] - FunctionType::RowToJson(_) => true, - #[cfg(feature = "mysql")] - FunctionType::JsonExtract(_) => true, - #[cfg(any(feature = "postgresql", feature = "mysql"))] - FunctionType::JsonExtractLastArrayElem(_) => true, - #[cfg(any(feature = "postgresql", feature = "mysql"))] 
- FunctionType::JsonExtractFirstArrayElem(_) => true, - _ => false, - } + matches!( + self.typ_, + FunctionType::RowToJson(_) + | FunctionType::JsonExtract(_) + | FunctionType::JsonExtractLastArrayElem(_) + | FunctionType::JsonExtractFirstArrayElem(_) + ) } } /// A database function type #[derive(Debug, Clone, PartialEq)] pub(crate) enum FunctionType<'a> { - #[cfg(feature = "postgresql")] RowToJson(RowToJson<'a>), RowNumber(RowNumber<'a>), Count(Count<'a>), @@ -94,27 +77,16 @@ pub(crate) enum FunctionType<'a> { Maximum(Maximum<'a>), Coalesce(Coalesce<'a>), Concat(Concat<'a>), - #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtract(JsonExtract<'a>), - #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtractLastArrayElem(JsonExtractLastArrayElem<'a>), - #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonExtractFirstArrayElem(JsonExtractFirstArrayElem<'a>), - #[cfg(any(feature = "postgresql", feature = "mysql"))] JsonUnquote(JsonUnquote<'a>), - #[cfg(feature = "postgresql")] JsonArrayAgg(JsonArrayAgg<'a>), - #[cfg(feature = "postgresql")] JsonBuildObject(JsonBuildObject<'a>), - #[cfg(any(feature = "postgresql", feature = "mysql"))] TextSearch(TextSearch<'a>), - #[cfg(any(feature = "postgresql", feature = "mysql"))] TextSearchRelevance(TextSearchRelevance<'a>), - #[cfg(feature = "mysql")] UuidToBin, - #[cfg(feature = "mysql")] UuidToBinSwapped, - #[cfg(feature = "mysql")] Uuid, } @@ -130,27 +102,24 @@ impl<'a> Aliasable<'a> for Function<'a> { } } -#[cfg(feature = "postgresql")] function!(RowToJson); -#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtract); -#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtractLastArrayElem); -#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonExtractFirstArrayElem); -#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(JsonUnquote); -#[cfg(any(feature = "postgresql", feature = "mysql"))] function!(TextSearch); -#[cfg(any(feature = 
"postgresql", feature = "mysql"))] function!(TextSearchRelevance); +function!(JsonArrayAgg); + +function!(JsonBuildObject); + function!( RowNumber, Count, @@ -162,7 +131,5 @@ function!( Minimum, Maximum, Coalesce, - Concat, - JsonArrayAgg, - JsonBuildObject + Concat ); diff --git a/quaint/src/ast/function/json_extract.rs b/quaint/src/ast/function/json_extract.rs index f45295026c74..11e7d20d507f 100644 --- a/quaint/src/ast/function/json_extract.rs +++ b/quaint/src/ast/function/json_extract.rs @@ -11,14 +11,11 @@ pub struct JsonExtract<'a> { #[derive(Debug, Clone, PartialEq, Eq)] pub enum JsonPath<'a> { - #[cfg(feature = "mysql")] String(Cow<'a, str>), - #[cfg(feature = "postgresql")] Array(Vec>), } impl<'a> JsonPath<'a> { - #[cfg(feature = "mysql")] pub fn string(string: S) -> JsonPath<'a> where S: Into>, @@ -26,7 +23,6 @@ impl<'a> JsonPath<'a> { JsonPath::String(string.into()) } - #[cfg(feature = "postgresql")] pub fn array(array: A) -> JsonPath<'a> where V: Into>, diff --git a/quaint/src/ast/function/row_to_json.rs b/quaint/src/ast/function/row_to_json.rs index 9ffeb6653484..f670a622f3bc 100644 --- a/quaint/src/ast/function/row_to_json.rs +++ b/quaint/src/ast/function/row_to_json.rs @@ -2,7 +2,6 @@ use super::Function; use crate::ast::Table; #[derive(Debug, Clone, PartialEq)] -#[cfg(feature = "postgresql")] /// A representation of the `ROW_TO_JSON` function in the database. 
/// Only for `Postgresql` pub struct RowToJson<'a> { @@ -38,7 +37,6 @@ pub struct RowToJson<'a> { /// # Ok(()) /// # } /// ``` -#[cfg(feature = "postgresql")] pub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a> where T: Into>, diff --git a/quaint/src/ast/function/search.rs b/quaint/src/ast/function/search.rs index 92ba6978e160..2626f09e40c7 100644 --- a/quaint/src/ast/function/search.rs +++ b/quaint/src/ast/function/search.rs @@ -25,7 +25,6 @@ pub struct TextSearch<'a> { /// # Ok(()) /// # } /// ``` -#[cfg(any(feature = "postgresql", feature = "mysql"))] pub fn text_search<'a, T: Clone>(exprs: &[T]) -> super::Function<'a> where T: Into>, @@ -61,7 +60,6 @@ pub struct TextSearchRelevance<'a> { /// # Ok(()) /// # } /// ``` -#[cfg(any(feature = "postgresql", feature = "mysql"))] pub fn text_search_relevance<'a, E: Clone, Q>(exprs: &[E], query: Q) -> super::Function<'a> where E: Into>, diff --git a/quaint/src/ast/function/uuid.rs b/quaint/src/ast/function/uuid.rs index f1528eae7523..610b0abec5ad 100644 --- a/quaint/src/ast/function/uuid.rs +++ b/quaint/src/ast/function/uuid.rs @@ -13,7 +13,6 @@ use crate::ast::Expression; /// # Ok(()) /// # } /// ``` -#[cfg(feature = "mysql")] pub fn uuid_to_bin() -> Expression<'static> { let func = Function { typ_: FunctionType::UuidToBin, @@ -56,7 +55,6 @@ pub fn uuid_to_bin_swapped() -> Expression<'static> { /// # Ok(()) /// # } /// ``` -#[cfg(feature = "mysql")] pub fn native_uuid() -> Expression<'static> { let func = Function { typ_: FunctionType::Uuid, diff --git a/quaint/src/ast/insert.rs b/quaint/src/ast/insert.rs index 6f3fe80b2de2..12d65ce29dcc 100644 --- a/quaint/src/ast/insert.rs +++ b/quaint/src/ast/insert.rs @@ -258,7 +258,6 @@ impl<'a> Insert<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "mssql", feature = "sqlite"))] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/ast/row.rs b/quaint/src/ast/row.rs index 
e556cee966af..034eca25c273 100644 --- a/quaint/src/ast/row.rs +++ b/quaint/src/ast/row.rs @@ -1,4 +1,3 @@ -#[cfg(any(feature = "postgresql", feature = "mysql"))] use super::compare::JsonType; use crate::ast::{Comparable, Compare, Expression}; use std::borrow::Cow; @@ -283,7 +282,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.compare_raw(raw_comparator, right) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -293,7 +291,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_contains(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_contains(self, item: T) -> Compare<'a> where T: Into>, @@ -303,7 +300,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_contains(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -313,7 +309,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_begins_with(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_begins_with(self, item: T) -> Compare<'a> where T: Into>, @@ -323,7 +318,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_begins_with(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -333,7 +327,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_ends_into(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_array_not_ends_into(self, item: T) -> Compare<'a> where T: Into>, @@ -343,7 +336,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_array_not_ends_into(item) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn json_type_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -353,7 +345,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_type_equals(json_type) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn 
json_type_not_equals(self, json_type: T) -> Compare<'a> where T: Into>, @@ -363,7 +354,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.json_type_not_equals(json_type) } - #[cfg(feature = "postgresql")] fn matches(self, query: T) -> Compare<'a> where T: Into>, @@ -373,7 +363,6 @@ impl<'a> Comparable<'a> for Row<'a> { value.matches(query) } - #[cfg(feature = "postgresql")] fn not_matches(self, query: T) -> Compare<'a> where T: Into>, @@ -383,14 +372,12 @@ impl<'a> Comparable<'a> for Row<'a> { value.not_matches(query) } - #[cfg(feature = "postgresql")] fn any(self) -> Compare<'a> { let value: Expression<'a> = self.into(); value.any() } - #[cfg(feature = "postgresql")] fn all(self) -> Compare<'a> { let value: Expression<'a> = self.into(); diff --git a/quaint/src/ast/update.rs b/quaint/src/ast/update.rs index 751655bd82e1..a690f070ea38 100644 --- a/quaint/src/ast/update.rs +++ b/quaint/src/ast/update.rs @@ -149,7 +149,7 @@ impl<'a> Update<'a> { /// # Ok(()) /// # } /// ``` - #[cfg(any(feature = "postgresql", feature = "sqlite"))] + #[cfg(any(feature = "postgresql", feature = "sqlite", feature = "mysql"))] pub fn returning(mut self, columns: I) -> Self where K: Into>, diff --git a/quaint/src/visitor.rs b/quaint/src/visitor.rs index 58baa09a791f..4561479289e5 100644 --- a/quaint/src/visitor.rs +++ b/quaint/src/visitor.rs @@ -121,37 +121,26 @@ pub trait Visitor<'a> { /// Visit a non-parameterized value. 
fn visit_raw_value(&mut self, value: Value<'a>) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, left: Expression<'a>, right: JsonType<'a>, not: bool) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> Result; - #[cfg(feature = "postgresql")] fn visit_json_array_agg(&mut self, array_agg: JsonArrayAgg<'a>) -> Result; - #[cfg(feature = "postgresql")] fn visit_json_build_object(&mut self, build_obj: JsonBuildObject<'a>) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_text_search(&mut self, text_search: TextSearch<'a>) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_matches(&mut self, left: Expression<'a>, right: std::borrow::Cow<'a, str>, not: bool) -> Result; - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_text_search_relevance(&mut self, text_search_relevance: TextSearchRelevance<'a>) -> Result; fn visit_parameterized_enum(&mut self, variant: EnumVariant<'a>, name: Option>) -> Result { @@ -975,23 +964,18 @@ pub trait Visitor<'a> { self.write(" ")?; self.visit_expression(*right) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] Compare::JsonCompare(json_compare) => match json_compare { JsonCompare::ArrayContains(left, right) => 
self.visit_json_array_contains(*left, *right, false), JsonCompare::ArrayNotContains(left, right) => self.visit_json_array_contains(*left, *right, true), JsonCompare::TypeEquals(left, json_type) => self.visit_json_type_equals(*left, json_type, false), JsonCompare::TypeNotEquals(left, json_type) => self.visit_json_type_equals(*left, json_type, true), }, - #[cfg(feature = "postgresql")] Compare::Matches(left, right) => self.visit_matches(*left, right, false), - #[cfg(feature = "postgresql")] Compare::NotMatches(left, right) => self.visit_matches(*left, right, true), - #[cfg(feature = "postgresql")] Compare::Any(left) => { self.write("ANY")?; self.surround_with("(", ")", |s| s.visit_expression(*left)) } - #[cfg(feature = "postgresql")] Compare::All(left) => { self.write("ALL")?; self.surround_with("(", ")", |s| s.visit_expression(*left)) @@ -1071,7 +1055,6 @@ pub trait Visitor<'a> { FunctionType::AggregateToString(agg) => { self.visit_aggregate_to_string(agg.value.as_ref().clone())?; } - #[cfg(feature = "postgresql")] FunctionType::RowToJson(row_to_json) => { self.write("ROW_TO_JSON")?; self.surround_with("(", ")", |ref mut s| s.visit_table(row_to_json.expr, false))? 
@@ -1101,48 +1084,37 @@ pub trait Visitor<'a> { self.write("COALESCE")?; self.surround_with("(", ")", |s| s.visit_columns(coalesce.exprs))?; } - #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtract(json_extract) => { self.visit_json_extract(json_extract)?; } - #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractFirstArrayElem(extract) => { self.visit_json_extract_first_array_item(extract)?; } - #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonExtractLastArrayElem(extract) => { self.visit_json_extract_last_array_item(extract)?; } - #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::JsonUnquote(unquote) => { self.visit_json_unquote(unquote)?; } - #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::TextSearch(text_search) => { self.visit_text_search(text_search)?; } - #[cfg(any(feature = "postgresql", feature = "mysql"))] FunctionType::TextSearchRelevance(text_search_relevance) => { self.visit_text_search_relevance(text_search_relevance)?; } - #[cfg(feature = "mysql")] FunctionType::UuidToBin => { self.write("uuid_to_bin(uuid())")?; } - #[cfg(feature = "mysql")] FunctionType::UuidToBinSwapped => { self.write("uuid_to_bin(uuid(), 1)")?; } - #[cfg(feature = "mysql")] FunctionType::Uuid => self.write("uuid()")?, FunctionType::Concat(concat) => { self.visit_concat(concat)?; } - #[cfg(feature = "postgresql")] FunctionType::JsonArrayAgg(array_agg) => { self.visit_json_array_agg(array_agg)?; } - #[cfg(feature = "postgresql")] FunctionType::JsonBuildObject(build_obj) => { self.visit_json_build_object(build_obj)?; } diff --git a/quaint/src/visitor/mysql.rs b/quaint/src/visitor/mysql.rs index a406000cd7c0..54029e1b6e98 100644 --- a/quaint/src/visitor/mysql.rs +++ b/quaint/src/visitor/mysql.rs @@ -418,7 +418,6 @@ impl<'a> Visitor<'a> for Mysql<'a> { self.write(", ")?; match json_extract.path.clone() { - #[cfg(feature = "postgresql")] JsonPath::Array(_) => 
panic!("JSON path array notation is not supported for MySQL"), JsonPath::String(path) => self.visit_parameterized(Value::text(path))?, } diff --git a/quaint/src/visitor/postgres.rs b/quaint/src/visitor/postgres.rs index 8ab679f42701..b80162e7b5c5 100644 --- a/quaint/src/visitor/postgres.rs +++ b/quaint/src/visitor/postgres.rs @@ -409,7 +409,6 @@ impl<'a> Visitor<'a> for Postgres<'a> { #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result { match json_extract.path { - #[cfg(feature = "mysql")] JsonPath::String(_) => panic!("JSON path string notation is not supported for Postgres"), JsonPath::Array(json_path) => { self.write("(")?; diff --git a/quaint/src/visitor/sqlite.rs b/quaint/src/visitor/sqlite.rs index d6289078393a..e3d1b14dba47 100644 --- a/quaint/src/visitor/sqlite.rs +++ b/quaint/src/visitor/sqlite.rs @@ -282,12 +282,10 @@ impl<'a> Visitor<'a> for Sqlite<'a> { }) } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract(&mut self, _json_extract: JsonExtract<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_array_contains( &mut self, _left: Expression<'a>, @@ -297,17 +295,14 @@ impl<'a> Visitor<'a> for Sqlite<'a> { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_type_equals(&mut self, _left: Expression<'a>, _json_type: JsonType, _not: bool) -> visitor::Result { unimplemented!("JSON_TYPE is not yet supported on SQLite") } - #[cfg(feature = "postgresql")] fn visit_text_search(&mut self, _text_search: crate::prelude::TextSearch<'a>) -> visitor::Result { unimplemented!("Full-text search is not yet supported on SQLite") } - #[cfg(feature = "postgresql")] fn visit_matches( &mut self, _left: Expression<'a>, @@ -317,32 +312,26 @@ impl<'a> Visitor<'a> for 
Sqlite<'a> { unimplemented!("Full-text search is not yet supported on SQLite") } - #[cfg(feature = "postgresql")] fn visit_text_search_relevance(&mut self, _text_search_relevance: TextSearchRelevance<'a>) -> visitor::Result { unimplemented!("Full-text search is not yet supported on SQLite") } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_last_array_item(&mut self, _extract: JsonExtractLastArrayElem<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_extract_first_array_item(&mut self, _extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(any(feature = "postgresql", feature = "mysql"))] fn visit_json_unquote(&mut self, _json_unquote: JsonUnquote<'a>) -> visitor::Result { unimplemented!("JSON filtering is not yet supported on SQLite") } - #[cfg(feature = "postgresql")] fn visit_json_array_agg(&mut self, _array_agg: JsonArrayAgg<'a>) -> visitor::Result { unimplemented!("JSON_AGG is not yet supported on SQLite") } - #[cfg(feature = "postgresql")] fn visit_json_build_object(&mut self, _build_obj: JsonBuildObject<'a>) -> visitor::Result { unimplemented!("JSON_BUILD_OBJECT is not yet supported on SQLite") } diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 7a4baeefe678..4c55cff55420 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -4,12 +4,25 @@ name = "sql-query-connector" version = "0.1.0" [features] -default = ["postgresql", "sqlite", "mysql"] +postgresql = ["relation_joins", "quaint/postgresql", "psl/postgresql"] +mysql = ["relation_joins", "quaint/mysql", "psl/mysql"] +sqlite = ["quaint/sqlite", "psl/sqlite"] +mssql = ["quaint/mssql"] +cockroachdb = ["relation_joins", 
"quaint/postgresql", "psl/cockroachdb"] vendored-openssl = ["quaint/vendored-openssl"] -postgresql = ["quaint/postgresql"] -sqlite = ["quaint/sqlite"] -mysql = ["quaint/mysql"] - +native_all = [ + "sqlite", + "mysql", + "postgresql", + "mssql", + "cockroachdb", + "quaint/native", + "quaint/pooled", +] +# TODO: At the moment of writing (rustc 1.77.0), can_have_capability from psl does not eliminate joins +# code from bundle for some reason, so we are doing it explicitly. Check with a newer version of compiler - if elimination +# happens successfully, we don't need this feature anymore +relation_joins = [] # Enable Driver Adapters driver-adapters = [] @@ -31,12 +44,8 @@ uuid.workspace = true opentelemetry = { version = "0.17", features = ["tokio"] } tracing-opentelemetry = "0.17.3" cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } - -[target.'cfg(not(target_arch = "wasm32"))'.dependencies] quaint.workspace = true -[target.'cfg(target_arch = "wasm32")'.dependencies] -quaint = { path = "../../../quaint", default-features = false } [dependencies.connector-interface] package = "query-connector" diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/mod.rs b/query-engine/connectors/sql-query-connector/src/database/operations/mod.rs index b4eadcceb221..ce7140776356 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/mod.rs @@ -1,3 +1,4 @@ +#[cfg(feature = "relation_joins")] pub mod coerce; pub mod read; pub(crate) mod update; diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index 24b576c936c2..5587e7d743fd 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -1,4 +1,3 @@ 
-use super::coerce::{coerce_record_with_json_relation, IndexedSelection}; use crate::{ column_metadata, model_extensions::*, @@ -21,18 +20,23 @@ pub(crate) async fn get_single_record( ctx: &Context<'_>, ) -> crate::Result> { match relation_load_strategy { + #[cfg(feature = "relation_joins")] RelationLoadStrategy::Join => get_single_record_joins(conn, model, filter, selected_fields, ctx).await, + #[cfg(not(feature = "relation_joins"))] + RelationLoadStrategy::Join => unreachable!(), RelationLoadStrategy::Query => get_single_record_wo_joins(conn, model, filter, selected_fields, ctx).await, } } -pub(crate) async fn get_single_record_joins( +#[cfg(feature = "relation_joins")] +async fn get_single_record_joins( conn: &dyn Queryable, model: &Model, filter: &Filter, selected_fields: &FieldSelection, ctx: &Context<'_>, ) -> crate::Result> { + use super::coerce::coerce_record_with_json_relation; let selected_fields = selected_fields.to_virtuals_last(); let field_names: Vec<_> = selected_fields.prisma_names_grouping_virtuals().collect(); let idents = selected_fields.type_identifiers_with_arities_grouping_virtuals(); @@ -58,7 +62,7 @@ pub(crate) async fn get_single_record_joins( Ok(record.map(|record| SingleRecord { record, field_names })) } -pub(crate) async fn get_single_record_wo_joins( +async fn get_single_record_wo_joins( conn: &dyn Queryable, model: &Model, filter: &Filter, @@ -117,20 +121,25 @@ pub(crate) async fn get_many_records( ctx: &Context<'_>, ) -> crate::Result { match relation_load_strategy { + #[cfg(feature = "relation_joins")] RelationLoadStrategy::Join => get_many_records_joins(conn, model, query_arguments, selected_fields, ctx).await, + #[cfg(not(feature = "relation_joins"))] + RelationLoadStrategy::Join => unreachable!(), RelationLoadStrategy::Query => { get_many_records_wo_joins(conn, model, query_arguments, selected_fields, ctx).await } } } -pub(crate) async fn get_many_records_joins( +#[cfg(feature = "relation_joins")] +async fn get_many_records_joins( 
conn: &dyn Queryable, _model: &Model, query_arguments: QueryArguments, selected_fields: &FieldSelection, ctx: &Context<'_>, ) -> crate::Result { + use super::coerce::coerce_record_with_json_relation; let selected_fields = selected_fields.to_virtuals_last(); let field_names: Vec<_> = selected_fields.prisma_names_grouping_virtuals().collect(); let idents = selected_fields.type_identifiers_with_arities_grouping_virtuals(); @@ -176,7 +185,7 @@ pub(crate) async fn get_many_records_joins( Ok(records) } -pub(crate) async fn get_many_records_wo_joins( +async fn get_many_records_wo_joins( conn: &dyn Queryable, model: &Model, mut query_arguments: QueryArguments, @@ -409,11 +418,13 @@ async fn group_by_aggregate( /// Find the indexes of the relation records and the virtual selection objects to traverse a set of /// records faster when coercing JSON values. +#[cfg(feature = "relation_joins")] fn get_selection_indexes<'a>( relations: Vec<&'a RelationSelection>, virtuals: Vec<&'a VirtualSelection>, field_names: &'a [String], -) -> Vec<(usize, IndexedSelection<'a>)> { +) -> Vec<(usize, super::coerce::IndexedSelection<'a>)> { + use super::coerce::IndexedSelection; field_names .iter() .enumerate() diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs index dd27c35fb087..edea91d5aef7 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/write.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/write.rs @@ -95,7 +95,7 @@ pub(crate) async fn create_record( ) -> crate::Result { let id_field: FieldSelection = model.primary_identifier(); - let returned_id = if *sql_family == SqlFamily::Mysql { + let returned_id = if sql_family.is_mysql() { generate_id(conn, &id_field, &args, ctx) .await? 
.or_else(|| args.as_selection_result(ModelProjection::from(id_field))) @@ -104,7 +104,7 @@ pub(crate) async fn create_record( }; let args = match returned_id { - Some(ref pk) if *sql_family == SqlFamily::Mysql => { + Some(ref pk) if sql_family.is_mysql() => { for (field, value) in pk.pairs.iter() { let field = DatasourceFieldName(field.db_name().into()); let value = WriteOperation::scalar_set(value.clone()); diff --git a/query-engine/connectors/sql-query-connector/src/filter/alias.rs b/query-engine/connectors/sql-query-connector/src/filter/alias.rs index af3ad932748a..10fc31080aae 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/alias.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/alias.rs @@ -50,6 +50,7 @@ impl Alias { } } + #[cfg(feature = "relation_joins")] pub fn to_table_string(&self) -> String { self.to_string(Some(AliasMode::Table)) } diff --git a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs index b27ab539e604..f067cf94f8c3 100644 --- a/query-engine/connectors/sql-query-connector/src/filter/visitor.rs +++ b/query-engine/connectors/sql-query-connector/src/filter/visitor.rs @@ -3,6 +3,7 @@ use crate::join_utils::{compute_one2m_join, AliasedJoin}; use crate::{model_extensions::*, Context}; use psl::datamodel_connector::ConnectorCapability; +use psl::reachable_only_with_capability; use quaint::ast::concat; use quaint::ast::*; use query_structure::{filter::*, prelude::*}; @@ -68,6 +69,7 @@ impl FilterVisitor { self.parent_alias } + #[cfg(feature = "relation_joins")] pub fn set_parent_alias_opt(mut self, alias: Option) -> Self { self.parent_alias = alias; self @@ -353,6 +355,7 @@ impl FilterVisitorExt for FilterVisitor { fn visit_scalar_filter(&mut self, filter: ScalarFilter, ctx: &Context<'_>) -> ConditionTree<'static> { match filter.condition { ScalarCondition::Search(_, _) | ScalarCondition::NotSearch(_, _) => { + 
reachable_only_with_capability!(ConnectorCapability::FullTextSearch); let mut projections = match filter.condition.clone() { ScalarCondition::Search(_, proj) => proj, ScalarCondition::NotSearch(_, proj) => proj, @@ -601,6 +604,8 @@ impl FilterVisitorExt for FilterVisitor { } fn visit_scalar_list_filter(&mut self, filter: ScalarListFilter, ctx: &Context<'_>) -> ConditionTree<'static> { + reachable_only_with_capability!(ConnectorCapability::ScalarLists); + let comparable: Expression = filter.field.aliased_col(self.parent_alias(), ctx).into(); let cond = filter.condition; let field = &filter.field; @@ -705,15 +710,18 @@ fn convert_scalar_filter( ctx: &Context<'_>, ) -> ConditionTree<'static> { match cond { - ScalarCondition::JsonCompare(json_compare) => convert_json_filter( - comparable, - json_compare, - reverse, - fields.first().unwrap(), - mode, - alias, - ctx, - ), + ScalarCondition::JsonCompare(json_compare) => { + reachable_only_with_capability!(ConnectorCapability::JsonFiltering); + convert_json_filter( + comparable, + json_compare, + reverse, + fields.first().unwrap(), + mode, + alias, + ctx, + ) + } _ => match mode { QueryMode::Default => default_scalar_filter(comparable, cond, fields, alias, ctx), QueryMode::Insensitive => { @@ -950,6 +958,7 @@ fn default_scalar_filter( comparable.not_equals(Expression::from(field_ref.aliased_col(alias, ctx)).all()) } ScalarCondition::Search(value, _) => { + reachable_only_with_capability!(ConnectorCapability::FullTextSearch); let query: String = value .into_value() .unwrap() @@ -959,6 +968,7 @@ fn default_scalar_filter( comparable.matches(query) } ScalarCondition::NotSearch(value, _) => { + reachable_only_with_capability!(ConnectorCapability::FullTextSearch); let query: String = value .into_value() .unwrap() @@ -1130,6 +1140,7 @@ fn insensitive_scalar_filter( comparable.compare_raw("NOT ILIKE", Expression::from(field_ref.aliased_col(alias, ctx)).all()) } ScalarCondition::Search(value, _) => { + 
reachable_only_with_capability!(ConnectorCapability::FullTextSearch); let query: String = value .into_value() .unwrap() @@ -1139,6 +1150,7 @@ fn insensitive_scalar_filter( comparable.matches(query) } ScalarCondition::NotSearch(value, _) => { + reachable_only_with_capability!(ConnectorCapability::FullTextSearch); let query: String = value .into_value() .unwrap() diff --git a/query-engine/connectors/sql-query-connector/src/ordering.rs b/query-engine/connectors/sql-query-connector/src/ordering.rs index aedb7a75fd99..a5c04097b9e3 100644 --- a/query-engine/connectors/sql-query-connector/src/ordering.rs +++ b/query-engine/connectors/sql-query-connector/src/ordering.rs @@ -1,5 +1,6 @@ use crate::{join_utils::*, model_extensions::*, query_arguments_ext::QueryArgumentsExt, Context}; use itertools::Itertools; +use psl::{datamodel_connector::ConnectorCapability, reachable_only_with_capability}; use quaint::ast::*; use query_structure::*; @@ -24,6 +25,7 @@ pub(crate) struct OrderByBuilder { } impl OrderByBuilder { + #[cfg(feature = "relation_joins")] pub(crate) fn with_parent_alias(mut self, alias: Option) -> Self { self.parent_alias = alias; self @@ -44,7 +46,10 @@ impl OrderByBuilder { self.build_order_aggr_scalar(order_by, needs_reversed_order, ctx) } OrderBy::ToManyAggregation(order_by) => self.build_order_aggr_rel(order_by, needs_reversed_order, ctx), - OrderBy::Relevance(order_by) => self.build_order_relevance(order_by, needs_reversed_order, ctx), + OrderBy::Relevance(order_by) => { + reachable_only_with_capability!(ConnectorCapability::FullTextSearch); + self.build_order_relevance(order_by, needs_reversed_order, ctx) + } }) .collect_vec() } diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs index 7f16b84f95fd..199847a2f340 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs +++ 
b/query-engine/connectors/sql-query-connector/src/query_builder/mod.rs @@ -1,4 +1,5 @@ pub(crate) mod read; +#[cfg(feature = "relation_joins")] pub(crate) mod select; pub(crate) mod write; diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs b/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs index 5aec46423b3b..27997311f71c 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/select/mod.rs @@ -4,7 +4,10 @@ mod subquery; use std::borrow::Cow; use tracing::Span; -use psl::datamodel_connector::{ConnectorCapability, Flavour}; +use psl::{ + datamodel_connector::{ConnectorCapability, Flavour}, + has_capability, +}; use quaint::prelude::*; use query_structure::*; @@ -699,9 +702,5 @@ fn connector_flavour(args: &QueryArguments) -> Flavour { } fn supports_lateral_join(args: &QueryArguments) -> bool { - args.model() - .dm - .schema - .connector - .has_capability(ConnectorCapability::LateralJoin) + has_capability(args.model().dm.schema.connector, ConnectorCapability::LateralJoin) } diff --git a/query-engine/core/src/interpreter/query_interpreters/read.rs b/query-engine/core/src/interpreter/query_interpreters/read.rs index 89747d33dbe3..8b279bfad0ec 100644 --- a/query-engine/core/src/interpreter/query_interpreters/read.rs +++ b/query-engine/core/src/interpreter/query_interpreters/read.rs @@ -2,6 +2,7 @@ use super::{inmemory_record_processor::InMemoryRecordProcessor, *}; use crate::{interpreter::InterpretationResult, query_ast::*, result_ast::*}; use connector::{error::ConnectorError, ConnectionLike}; use futures::future::{BoxFuture, FutureExt}; +use psl::can_support_relation_load_strategy; use query_structure::{ManyRecords, RelationLoadStrategy, RelationSelection}; use user_facing_errors::KnownError; @@ -156,6 +157,9 @@ fn read_many_by_joins( query: ManyRecordsQuery, trace_id: Option, ) -> BoxFuture<'_, 
InterpretationResult> { + if !can_support_relation_load_strategy() { + unreachable!() + } let fut = async move { let result = tx .get_many_records( diff --git a/query-engine/dmmf/Cargo.toml b/query-engine/dmmf/Cargo.toml index b2ad4fc4c88c..4595c5dada0a 100644 --- a/query-engine/dmmf/Cargo.toml +++ b/query-engine/dmmf/Cargo.toml @@ -10,13 +10,15 @@ serde.workspace = true serde_json.workspace = true schema = { path = "../schema" } indexmap.workspace = true -query-structure = { path = "../query-structure", features = ["default_generators"] } +query-structure = { path = "../query-structure", features = [ + "default_generators", +] } [dev-dependencies] expect-test = "1.2.2" indoc.workspace = true pretty_assertions = "1" flate2 = "1.0" -similar = { version = "2.2.1", features=["text", "inline", "bytes"] } +similar = { version = "2.2.1", features = ["text", "inline", "bytes"] } colored = "2.0.0" itertools.workspace = true diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml index 7acc8f98336e..83997d887dee 100644 --- a/query-engine/query-engine-node-api/Cargo.toml +++ b/query-engine/query-engine-node-api/Cargo.toml @@ -20,12 +20,14 @@ driver-adapters = [ anyhow = "1" async-trait.workspace = true query-core = { path = "../core", features = ["metrics"] } -request-handlers = { path = "../request-handlers" } +request-handlers = { path = "../request-handlers", features = ["native"] } query-connector = { path = "../connectors/query-connector" } query-engine-common = { path = "../../libs/query-engine-common" } user-facing-errors = { path = "../../libs/user-facing-errors" } -psl.workspace = true -sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" } +psl = { workspace = true, features = ["all"] } +sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector", features = [ + "native_all", +] } query-structure = { path = "../query-structure" } 
driver-adapters = { path = "../driver-adapters", features = [ "postgresql", diff --git a/query-engine/query-engine-wasm/Cargo.toml b/query-engine/query-engine-wasm/Cargo.toml index 89c3b3d7de3e..c8aaf3205e24 100644 --- a/query-engine/query-engine-wasm/Cargo.toml +++ b/query-engine/query-engine-wasm/Cargo.toml @@ -9,9 +9,24 @@ crate-type = ["cdylib"] name = "query_engine_wasm" [features] -sqlite = ["driver-adapters/sqlite", "sql-connector/sqlite"] -postgresql = ["driver-adapters/postgresql", "sql-connector/postgresql"] -mysql = ["driver-adapters/mysql", "sql-connector/mysql"] +sqlite = [ + "driver-adapters/sqlite", + "sql-connector/sqlite", + "psl/sqlite", + "request-handlers/sqlite", +] +postgresql = [ + "driver-adapters/postgresql", + "sql-connector/postgresql", + "psl/postgresql", + "request-handlers/postgresql", +] +mysql = [ + "driver-adapters/mysql", + "sql-connector/mysql", + "psl/mysql", + "request-handlers/mysql", +] [dependencies] @@ -22,15 +37,14 @@ async-trait.workspace = true user-facing-errors = { path = "../../libs/user-facing-errors" } psl.workspace = true query-structure = { path = "../query-structure" } -sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector", default-features = false } +sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" } request-handlers = { path = "../request-handlers", default-features = false, features = [ "sql", "driver-adapters", ] } query-core = { path = "../core" } driver-adapters = { path = "../driver-adapters" } -quaint = { path = "../../quaint", default-features = false } - +quaint.workspace = true connection-string.workspace = true js-sys.workspace = true serde-wasm-bindgen.workspace = true diff --git a/query-engine/query-engine-wasm/build.sh b/query-engine/query-engine-wasm/build.sh index 0db1aad5bf08..6cb767b169e6 100755 --- a/query-engine/query-engine-wasm/build.sh +++ b/query-engine/query-engine-wasm/build.sh @@ -105,11 +105,17 @@ 
optimize() { } report_size() { - local CONNECTOR="$1" + local CONNECTOR + local GZ_SIZE + local FORMATTED_GZ_SIZE + + CONNECTOR="$1" + GZ_SIZE=$(gzip -c "${OUT_FOLDER}/$CONNECTOR/query_engine_bg.wasm" | wc -c) + FORMATTED_GZ_SIZE=$(echo "$GZ_SIZE"|numfmt --format '%.3f' --to=iec-i --suffix=B) echo "$CONNECTOR:" echo "ℹ️ raw: $(du -h "${OUT_FOLDER}/$CONNECTOR/query_engine_bg.wasm")" - echo "ℹ️ zip: $(gzip -c "${OUT_FOLDER}/$CONNECTOR/query_engine_bg.wasm" | wc -c) bytes" + echo "ℹ️ zip: $GZ_SIZE bytes ($FORMATTED_GZ_SIZE)" echo "" } diff --git a/query-engine/query-engine/Cargo.toml b/query-engine/query-engine/Cargo.toml index 45f433b892ea..f0f41fcbe4f0 100644 --- a/query-engine/query-engine/Cargo.toml +++ b/query-engine/query-engine/Cargo.toml @@ -6,7 +6,7 @@ version = "0.1.0" [features] default = ["sql", "mongodb"] mongodb = ["mongodb-connector"] -sql = ["sql-connector"] +sql = ["sql-connector", "sql-connector/native_all"] vendored-openssl = ["sql-connector/vendored-openssl"] [dependencies] @@ -17,11 +17,11 @@ base64 = "0.13" connection-string.workspace = true connector = { path = "../connectors/query-connector", package = "query-connector" } enumflags2.workspace = true -psl.workspace = true +psl = { workspace = true, features = ["all"] } graphql-parser = { git = "https://github.com/prisma/graphql-parser" } mongodb-connector = { path = "../connectors/mongodb-query-connector", optional = true, package = "mongodb-query-connector" } query-core = { path = "../core", features = ["metrics"] } -request-handlers = { path = "../request-handlers" } +request-handlers = { path = "../request-handlers", features = ["native"] } serde.workspace = true serde_json.workspace = true sql-connector = { path = "../connectors/sql-query-connector", optional = true, package = "sql-query-connector" } @@ -34,9 +34,9 @@ tracing-opentelemetry = "0.17.3" tracing-subscriber = { version = "0.3", features = ["json", "env-filter"] } opentelemetry = { version = "0.17.0", features = ["rt-tokio"] } 
opentelemetry-otlp = { version = "0.10", features = ["tls", "tls-roots"] } -query-engine-metrics = {path = "../metrics"} +query-engine-metrics = { path = "../metrics" } -user-facing-errors = {path = "../../libs/user-facing-errors"} +user-facing-errors = { path = "../../libs/user-facing-errors" } [dev-dependencies] serial_test = "*" diff --git a/query-engine/query-structure/src/query_arguments.rs b/query-engine/query-structure/src/query_arguments.rs index eb895b46711d..d8a43cff68c5 100644 --- a/query-engine/query-structure/src/query_arguments.rs +++ b/query-engine/query-structure/src/query_arguments.rs @@ -1,4 +1,4 @@ -use psl::{datamodel_connector::ConnectorCapability, PreviewFeature}; +use psl::{datamodel_connector::ConnectorCapability, has_capability, PreviewFeature}; use crate::*; @@ -118,11 +118,7 @@ impl QueryArguments { } fn connector_supports_distinct_on(&self) -> bool { - self.model() - .dm - .schema - .connector - .has_capability(ConnectorCapability::DistinctOn) + has_capability(self.model().dm.schema.connector, ConnectorCapability::DistinctOn) } /// An unstable cursor is a cursor that is used in conjunction with an unstable (non-unique) combination of orderBys. 
diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index 133f670c2b06..8c2277948193 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -4,11 +4,11 @@ version = "0.1.0" edition = "2021" [dependencies] +psl.workspace = true query-structure = { path = "../query-structure" } query-core = { path = "../core" } user-facing-errors = { path = "../../libs/user-facing-errors" } -quaint = { path = "../../quaint", default-features = false } -psl.workspace = true +quaint.workspace = true dmmf_crate = { path = "../dmmf", package = "dmmf" } itertools.workspace = true graphql-parser = { git = "https://github.com/prisma/graphql-parser", optional = true } @@ -24,7 +24,7 @@ connection-string.workspace = true once_cell = "1.15" mongodb-query-connector = { path = "../connectors/mongodb-query-connector", optional = true } -sql-query-connector = { path = "../connectors/sql-query-connector", optional = true } +sql-query-connector = { path = "../connectors/sql-query-connector", optional = true, default-features = false } [dev-dependencies] insta = "1.7.1" @@ -32,14 +32,18 @@ schema = { path = "../schema" } codspeed-criterion-compat = "1.1.0" [features] -default = ["sql", "mongodb", "native", "graphql-protocol"] -mongodb = ["mongodb-query-connector"] +mongodb = ["mongodb-query-connector", "psl/mongodb"] sql = ["sql-query-connector"] +postgresql = ["sql", "sql-query-connector/postgresql", "psl/postgresql"] +mysql = ["sql", "sql-query-connector/mysql", "psl/mysql"] +sqlite = ["sql", "sql-query-connector/sqlite", "psl/sqlite"] driver-adapters = ["sql-query-connector/driver-adapters"] native = [ "mongodb", - "sql-query-connector", - "quaint/native", + "sql", + "graphql-protocol", + "psl/all", + "sql-query-connector/native_all", "query-core/metrics", ] graphql-protocol = ["query-core/graphql-protocol", "dep:graphql-parser"] diff --git a/query-engine/schema/Cargo.toml b/query-engine/schema/Cargo.toml 
index 12664344572d..77a39bcfa85f 100644 --- a/query-engine/schema/Cargo.toml +++ b/query-engine/schema/Cargo.toml @@ -15,3 +15,6 @@ codspeed-criterion-compat = "1.1.0" [[bench]] name = "schema_builder_bench" harness = false + +[features] +all_connectors = ["psl/all"] diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index dbd96dd4ab77..e677b10e75a5 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -1,7 +1,8 @@ use crate::{IdentifierType, ObjectType, OutputField}; use psl::{ + can_support_relation_load_strategy, datamodel_connector::{Connector, ConnectorCapabilities, ConnectorCapability, JoinStrategySupport, RelationMode}, - PreviewFeature, PreviewFeatures, + has_capability, PreviewFeature, PreviewFeatures, }; use query_structure::{ast, InternalDataModel}; use std::{collections::HashMap, fmt}; @@ -63,7 +64,9 @@ impl QuerySchema { relation_mode, mutation_fields: Default::default(), query_fields: Default::default(), - join_strategy_support: if preview_features.contains(PreviewFeature::RelationJoins) { + join_strategy_support: if preview_features.contains(PreviewFeature::RelationJoins) + && can_support_relation_load_strategy() + { connector.runtime_join_strategy_support() } else { JoinStrategySupport::No @@ -98,22 +101,23 @@ impl QuerySchema { } pub(crate) fn supports_any(&self, capabilities: &[ConnectorCapability]) -> bool { - capabilities.iter().any(|c| self.connector.has_capability(*c)) + capabilities.iter().any(|c| has_capability(self.connector, *c)) } pub(crate) fn can_full_text_search(&self) -> bool { - self.has_feature(PreviewFeature::FullTextSearch) - && (self.has_capability(ConnectorCapability::FullTextSearchWithoutIndex) - || self.has_capability(ConnectorCapability::FullTextSearchWithIndex)) + self.has_feature(PreviewFeature::FullTextSearch) && self.has_capability(ConnectorCapability::FullTextSearch) } /// Returns whether the loaded connector supports the join 
strategy. pub fn can_resolve_relation_with_joins(&self) -> bool { - !matches!(self.join_strategy_support, JoinStrategySupport::No) + !matches!(self.join_strategy_support(), JoinStrategySupport::No) } /// Returns whether the database version of the loaded connector supports the join strategy. pub fn join_strategy_support(&self) -> JoinStrategySupport { + if !can_support_relation_load_strategy() { + return JoinStrategySupport::No; + } self.join_strategy_support } @@ -139,7 +143,7 @@ impl QuerySchema { } pub fn has_capability(&self, capability: ConnectorCapability) -> bool { - self.connector.has_capability(capability) + has_capability(self.connector, capability) } pub fn capabilities(&self) -> ConnectorCapabilities { diff --git a/schema-engine/cli/Cargo.toml b/schema-engine/cli/Cargo.toml index fcb52f71d60c..8bd6c3f65b96 100644 --- a/schema-engine/cli/Cargo.toml +++ b/schema-engine/cli/Cargo.toml @@ -17,7 +17,12 @@ serde.workspace = true tokio.workspace = true tracing.workspace = true tracing-error = "0.2" -tracing-subscriber = { version = "0.3", features = [ "fmt", "json", "time", "env-filter" ] } +tracing-subscriber = { version = "0.3", features = [ + "fmt", + "json", + "time", + "env-filter", +] } [dev-dependencies] tempfile = "3.1.0" @@ -25,9 +30,9 @@ test-setup = { path = "../../libs/test-setup" } test-macros = { path = "../../libs/test-macros" } url.workspace = true indoc.workspace = true -connection-string.workspace = true +connection-string.workspace = true expect-test = "1.4.0" -quaint.workspace = true +quaint = { workspace = true, features = ["native"] } [[bin]] name = "schema-engine" diff --git a/schema-engine/connectors/mongodb-schema-connector/Cargo.toml b/schema-engine/connectors/mongodb-schema-connector/Cargo.toml index 7157ba122fc6..c23ad970bd16 100644 --- a/schema-engine/connectors/mongodb-schema-connector/Cargo.toml +++ b/schema-engine/connectors/mongodb-schema-connector/Cargo.toml @@ -4,7 +4,7 @@ name = "mongodb-schema-connector" version = "0.1.0" 
[dependencies] -psl.workspace = true +psl = { workspace = true, features = ["mongodb"] } mongodb-client = { path = "../../../libs/mongodb-client" } mongodb-schema-describer = { path = "../../mongodb-schema-describer" } datamodel-renderer = { path = "../../datamodel-renderer" } diff --git a/schema-engine/connectors/schema-connector/Cargo.toml b/schema-engine/connectors/schema-connector/Cargo.toml index 18bbc0059874..f023fe4f49e9 100644 --- a/schema-engine/connectors/schema-connector/Cargo.toml +++ b/schema-engine/connectors/schema-connector/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] psl.workspace = true -quaint.workspace = true +quaint = { workspace = true, features = ["native", "pooled"] } serde.workspace = true serde_json.workspace = true user-facing-errors = { path = "../../../libs/user-facing-errors" } diff --git a/schema-engine/connectors/sql-schema-connector/Cargo.toml b/schema-engine/connectors/sql-schema-connector/Cargo.toml index 3127ed51d16c..d2347d8bfec6 100644 --- a/schema-engine/connectors/sql-schema-connector/Cargo.toml +++ b/schema-engine/connectors/sql-schema-connector/Cargo.toml @@ -8,7 +8,12 @@ vendored-openssl = ["quaint/vendored-openssl"] [dependencies] psl.workspace = true -quaint.workspace = true +quaint = { workspace = true, features = [ + "native", + "expose-drivers", + "pooled", + "fmt-sql", +] } tokio.workspace = true serde.workspace = true indoc.workspace = true @@ -19,7 +24,9 @@ schema-connector = { path = "../schema-connector" } sql-schema-describer = { path = "../../sql-schema-describer" } datamodel-renderer = { path = "../../datamodel-renderer" } sql-ddl = { path = "../../../libs/sql-ddl" } -user-facing-errors = { path = "../../../libs/user-facing-errors", features = ["sql"] } +user-facing-errors = { path = "../../../libs/user-facing-errors", features = [ + "sql", +] } chrono.workspace = true connection-string.workspace = true diff --git a/schema-engine/core/Cargo.toml b/schema-engine/core/Cargo.toml index 
215a4a7e8e97..bd9a33247af3 100644 --- a/schema-engine/core/Cargo.toml +++ b/schema-engine/core/Cargo.toml @@ -4,7 +4,7 @@ name = "schema-core" version = "0.1.0" [dependencies] -psl.workspace = true +psl = { workspace = true, features = ["all"] } schema-connector = { path = "../connectors/schema-connector" } mongodb-schema-connector = { path = "../connectors/mongodb-schema-connector" } sql-schema-connector = { path = "../connectors/sql-schema-connector" } diff --git a/schema-engine/sql-introspection-tests/Cargo.toml b/schema-engine/sql-introspection-tests/Cargo.toml index 8ec7f33f6aea..f537eae61b7b 100644 --- a/schema-engine/sql-introspection-tests/Cargo.toml +++ b/schema-engine/sql-introspection-tests/Cargo.toml @@ -7,13 +7,13 @@ edition = "2021" schema-connector = { path = "../connectors/schema-connector" } sql-schema-connector = { path = "../connectors/sql-schema-connector" } sql-schema-describer = { path = "../sql-schema-describer" } -psl.workspace = true +psl = { workspace = true, features = ["all"] } test-macros = { path = "../../libs/test-macros" } user-facing-errors = { path = "../../libs/user-facing-errors" } test-setup = { path = "../../libs/test-setup" } enumflags2.workspace = true -connection-string.workspace = true +connection-string.workspace = true pretty_assertions = "1" tracing-futures = "0.2" tokio.workspace = true @@ -21,7 +21,7 @@ tracing.workspace = true indoc.workspace = true expect-test = "1.1.0" url.workspace = true -quaint.workspace = true +quaint = { workspace = true, features = ["native"] } [dependencies.barrel] git = "https://github.com/prisma/barrel.git" diff --git a/schema-engine/sql-migration-tests/Cargo.toml b/schema-engine/sql-migration-tests/Cargo.toml index c3dbebab0432..5b99906acc05 100644 --- a/schema-engine/sql-migration-tests/Cargo.toml +++ b/schema-engine/sql-migration-tests/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -psl.workspace = true +psl = { workspace = true, features = ["all"] } 
schema-core = { path = "../core" } sql-schema-connector = { path = "../connectors/sql-schema-connector" } sql-schema-describer = { path = "../sql-schema-describer" } @@ -30,4 +30,4 @@ tokio.workspace = true tracing.workspace = true tracing-futures = "0.2" url.workspace = true -quaint.workspace = true +quaint = { workspace = true, features = ["native"] } diff --git a/schema-engine/sql-schema-describer/Cargo.toml b/schema-engine/sql-schema-describer/Cargo.toml index 8bfdfaad59b9..bfd1de7f359f 100644 --- a/schema-engine/sql-schema-describer/Cargo.toml +++ b/schema-engine/sql-schema-describer/Cargo.toml @@ -5,8 +5,9 @@ version = "0.1.0" [dependencies] prisma-value = { path = "../../libs/prisma-value" } -psl.workspace = true +psl = { workspace = true, features = ["all"] } +either = "1.8.0" async-trait.workspace = true bigdecimal = "0.3" enumflags2.workspace = true @@ -18,8 +19,12 @@ serde.workspace = true tracing.workspace = true tracing-error = "0.2" tracing-futures = "0.2" -quaint.workspace = true -either = "1.8.0" +quaint = { workspace = true, features = [ + "native", + "pooled", + "expose-drivers", + "fmt-sql", +] } [dev-dependencies] expect-test = "1.2.2"