feat(schema-engine): use pk for implicit join tables on postgres #5057

Merged · 24 commits · Nov 28, 2024
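
For orientation, a minimal standalone sketch (plain Rust, not the schema engine's actual types) of the behaviour this PR introduces, as reflected in the diffs below: on PostgreSQL the implicit many-to-many join table now gets a composite primary key on (A, B), CockroachDB keeps the previous unique index, and introspection only accepts a primary-key-shaped join table on flavours that use it. The `is_cockroach` flag here is a stand-in for the real flavour checks.

// Mirrors the `JoinTableUniquenessConstraint` enum added in this PR.
#[derive(Debug, PartialEq)]
enum JoinTableUniquenessConstraint {
    PrimaryKey,
    UniqueIndex,
}

// Schema calculation side: PostgreSQL gets a primary key, CockroachDB keeps the unique index.
fn m2m_join_table_constraint(is_cockroach: bool) -> JoinTableUniquenessConstraint {
    if is_cockroach {
        JoinTableUniquenessConstraint::UniqueIndex
    } else {
        JoinTableUniquenessConstraint::PrimaryKey
    }
}

// Introspection side: only treat a PK on (A, B) as an implicit join table where PKs are used.
fn uses_pk_in_m2m_join_tables(is_cockroach: bool) -> bool {
    !is_cockroach
}

fn main() {
    assert_eq!(m2m_join_table_constraint(false), JoinTableUniquenessConstraint::PrimaryKey);
    assert_eq!(m2m_join_table_constraint(true), JoinTableUniquenessConstraint::UniqueIndex);
    assert!(uses_pk_in_m2m_join_tables(false));
}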
@@ -108,7 +108,7 @@ impl<'a> DatamodelCalculatorContext<'a> {
.table_walkers()
.filter(|table| !is_old_migration_table(*table))
.filter(|table| !is_new_migration_table(*table))
.filter(|table| !is_prisma_m_to_n_relation(*table))
.filter(|table| !is_prisma_m_to_n_relation(*table, self.flavour.uses_pk_in_m2m_join_tables(self)))
.filter(|table| !is_relay_table(*table))
.map(move |next| {
let previous = self.existing_model(next.id);
@@ -61,4 +61,8 @@ pub(crate) trait IntrospectionFlavour {
fn uses_exclude_constraint(&self, _ctx: &DatamodelCalculatorContext<'_>, _table: TableWalker<'_>) -> bool {
false
}

fn uses_pk_in_m2m_join_tables(&self, _ctx: &DatamodelCalculatorContext<'_>) -> bool {
false
}
}
@@ -96,4 +96,8 @@ impl super::IntrospectionFlavour for PostgresIntrospectionFlavour {
let pg_ext: &PostgresSchemaExt = ctx.sql_schema.downcast_connector_data();
pg_ext.uses_exclude_constraint(table.id)
}

fn uses_pk_in_m2m_join_tables(&self, ctx: &DatamodelCalculatorContext<'_>) -> bool {
!ctx.is_cockroach()
}
}
@@ -1,7 +1,7 @@
//! Small utility functions.

use sql::walkers::TableWalker;
use sql_schema_describer::{self as sql, IndexType};
use sql_schema_describer::{self as sql, IndexColumnWalker, IndexType};
use std::cmp;

/// This function implements the reverse behaviour of the `Ord` implementation for `Option`: it
@@ -56,7 +56,7 @@ pub(crate) fn is_relay_table(table: TableWalker<'_>) -> bool {
}

/// If a relation defines a Prisma many to many relation.
pub(crate) fn is_prisma_m_to_n_relation(table: TableWalker<'_>) -> bool {
pub(crate) fn is_prisma_m_to_n_relation(table: TableWalker<'_>, pk_allowed: bool) -> bool {
fn is_a(column: &str) -> bool {
column.eq_ignore_ascii_case("a")
}
@@ -65,9 +65,18 @@ pub(crate) fn is_prisma_m_to_n_relation(table: TableWalker<'_>) -> bool {
column.eq_ignore_ascii_case("b")
}

fn index_columns_match<'a>(mut columns: impl ExactSizeIterator<Item = IndexColumnWalker<'a>>) -> bool {
columns.len() == 2
&& match (columns.next(), columns.next()) {
(Some(a), Some(b)) => is_a(a.name()) && is_b(b.name()),
_ => false,
}
}

let mut fks = table.foreign_keys();
let first_fk = fks.next();
let second_fk = fks.next();

let a_b_match = || {
let first_fk = first_fk.unwrap();
let second_fk = second_fk.unwrap();
@@ -80,14 +89,13 @@ pub(crate) fn is_prisma_m_to_n_relation(table: TableWalker<'_>) -> bool {
&& is_b(first_fk_col)
&& is_a(second_fk_col))
};

table.name().starts_with('_')
//UNIQUE INDEX [A,B]
&& table.indexes().any(|i| {
i.columns().len() == 2
&& is_a(i.columns().next().unwrap().as_column().name())
&& is_b(i.columns().nth(1).unwrap().as_column().name())
// UNIQUE INDEX (A, B) or PRIMARY KEY (A, B)
&& (table.indexes().any(|i| {
index_columns_match(i.columns())
&& i.is_unique()
})
}) || pk_allowed && table.primary_key_columns().map(index_columns_match).unwrap_or(false))
//INDEX [B]
&& table
.indexes()
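
As a clarifying aside, a minimal sketch of the relaxed detection rule above, using simplified stand-in types rather than the `sql_schema_describer` walkers (the struct below is hypothetical, for illustration only): a table whose name starts with an underscore qualifies when it carries a unique index on (A, B) or, where `pk_allowed` is true, a primary key on (A, B).

// Hypothetical, simplified stand-in for a described join table.
struct JoinTableShape<'a> {
    name: &'a str,
    unique_index_columns: Vec<&'a str>,
    primary_key_columns: Option<Vec<&'a str>>,
}

fn is_a(column: &str) -> bool {
    column.eq_ignore_ascii_case("a")
}

fn is_b(column: &str) -> bool {
    column.eq_ignore_ascii_case("b")
}

// Columns must be exactly (A, B), case-insensitively, as in `index_columns_match`.
fn columns_match(columns: &[&str]) -> bool {
    matches!(columns, [a, b] if is_a(a) && is_b(b))
}

// Sketch of the relaxed check: unique index on (A, B), or a primary key on (A, B) when allowed.
fn looks_like_m2m_join_table(table: &JoinTableShape<'_>, pk_allowed: bool) -> bool {
    table.name.starts_with('_')
        && (columns_match(&table.unique_index_columns)
            || pk_allowed
                && table
                    .primary_key_columns
                    .as_deref()
                    .map(columns_match)
                    .unwrap_or(false))
}

fn main() {
    // A join table that only has a composite primary key on (A, B).
    let pk_table = JoinTableShape {
        name: "_PostToTag",
        unique_index_columns: vec![],
        primary_key_columns: Some(vec!["A", "B"]),
    };
    assert!(looks_like_m2m_join_table(&pk_table, true));
    assert!(!looks_like_m2m_join_table(&pk_table, false));
}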
@@ -50,11 +50,11 @@ impl<'a> IntrospectionMap<'a> {
match_enums(sql_schema, prisma_schema, &mut map);
match_existing_scalar_fields(sql_schema, prisma_schema, &mut map);
match_existing_inline_relations(sql_schema, prisma_schema, &mut map);
match_existing_m2m_relations(sql_schema, prisma_schema, &mut map);
match_existing_m2m_relations(sql_schema, prisma_schema, ctx, &mut map);
relation_names::introspect(ctx, &mut map);
position_inline_relation_fields(sql_schema, &mut map);
position_m2m_relation_fields(sql_schema, &mut map);
populate_top_level_names(sql_schema, prisma_schema, &mut map);
position_inline_relation_fields(sql_schema, ctx, &mut map);
position_m2m_relation_fields(sql_schema, ctx, &mut map);
populate_top_level_names(sql_schema, prisma_schema, ctx, &mut map);

map
}
@@ -63,11 +63,12 @@ impl<'a> IntrospectionMap<'a> {
fn populate_top_level_names<'a>(
sql_schema: &'a sql::SqlSchema,
prisma_schema: &'a psl::ValidatedSchema,
ctx: &DatamodelCalculatorContext<'_>,
map: &mut IntrospectionMap<'a>,
) {
for table in sql_schema
.table_walkers()
.filter(|t| !helpers::is_prisma_m_to_n_relation(*t))
.filter(|t| !helpers::is_prisma_m_to_n_relation(*t, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)))
{
let name = map
.existing_models
@@ -115,10 +116,14 @@ fn populate_top_level_names<'a>(

/// Inlined relation fields (foreign key is defined in a model) are
/// sorted in a specific way. We handle the sorting here.
fn position_inline_relation_fields(sql_schema: &sql::SqlSchema, map: &mut IntrospectionMap<'_>) {
fn position_inline_relation_fields(
sql_schema: &sql::SqlSchema,
ctx: &DatamodelCalculatorContext<'_>,
map: &mut IntrospectionMap<'_>,
) {
for table in sql_schema
.table_walkers()
.filter(|t| !helpers::is_prisma_m_to_n_relation(*t))
.filter(|t| !helpers::is_prisma_m_to_n_relation(*t, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)))
{
for fk in table.foreign_keys() {
map.inline_relation_positions
@@ -133,10 +138,14 @@ fn position_inline_relation_fields(sql_schema: &sql::SqlSchema, map: &mut Intros
/// Many to many relation fields (foreign keys are defined in a hidden
/// join table) are sorted in a specific way. We handle the sorting
/// here.
fn position_m2m_relation_fields(sql_schema: &sql::SqlSchema, map: &mut IntrospectionMap<'_>) {
fn position_m2m_relation_fields(
sql_schema: &sql::SqlSchema,
ctx: &DatamodelCalculatorContext<'_>,
map: &mut IntrospectionMap<'_>,
) {
for table in sql_schema
.table_walkers()
.filter(|t| helpers::is_prisma_m_to_n_relation(*t))
.filter(|t| helpers::is_prisma_m_to_n_relation(*t, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)))
{
let mut fks = table.foreign_keys();

@@ -313,11 +322,12 @@ fn match_existing_inline_relations<'a>(
fn match_existing_m2m_relations(
sql_schema: &sql::SqlSchema,
prisma_schema: &psl::ValidatedSchema,
ctx: &DatamodelCalculatorContext<'_>,
map: &mut IntrospectionMap<'_>,
) {
map.existing_m2m_relations = sql_schema
.table_walkers()
.filter(|t| helpers::is_prisma_m_to_n_relation(*t))
.filter(|t| helpers::is_prisma_m_to_n_relation(*t, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)))
.filter_map(|table| {
prisma_schema
.db
@@ -74,7 +74,7 @@ pub(super) fn introspect<'a>(ctx: &DatamodelCalculatorContext<'a>, map: &mut sup
let ambiguous_relations = find_ambiguous_relations(ctx);

for table in ctx.sql_schema.table_walkers() {
if is_prisma_m_to_n_relation(table) {
if is_prisma_m_to_n_relation(table, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)) {
let name = prisma_m2m_relation_name(table, &ambiguous_relations, ctx);
names.m2m_relation_names.insert(table.id, name);
} else {
@@ -175,8 +175,8 @@ fn find_ambiguous_relations(ctx: &DatamodelCalculatorContext<'_>) -> HashSet<[sq
let mut ambiguous_relations = HashSet::new();

for table in ctx.sql_schema.table_walkers() {
if is_prisma_m_to_n_relation(table) {
m2m_relation_ambiguousness(table, &mut ambiguous_relations)
if is_prisma_m_to_n_relation(table, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)) {
m2m_relation_ambiguousness(table, ctx, &mut ambiguous_relations)
} else {
for fk in table.foreign_keys() {
inline_relation_ambiguousness(fk, &mut ambiguous_relations, ctx)
@@ -187,7 +187,11 @@ fn find_ambiguous_relations(ctx: &DatamodelCalculatorContext<'_>) -> HashSet<[sq
ambiguous_relations
}

fn m2m_relation_ambiguousness(table: sql::TableWalker<'_>, ambiguous_relations: &mut HashSet<[sql::TableId; 2]>) {
fn m2m_relation_ambiguousness(
table: sql::TableWalker<'_>,
ctx: &DatamodelCalculatorContext<'_>,
ambiguous_relations: &mut HashSet<[sql::TableId; 2]>,
) {
let tables = table_ids_for_m2m_relation_table(table);

if ambiguous_relations.contains(&tables) {
@@ -205,7 +209,11 @@ fn m2m_relation_ambiguousness(table: sql::TableWalker<'_>, ambiguous_relations:
}

// Check for conflicts with another m2m relation.
for other_m2m in table.schema.table_walkers().filter(|t| is_prisma_m_to_n_relation(*t)) {
for other_m2m in table
.schema
.table_walkers()
.filter(|t| is_prisma_m_to_n_relation(*t, ctx.flavour.uses_pk_in_m2m_join_tables(ctx)))
{
if other_m2m.id != table.id && table_ids_for_m2m_relation_table(other_m2m) == tables {
ambiguous_relations.insert(tables);
}
@@ -1,5 +1,6 @@
mod sql_schema_calculator_flavour;

use sql_schema_calculator_flavour::JoinTableUniquenessConstraint;
pub(super) use sql_schema_calculator_flavour::SqlSchemaCalculatorFlavour;

use crate::{flavour::SqlFlavour, SqlDatabaseSchema};
@@ -12,7 +13,7 @@ use psl::{
},
ValidatedSchema,
};
use sql_schema_describer::{self as sql, PrismaValue};
use sql_schema_describer::{self as sql, PrismaValue, SqlSchema};
use std::collections::HashMap;

pub(crate) fn calculate_sql_schema(datamodel: &ValidatedSchema, flavour: &dyn SqlFlavour) -> SqlDatabaseSchema {
@@ -261,13 +262,24 @@ fn push_relation_tables(ctx: &mut Context<'_>) {
},
);

// Unique index on AB
// Unique index or PK on AB
{
let index_name = format!(
"{}_AB_unique",
table_name.chars().take(max_identifier_length - 10).collect::<String>()
let (constraint_suffix, push_constraint): (_, fn(_, _, _) -> _) =
match ctx.flavour.m2m_join_table_constraint() {
JoinTableUniquenessConstraint::PrimaryKey => ("_AB_pkey", SqlSchema::push_primary_key),
JoinTableUniquenessConstraint::UniqueIndex => ("_AB_unique", SqlSchema::push_unique_constraint),
};

let constraint_name = format!(
"{}{constraint_suffix}",
table_name
.chars()
.take(max_identifier_length - constraint_suffix.len())
.collect::<String>()
);
let index_id = ctx.schema.describer_schema.push_unique_constraint(table_id, index_name);

let index_id = push_constraint(&mut ctx.schema.describer_schema, table_id, constraint_name);

ctx.schema.describer_schema.push_index_column(sql::IndexColumn {
index_id,
column_id: column_a_id,
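
A small standalone sketch of the constraint-name construction above: the table-name prefix is truncated so that the full identifier, including the flavour-dependent suffix, stays within the connector's identifier limit (the 63 used below is an assumed value, PostgreSQL's default maximum identifier length).

// Build the join-table constraint name, truncating the prefix so the suffix always fits.
fn m2m_constraint_name(table_name: &str, use_primary_key: bool, max_identifier_length: usize) -> String {
    let suffix = if use_primary_key { "_AB_pkey" } else { "_AB_unique" };
    format!(
        "{}{suffix}",
        table_name
            .chars()
            .take(max_identifier_length - suffix.len())
            .collect::<String>()
    )
}

fn main() {
    // Short names are kept intact; 63 is an assumed PostgreSQL-style identifier limit.
    assert_eq!(m2m_constraint_name("_PostToTag", true, 63), "_PostToTag_AB_pkey");
    // Very long table names are truncated so the whole identifier stays within the limit.
    let long_name = "_".repeat(80);
    assert!(m2m_constraint_name(&long_name, false, 63).len() <= 63);
}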
@@ -39,4 +39,13 @@ pub(crate) trait SqlSchemaCalculatorFlavour {
}

fn push_connector_data(&self, _context: &mut super::Context<'_>) {}

fn m2m_join_table_constraint(&self) -> JoinTableUniquenessConstraint {
JoinTableUniquenessConstraint::UniqueIndex
}
}

pub(crate) enum JoinTableUniquenessConstraint {
PrimaryKey,
UniqueIndex,
}
@@ -1,4 +1,4 @@
use super::{super::Context, SqlSchemaCalculatorFlavour};
use super::{super::Context, JoinTableUniquenessConstraint, SqlSchemaCalculatorFlavour};
use crate::flavour::{PostgresFlavour, SqlFlavour};
use either::Either;
use psl::{
@@ -162,6 +162,14 @@ impl SqlSchemaCalculatorFlavour for PostgresFlavour {
.describer_schema
.set_connector_data(Box::new(postgres_ext));
}

fn m2m_join_table_constraint(&self) -> JoinTableUniquenessConstraint {
if self.is_cockroachdb() {
JoinTableUniquenessConstraint::UniqueIndex
} else {
JoinTableUniquenessConstraint::PrimaryKey
}
}
}

fn convert_opclass(opclass: OperatorClass, algo: Option<IndexAlgorithm>) -> sql::postgres::SQLOperatorClass {