
Commit

Separated command line handling into dedicated files, added parsing and validation of the typed schema
oskardudycz committed Sep 12, 2024
1 parent c35e14a commit a078053
Showing 9 changed files with 267 additions and 165 deletions.
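
The "typed schema" mentioned above refers to a config file whose default export wraps a pongoSchema definition. A minimal sketch of such a file (the file name pongo.config.ts is illustrative; the shape mirrors the sample string embedded in the new configFile.ts below):

// pongo.config.ts (hypothetical name), passed to the CLI via the -f/--config option
import { pongoSchema } from '@event-driven-io/pongo';

type User = { name: string };

export default {
  schema: pongoSchema.client({
    database: pongoSchema.db({
      users: pongoSchema.collection<User>('users'),
    }),
  }),
};
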
161 changes: 1 addition & 160 deletions src/packages/pongo/src/cli.ts
@@ -1,172 +1,13 @@
#!/usr/bin/env node
import {
combineMigrations,
dumbo,
migrationTableSchemaComponent,
runPostgreSQLMigrations,
} from '@event-driven-io/dumbo';
import { Command } from 'commander';
import { pongoCollectionSchemaComponent, type PongoSchemaConfig } from './core';

interface MigrateRunOptions {
collection: string[];
connectionString: string;
config?: string;
dryRun?: boolean;
}

interface MigrateSqlOptions {
print?: boolean;
write?: string;
collection: string[];
}
import { migrateCommand } from './commandLine';

const program = new Command();

program.name('pongo').description('CLI tool for Pongo');

const migrateCommand = new Command('migrate').description(
'Manage database migrations',
);

/// Add `migrate:run` subcommand
migrateCommand
.command('run')
.description('Run database migrations')
.option(
'-cs, --connectionString <string>',
'Connection string for the database',
)
.option(
'-col, --collection <name>',
'Specify the collection name',
(value: string, previous: string[]) => {
// Accumulate collection names into an array (explicitly typing `previous` as `string[]`)
return previous.concat([value]);
},
[] as string[],
)
.option(
'-f, --config <path>',
'Path to configuration file with collection list',
)
.option('-dr, --dryRun', 'Perform dry run without commiting changes', false)
.action(async (options: MigrateRunOptions) => {
const { collection, connectionString, dryRun } = options;
let collectionNames: string[];

if (!connectionString) {
console.error(
'Error: Connection string is required. Provide it either as a "cs" parameter or through the DB_CONNECTION_STRING environment variable.',
);
process.exit(1);
}

if (options.config) {
const config = await loadConfigFile(options.config);
collectionNames = config.collections;
} else if (collection) {
collectionNames = collection;
} else {
console.error(
'Error: You need to provide at least one collection name is required. Provide it either through "config" file or as a "col" parameter.',
);
process.exit(1);
}

const pool = dumbo({ connectionString });

const migrations = collectionNames.flatMap((collectionsName) =>
pongoCollectionSchemaComponent(collectionsName).migrations({
connector: 'PostgreSQL:pg', // TODO: Provide connector here
}),
);

await runPostgreSQLMigrations(pool, migrations, {
dryRun,
});
});

// Add `migrate:sql` subcommand
migrateCommand
.command('sql')
.description('Generate SQL for database migration')
.option(
'-col, --collection <name>',
'Specify the collection name',
(value: string, previous: string[]) => {
// Accumulate collection names into an array (explicitly typing `previous` as `string[]`)
return previous.concat([value]);
},
[] as string[],
)
.option('--print', 'Print the SQL to the console (default)', true)
//.option('--write <filename>', 'Write the SQL to a specified file')
.action((options: MigrateSqlOptions) => {
const { collection } = options;

if (!collection) {
console.error(
'Error: You need to provide at least one collection name is required. Provide it either as a "col" parameter.',
);
process.exit(1);
}
const coreMigrations = migrationTableSchemaComponent.migrations({
connector: 'PostgreSQL:pg',
});
const migrations = [
...coreMigrations,
...collection.flatMap((collectionsName) =>
pongoCollectionSchemaComponent(collectionsName).migrations({
connector: 'PostgreSQL:pg', // TODO: Provide connector here
}),
),
];

console.log('Printing SQL:');
console.log(combineMigrations(...migrations));
});

const loadConfigFile = async (
configPath: string,
): Promise<PongoSchemaConfig> => {
const configUrl = new URL(configPath, `file://${process.cwd()}/`);
try {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const imported: Partial<{ default: PongoSchemaConfig }> = await import(
configUrl.href
);

if (!imported.default) {
console.error(
'Error: Config should contain default export with object with collections array',
);
process.exit(1);
}

if (
!(
imported.default.collections &&
Array.isArray(imported.default.collections)
)
) {
console.error('Error: Config file should contain collections array');
process.exit(1);
}

console.log(JSON.stringify(imported));

return { collections: imported.default.collections };
} catch {
console.error(`Error: Couldn't load file: ${configUrl.href}`);
process.exit(1);
}
};

// Add the `migrate` command to the main program
program.addCommand(migrateCommand);

// Parse the command-line arguments
program.parse(process.argv);

export default program;
83 changes: 83 additions & 0 deletions src/packages/pongo/src/commandLine/configFile.ts
@@ -0,0 +1,83 @@
import { objectEntries, type PongoSchemaConfig } from '../core';
import {
toDbSchemaMetadata,
type PongoDbSchemaMetadata,
} from '../core/typing/schema';

const sampleConfig = `import { pongoSchema } from '@event-driven-io/pongo';
type User = { name: string };
export default {
schema: pongoSchema.client({
database: pongoSchema.db({
users: pongoSchema.collection<User>('users'),
}),
}),
};`;

const missingDefaultExport = `Error: Config should contain a default export, e.g.\n\n${sampleConfig}`;
const missingSchema = `Error: Config should contain a schema property, e.g.\n\n${sampleConfig}`;
const missingDbs = `Error: Config should have at least one database defined, e.g.\n\n${sampleConfig}`;
const missingDefaultDb = `Error: Config should have a default database defined (without a name or with the default database name), e.g.\n\n${sampleConfig}`;
const missingCollections = `Error: Database should have at least one collection defined, e.g.\n\n${sampleConfig}`;

export const loadConfigFile = async (
configPath: string,
): Promise<PongoDbSchemaMetadata> => {
const configUrl = new URL(configPath, `file://${process.cwd()}/`);
try {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const imported: Partial<{ default: PongoSchemaConfig }> = await import(
configUrl.href
);

const parsed = parseDefaultDbSchema(imported);

if (typeof parsed === 'string') {
console.error(parsed);
process.exit(1);
}

return parsed;
} catch {
console.error(`Error: Couldn't load file: ${configUrl.href}`);
process.exit(1);
}
};

export const parseDefaultDbSchema = (
imported: Partial<{ default: PongoSchemaConfig }>,
): PongoDbSchemaMetadata | string => {
if (!imported.default) {
return missingDefaultExport;
}

if (!imported.default.schema) {
return missingSchema;
}

if (!imported.default.schema.dbs) {
return missingDbs;
}

const dbs = objectEntries(imported.default.schema.dbs).map((db) => db[1]);

const defaultDb = dbs.find((db) => db.name === undefined);

if (!defaultDb) {
return missingDefaultDb;
}

if (!defaultDb.collections) {
return missingCollections;
}

const collections = objectEntries(defaultDb.collections).map((col) => col[1]);

if (collections.length === 0) {
return missingCollections;
}

return toDbSchemaMetadata(defaultDb);
};
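
A quick sketch of the new parser's contract, assuming a config shaped like sampleConfig above (the import path is hypothetical; parseDefaultDbSchema is re-exported via commandLine/index.ts). Validation failures come back as plain error-message strings; only loadConfigFile escalates them to process.exit(1).

import { parseDefaultDbSchema } from './commandLine';
import config from './pongo.config'; // hypothetical config module with a default export

// A module object without a default export is rejected with the missingDefaultExport message:
const invalid = parseDefaultDbSchema({});
console.log(typeof invalid === 'string'); // true

// A well-formed config resolves to metadata of the default (unnamed) database:
const parsed = parseDefaultDbSchema({ default: config });
if (typeof parsed === 'string') {
  console.error(parsed);
} else {
  console.log(parsed.collections.map((c) => c.name)); // e.g. ['users']
}
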
2 changes: 2 additions & 0 deletions src/packages/pongo/src/commandLine/index.ts
@@ -0,0 +1,2 @@
export * from './configFile';
export * from './migrate';
125 changes: 125 additions & 0 deletions src/packages/pongo/src/commandLine/migrate.ts
@@ -0,0 +1,125 @@
import {
combineMigrations,
dumbo,
migrationTableSchemaComponent,
runPostgreSQLMigrations,
} from '@event-driven-io/dumbo';
import { Command } from 'commander';
import { pongoCollectionSchemaComponent } from '../core';
import { loadConfigFile } from './configFile';

interface MigrateRunOptions {
collection: string[];
connectionString: string;
config?: string;
dryRun?: boolean;
}

interface MigrateSqlOptions {
print?: boolean;
write?: string;
collection: string[];
}

export const migrateCommand = new Command('migrate').description(
'Manage database migrations',
);

migrateCommand
.command('run')
.description('Run database migrations')
.option(
'-cs, --connectionString <string>',
'Connection string for the database',
)
.option(
'-col, --collection <name>',
'Specify the collection name',
(value: string, previous: string[]) => {
// Accumulate collection names into an array (explicitly typing `previous` as `string[]`)
return previous.concat([value]);
},
[] as string[],
)
.option(
'-f, --config <path>',
'Path to configuration file with collection list',
)
.option('-dr, --dryRun', 'Perform dry run without committing changes', false)
.action(async (options: MigrateRunOptions) => {
const { collection, dryRun } = options;
const connectionString =
options.connectionString ?? process.env.DB_CONNECTION_STRING;
let collectionNames: string[];

if (!connectionString) {
console.error(
'Error: Connection string is required. Provide it either as a "--connectionString" parameter or through the DB_CONNECTION_STRING environment variable.',
);
process.exit(1);
}

if (options.config) {
const config = await loadConfigFile(options.config);

collectionNames = config.collections.map((c) => c.name);
} else if (collection && collection.length > 0) {
collectionNames = collection;
} else {
console.error(
'Error: You need to provide at least one collection name. Provide it either through a "--config" file or as a "--collection" parameter.',
);
process.exit(1);
}

const pool = dumbo({ connectionString });

const migrations = collectionNames.flatMap((collectionsName) =>
pongoCollectionSchemaComponent(collectionsName).migrations({
connector: 'PostgreSQL:pg', // TODO: Provide connector here
}),
);

await runPostgreSQLMigrations(pool, migrations, {
dryRun,
});
});

migrateCommand
.command('sql')
.description('Generate SQL for database migration')
.option(
'-col, --collection <name>',
'Specify the collection name',
(value: string, previous: string[]) => {
// Accumulate collection names into an array (explicitly typing `previous` as `string[]`)
return previous.concat([value]);
},
[] as string[],
)
.option('--print', 'Print the SQL to the console (default)', true)
//.option('--write <filename>', 'Write the SQL to a specified file')
.action((options: MigrateSqlOptions) => {
const { collection } = options;

if (!collection || collection.length === 0) {
console.error(
'Error: You need to provide at least one collection name. Provide it as a "--collection" parameter.',
);
process.exit(1);
}
const coreMigrations = migrationTableSchemaComponent.migrations({
connector: 'PostgreSQL:pg',
});
const migrations = [
...coreMigrations,
...collection.flatMap((collectionsName) =>
pongoCollectionSchemaComponent(collectionsName).migrations({
connector: 'PostgreSQL:pg', // TODO: Provide connector here
}),
),
];

console.log('Printing SQL:');
console.log(combineMigrations(...migrations));
});
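
For reference, a sketch of how the extracted command gets exercised, mirroring the wiring that remains in cli.ts (the connection string and collection names below are illustrative):

import { Command } from 'commander';
import { migrateCommand } from './commandLine';

const program = new Command().name('pongo').addCommand(migrateCommand);

// Equivalent to:
//   pongo migrate run --connectionString postgres://localhost:5432/pongo \
//     --collection users --collection orders --dryRun
void program.parseAsync(
  [
    'migrate',
    'run',
    '--connectionString',
    'postgres://localhost:5432/pongo',
    '--collection',
    'users',
    '--collection',
    'orders',
    '--dryRun',
  ],
  { from: 'user' },
);
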
2 changes: 1 addition & 1 deletion src/packages/pongo/src/core/index.ts
@@ -1,5 +1,5 @@
export * from './collection';
export * from './migrations';
export * from './schema';
export * from './pongoClient';
export * from './pongoDb';
export * from './pongoSession';
3 changes: 0 additions & 3 deletions src/packages/pongo/src/core/migrations/index.ts

This file was deleted.

