AsyncPipe; table schemata; migrations; File logging
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
parent e86cf420df
commit fcce28081b
@@ -15,6 +15,7 @@ import {RunLevelErrorHandler} from './RunLevelErrorHandler'
 import {Unit, UnitStatus} from './Unit'
 import * as dotenv from 'dotenv'
 import {CacheFactory} from '../support/cache/CacheFactory'
+import {FileLogger} from '../util/logging/FileLogger'

 /**
  * Helper function that resolves and infers environment variable values.
@@ -225,6 +226,12 @@ export class Application extends Container {
         const logging: Logging = this.make<Logging>(Logging)

         logging.registerLogger(standard)

+        if ( this.env('EXTOLLO_LOGGING_ENABLE_FILE') ) {
+            const file: FileLogger = this.make<FileLogger>(FileLogger)
+            logging.registerLogger(file)
+        }
+
         logging.verbose('Attempting to load logging level from the environment...')

         const envLevel = this.env('EXTOLLO_LOGGING_LEVEL')
@@ -0,0 +1,39 @@
import {Inject, Injectable} from '../di'
import {ConstraintType, DatabaseService, FieldType, Migration, Schema} from '../orm'

/**
 * Migration that creates the sessions table used by the ORMSession backend.
 */
@Injectable()
export default class CreateSessionsTableMigration extends Migration {
    @Inject()
    protected readonly db!: DatabaseService

    async up(): Promise<void> {
        const schema: Schema = this.db.get().schema()
        const table = await schema.table('sessions')

        table.primaryKey('session_uuid', FieldType.varchar)
            .required()

        table.column('session_data')
            .type(FieldType.json)
            .required()
            .default('{}')

        table.constraint('session_uuid_ck')
            .type(ConstraintType.Check)
            .expression('LENGTH(session_uuid) > 0')

        await schema.commit(table)
    }

    async down(): Promise<void> {
        const schema: Schema = this.db.get().schema()
        const table = await schema.table('sessions')

        table.dropIfExists()

        await schema.commit(table)
    }
}
@@ -0,0 +1,47 @@
import {Inject, Injectable} from '../di'
import {DatabaseService, FieldType, Migration, Schema} from '../orm'

/**
 * Migration that creates the users table used by @extollo/lib.auth.
 */
@Injectable()
export default class CreateUsersTableMigration extends Migration {
    @Inject()
    protected readonly db!: DatabaseService

    async up(): Promise<void> {
        const schema: Schema = this.db.get().schema()
        const table = await schema.table('users')

        table.primaryKey('user_id')
            .required()

        table.column('first_name')
            .type(FieldType.varchar)
            .required()

        table.column('last_name')
            .type(FieldType.varchar)
            .required()

        table.column('password_hash')
            .type(FieldType.text)
            .nullable()

        table.column('username')
            .type(FieldType.varchar)
            .required()
            .unique()

        await schema.commit(table)
    }

    async down(): Promise<void> {
        const schema: Schema = this.db.get().schema()
        const table = await schema.table('users')

        table.dropIfExists()

        await schema.commit(table)
    }
}
@@ -41,7 +41,7 @@ export class DatabaseService extends AppClass {
      * Get a connection instance by its name. Throws if none exists.
      * @param name
      */
-    get(name: string): Connection {
+    get(name = 'default'): Connection {
         if ( !this.has(name) ) {
             throw new ErrorWithContext(`No such connection is registered: ${name}`)
         }
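With the default parameter above, callers can now omit the connection name. A minimal sketch of the two now-equivalent calls (assuming a resolved DatabaseService instance named db and a connection registered under 'default'):

    const conn = db.get()           // resolves the connection registered as 'default'
    const same = db.get('default')  // identical to the call above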
@@ -1,4 +1,4 @@
-import {Inject} from '../../di'
+import {Inject, Injectable} from '../../di'
 import {DatabaseService} from '../DatabaseService'
 import {
     Constraint, ConstraintConnectionOperator,
@@ -9,7 +9,7 @@ import {
     SpecifiedField,
 } from '../types'
 import {Connection} from '../connection/Connection'
-import {deepCopy, ErrorWithContext} from '../../util'
+import {deepCopy, ErrorWithContext, Maybe} from '../../util'
 import {EscapeValue, QuerySafeValue, raw} from '../dialect/SQLDialect'
 import {ResultCollection} from './result/ResultCollection'
 import {AbstractResultIterable} from './result/AbstractResultIterable'
@@ -24,6 +24,7 @@ export type ConstraintGroupClosure<T> = (group: AbstractBuilder<T>) => any
  * A base class that facilitates building database queries using a fluent interface.
  * This can be specialized by child-classes to yield query results of the given type `T`.
  */
+@Injectable()
 export abstract class AbstractBuilder<T> extends AppClass {
     @Inject()
     protected readonly databaseService!: DatabaseService
@@ -55,6 +56,9 @@ export abstract class AbstractBuilder<T> extends AppClass {
     /** The connection on which the query should be executed. */
     protected registeredConnection?: Connection

+    /** Raw SQL to use instead. Overrides builder methods. */
+    protected rawSql?: string
+
     /**
      * Create a new, empty, instance of the current builder.
      */
@@ -80,6 +84,7 @@ export abstract class AbstractBuilder<T> extends AppClass {
         bldr.registeredGroupings = deepCopy(this.registeredGroupings)
         bldr.registeredOrders = deepCopy(this.registeredOrders)
         bldr.registeredConnection = this.registeredConnection
+        bldr.rawSql = this.rawSql

         return bldr
     }
@@ -115,6 +120,11 @@ export abstract class AbstractBuilder<T> extends AppClass {
         return deepCopy(this.registeredOrders)
     }

+    /** Get the raw SQL overriding the builder methods, if it exists. */
+    public get appliedRawSql(): Maybe<string> {
+        return this.rawSql
+    }
+
     /** Get the source table for this query. */
     public get querySource(): QuerySource | undefined {
         if ( this.source ) {
@@ -555,6 +565,21 @@ export abstract class AbstractBuilder<T> extends AppClass {
         return Boolean(result.rows.first())
     }

+    /**
+     * Set the query manually. Overrides any builder methods.
+     * @example
+     * ```ts
+     * (new Builder())
+     *     .raw('SELECT NOW() AS example_column')
+     *     .get()
+     * ```
+     * @param sql
+     */
+    raw(sql: string): this {
+        this.rawSql = sql
+        return this
+    }
+
     /**
      * Adds a constraint to this query. This is used internally by the various `where`, `whereIn`, `orWhereNot`, &c.
      * @param preop
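The new raw() escape hatch only stores the SQL on the builder; the dialect changes later in this diff return it verbatim whenever appliedRawSql is set. A hedged usage sketch, mirroring the JSDoc example above but resolving the concrete Builder through the container the way getNewInstance() does (how the results are consumed afterwards is outside this hunk):

    const result = Container.getContainer()
        .make<Builder>(Builder)
        .raw('SELECT NOW() AS example_column')
        .get()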
@@ -1,5 +1,5 @@
 import {ErrorWithContext} from '../../util'
-import {Container} from '../../di'
+import {Container, Injectable} from '../../di'
 import {ResultIterable} from './result/ResultIterable'
 import {QueryRow} from '../types'
 import {AbstractBuilder} from './AbstractBuilder'
@@ -8,6 +8,7 @@ import {AbstractResultIterable} from './result/AbstractResultIterable'
 /**
  * Implementation of the abstract builder class that returns simple QueryRow objects.
  */
+@Injectable()
 export class Builder extends AbstractBuilder<QueryRow> {
     public getNewInstance(): AbstractBuilder<QueryRow> {
         return Container.getContainer().make<Builder>(Builder)
@@ -5,6 +5,7 @@ import {AppClass} from '../../lifecycle/AppClass'
 import {Inject, Injectable} from '../../di'
 import {EventBus} from '../../event/EventBus'
 import {QueryExecutedEvent} from './event/QueryExecutedEvent'
+import {Schema} from '../schema/Schema'

 /**
  * Error thrown when a connection is used before it is ready.
@@ -61,15 +62,11 @@ export abstract class Connection extends AppClass {
      */
     public abstract close(): Promise<void>

-    // public abstract databases(): Promise<Collection<Database>>
-
-    // public abstract database(name: string): Promise<Database | undefined>
-
-    // public abstract database_as_schema(name: string): Promise<Database>
-
-    // public abstract tables(database_name: string): Promise<Collection<Table>>
-
-    // public abstract table(database_name: string, table_name: string): Promise<Table | undefined>
+    /**
+     * Get a Schema on this connection.
+     * @param name
+     */
+    public abstract schema(name?: string): Schema

     /**
      * Fire a QueryExecutedEvent for the given query string.
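A short sketch of the fluent schema API this abstract schema() method exposes, assembled from the calls used by the new migrations earlier in this diff (the connection variable, table name, and column name here are hypothetical):

    const schema: Schema = connection.schema()
    const table = await schema.table('examples')

    table.column('example_col')
        .type(FieldType.varchar)
        .required()

    await schema.commit(table)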
@@ -6,6 +6,8 @@ import {collect} from '../../util'
 import {SQLDialect} from '../dialect/SQLDialect'
 import {PostgreSQLDialect} from '../dialect/PostgreSQLDialect'
 import {Logging} from '../../service/Logging'
+import {Schema} from '../schema/Schema'
+import {PostgresSchema} from '../schema/PostgresSchema'

 /**
  * Type interface representing the config for a PostgreSQL connection.
@@ -67,4 +69,8 @@ export class PostgresConnection extends Connection {
             })
         }
     }
+
+    public schema(name?: string): Schema {
+        return new PostgresSchema(this, name)
+    }
 }
@@ -1,9 +1,13 @@
 import {EscapeValue, QuerySafeValue, raw, SQLDialect} from './SQLDialect'
-import {Constraint, isConstraintGroup, isConstraintItem, SpecifiedField} from '../types'
+import {Constraint, inverseFieldType, isConstraintGroup, isConstraintItem, SpecifiedField} from '../types'
 import {AbstractBuilder} from '../builder/AbstractBuilder'
+import {ColumnBuilder, ConstraintBuilder, ConstraintType, IndexBuilder, TableBuilder} from '../schema/TableBuilder'
+import {ErrorWithContext, Maybe} from '../../util'

 /**
  * An implementation of the SQLDialect specific to PostgreSQL.
+ * @todo joins
+ * @todo sub-selects
  */
 export class PostgreSQLDialect extends SQLDialect {

@@ -29,7 +33,7 @@ export class PostgreSQLDialect extends SQLDialect {
                 `${pad(value.getSeconds())}`,
             ]

-            return new QuerySafeValue(value, `${y}-${m}-${d} ${h}:${i}:${s}`)
+            return new QuerySafeValue(value, `'${y}-${m}-${d} ${h}:${i}:${s}'`)
         } else if ( !isNaN(Number(value)) ) {
             return new QuerySafeValue(value, String(Number(value)))
         } else if ( value === null || typeof value === 'undefined' ) {
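The added quotes matter because the rendered timestamp is spliced directly into the SQL string. A rough sketch of the effect, assuming this branch sits in the dialect's value-escaping path as the surrounding QuerySafeValue returns suggest (the column name is hypothetical, and the constraint shape follows constraintsToSql() later in this file):

    builder.where('applydate', '>', new Date())
    // previously rendered roughly:  "applydate" > 2021-05-01 12:30:00    -- unquoted, invalid SQL
    // now renders roughly:          "applydate" > '2021-05-01 12:30:00'  -- quoted timestamp literal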
@@ -55,7 +59,7 @@ export class PostgreSQLDialect extends SQLDialect {
             'FROM (',
             ...query.split('\n').map(x => ` ${x}`),
             ') AS extollo_target_query',
-            `OFFSET ${start} LIMIT ${(end - start) + 1}`,
+            `OFFSET ${start} LIMIT ${(end - start) + 1}`,  // FIXME - the +1 is only needed when start === end
         ].join('\n')
     }

@@ -85,6 +89,11 @@ export class PostgreSQLDialect extends SQLDialect {
     }

     public renderSelect(builder: AbstractBuilder<any>): string {
+        const rawSql = builder.appliedRawSql
+        if ( rawSql ) {
+            return rawSql
+        }
+
         const indent = (item: string, level = 1) => Array(level + 1).fill('')
             .join(' ') + item
         const queryLines = [
@@ -147,6 +156,11 @@ export class PostgreSQLDialect extends SQLDialect {

     // TODO support FROM, RETURNING
     public renderUpdate(builder: AbstractBuilder<any>, data: {[key: string]: EscapeValue}): string {
+        const rawSql = builder.appliedRawSql
+        if ( rawSql ) {
+            return rawSql
+        }
+
         const queryLines: string[] = []

         // Add table source
@@ -171,6 +185,15 @@ export class PostgreSQLDialect extends SQLDialect {
     }

     public renderExistential(builder: AbstractBuilder<any>): string {
+        const rawSql = builder.appliedRawSql
+        if ( rawSql ) {
+            return `
+                SELECT EXISTS(
+                    ${rawSql}
+                )
+            `
+        }
+
         const query = builder.clone()
             .clearFields()
             .field(raw('TRUE'))
@@ -181,6 +204,11 @@ export class PostgreSQLDialect extends SQLDialect {

     // FIXME: subquery support here and with select
     public renderInsert(builder: AbstractBuilder<any>, data: {[key: string]: EscapeValue}|{[key: string]: EscapeValue}[] = []): string {
+        const rawSql = builder.appliedRawSql
+        if ( rawSql ) {
+            return rawSql
+        }
+
         const indent = (item: string, level = 1) => Array(level + 1).fill('')
             .join(' ') + item
         const queryLines: string[] = []
@@ -188,6 +216,11 @@ export class PostgreSQLDialect extends SQLDialect {
         if ( !Array.isArray(data) ) {
             data = [data]
         }

+        if ( data.length < 1 ) {
+            return ''
+        }
+
         const columns = Object.keys(data[0])

         // Add table source
@@ -227,6 +260,11 @@ export class PostgreSQLDialect extends SQLDialect {
     }

     public renderDelete(builder: AbstractBuilder<any>): string {
+        const rawSql = builder.appliedRawSql
+        if ( rawSql ) {
+            return rawSql
+        }
+
         const indent = (item: string, level = 1) => Array(level + 1).fill('')
             .join(' ') + item
         const queryLines: string[] = []
@@ -270,6 +308,11 @@ export class PostgreSQLDialect extends SQLDialect {
             if ( isConstraintGroup(constraint) ) {
                 statements.push(`${indent}${statements.length < 1 ? '' : constraint.preop + ' '}(\n${constraintsToSql(constraint.items, level + 1)}\n${indent})`)
             } else if ( isConstraintItem(constraint) ) {
+                if ( Array.isArray(constraint.operand) && !constraint.operand.length ) {
+                    statements.push(`${indent}1 = 0 -- ${constraint.field} ${constraint.operator} empty set`)
+                    continue
+                }
+
                 const field: string = constraint.field.split('.').map(x => `"${x}"`)
                     .join('.')
                 statements.push(`${indent}${statements.length < 1 ? '' : constraint.preop + ' '}${field} ${constraint.operator} ${this.escape(constraint.operand).value}`)
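In effect, a constraint over an empty operand list now renders an always-false predicate instead of malformed SQL. A small sketch (the whereIn() call shape is taken from DatabaseMigrator later in this diff; the column name is illustrative):

    builder.whereIn('identifier', [])
    // rendered constraint, roughly: 1 = 0 -- identifier IN empty set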
@@ -294,4 +337,247 @@ export class PostgreSQLDialect extends SQLDialect {

         return ['SET', ...sets].join('\n')
     }
+
+    public renderCreateTable(builder: TableBuilder): string {
+        const cols = this.renderTableColumns(builder).map(x => ` ${x}`)
+
+        const builderConstraints = builder.getConstraints()
+        const constraints: string[] = []
+        for ( const constraintName in builderConstraints ) {
+            if ( !Object.prototype.hasOwnProperty.call(builderConstraints, constraintName) ) {
+                continue
+            }
+
+            const constraintBuilder = builderConstraints[constraintName]
+            const constraintDefinition = this.renderConstraintDefinition(constraintBuilder)
+            if ( constraintDefinition ) {
+                constraints.push(` CONSTRAINT ${constraintDefinition}`)
+            }
+        }
+
+        const parts = [
+            `CREATE TABLE ${builder.isSkippedIfExisting() ? 'IF NOT EXISTS ' : ''}${builder.name} (`,
+            [
+                ...cols,
+                ...constraints,
+            ].join(',\n'),
+            `)`,
+        ]
+
+        return parts.join('\n')
+    }
+
+    public renderTableColumns(builder: TableBuilder): string[] {
+        const defined = builder.getColumns()
+        const rendered: string[] = []
+
+        for ( const columnName in defined ) {
+            if ( !Object.prototype.hasOwnProperty.call(defined, columnName) ) {
+                continue
+            }
+
+            const columnBuilder = defined[columnName]
+            rendered.push(this.renderColumnDefinition(columnBuilder))
+        }
+
+        return rendered
+    }
+
+    /**
+     * Given a constraint schema-builder, render the constraint definition.
+     * @param builder
+     * @protected
+     */
+    protected renderConstraintDefinition(builder: ConstraintBuilder): Maybe<string> {
+        const constraintType = builder.getType()
+        if ( constraintType === ConstraintType.Unique ) {
+            const fields = builder.getFields()
+                .map(x => `"${x}"`)
+                .join(',')
+
+            return `${builder.name} UNIQUE(${fields})`
+        } else if ( constraintType === ConstraintType.Check ) {
+            const expression = builder.getExpression()
+            if ( !expression ) {
+                throw new ErrorWithContext('Cannot create check constraint without expression.', {
+                    constraintName: builder.name,
+                    tableName: builder.parent.name,
+                })
+            }
+
+            return `${builder.name} CHECK(${expression})`
+        }
+    }
+
+    /**
+     * Given a column-builder, render the SQL-definition as used in
+     * CREATE TABLE and ALTER TABLE statements.
+     * @fixme Type `serial` only exists on CREATE TABLE... queries
+     * @param builder
+     * @protected
+     */
+    protected renderColumnDefinition(builder: ColumnBuilder): string {
+        const type = builder.getType()
+        if ( !type ) {
+            throw new ErrorWithContext(`Missing field type for column: ${builder.name}`, {
+                columnName: builder.name,
+                columnType: type,
+            })
+        }
+
+        let render = `"${builder.name}" ${inverseFieldType(type)}`
+
+        if ( builder.getLength() ) {
+            render += `(${builder.getLength()})`
+        }
+
+        const defaultValue = builder.getDefaultValue()
+        if ( typeof defaultValue !== 'undefined' ) {
+            render += ` DEFAULT ${this.escape(defaultValue)}`
+        }
+
+        if ( builder.isPrimary() ) {
+            render += ` CONSTRAINT ${builder.name}_pk PRIMARY KEY`
+        }
+
+        if ( builder.isUnique() ) {
+            render += ` UNIQUE`
+        }
+
+        render += ` ${builder.isNullable() ? 'NULL' : 'NOT NULL'}`
+        return render
+    }
+
+    public renderDropTable(builder: TableBuilder): string {
+        return `DROP TABLE ${builder.isSkippedIfExisting() ? 'IF EXISTS ' : ''}${builder.name}`
+    }
+
+    public renderCreateIndex(builder: IndexBuilder): string {
+        const cols = builder.getFields().map(x => `"${x}"`)
+        const parts = [
+            `CREATE ${builder.isUnique() ? 'UNIQUE ' : ''}INDEX ${builder.isSkippedIfExisting() ? 'IF NOT EXISTS ' : ''}${builder.name}`,
+            ` ON ${builder.parent.name}`,
+            ` (${cols.join(',')})`,
+        ]
+
+        return parts.join('\n')
+    }
+
+    public renderAlterTable(builder: TableBuilder): string {
+        const alters: string[] = []
+        const columns = builder.getColumns()
+
+        for ( const columnName in columns ) {
+            if ( !Object.prototype.hasOwnProperty.call(columns, columnName) ) {
+                continue
+            }
+
+            const columnBuilder = columns[columnName]
+            if ( !columnBuilder.isExisting() ) {
+                // The column doesn't exist on the table, but was added to the schema
+                alters.push(` ADD COLUMN ${this.renderColumnDefinition(columnBuilder)}`)
+            } else if ( columnBuilder.isDirty() && columnBuilder.originalFromSchema ) {
+                // The column exists in the table, but was modified in the schema
+                if ( columnBuilder.isDropping() || columnBuilder.isDroppingIfExists() ) {
+                    alters.push(` DROP COLUMN "${columnBuilder.name}"`)
+                    continue
+                }
+
+                // Change the data type of the column
+                if ( columnBuilder.getType() !== columnBuilder.originalFromSchema.getType() ) {
+                    const renderedType = `${columnBuilder.getType()}${columnBuilder.getLength() ? `(${columnBuilder.getLength()})` : ''}`
+                    alters.push(` ALTER COLUMN "${columnBuilder.name}" TYPE ${renderedType}`)
+                }
+
+                // Change the default value of the column
+                if ( columnBuilder.getDefaultValue() !== columnBuilder.originalFromSchema.getDefaultValue() ) {
+                    alters.push(` ALTER COLUMN "${columnBuilder.name}" SET default ${this.escape(columnBuilder.getDefaultValue())}`)
+                }
+
+                // Change the nullable-status of the column
+                if ( columnBuilder.isNullable() !== columnBuilder.originalFromSchema.isNullable() ) {
+                    if ( columnBuilder.isNullable() ) {
+                        alters.push(` ALTER COLUMN "${columnBuilder.name}" DROP NOT NULL`)
+                    } else {
+                        alters.push(` ALTER COLUMN "${columnBuilder.name}" SET NOT NULL`)
+                    }
+                }
+
+                // Change the name of the column
+                if ( columnBuilder.getRename() ) {
+                    alters.push(` RENAME COLUMN "${columnBuilder.name}" TO "${columnBuilder.getRename()}"`)
+                }
+            }
+        }
+
+        const constraints = builder.getConstraints()
+        for ( const constraintName in constraints ) {
+            if ( !Object.prototype.hasOwnProperty.call(constraints, constraintName) ) {
+                continue
+            }
+
+            const constraintBuilder = constraints[constraintName]
+
+            // Drop the constraint if specified
+            if ( constraintBuilder.isDropping() ) {
+                alters.push(` DROP CONSTRAINT ${constraintBuilder.name}`)
+                continue
+            }
+
+            // Drop the constraint with IF EXISTS if specified
+            if ( constraintBuilder.isDroppingIfExists() ) {
+                alters.push(` DROP CONSTRAINT IF EXISTS ${constraintBuilder.name}`)
+                continue
+            }
+
+            // Otherwise, drop and recreate the constraint if it was modified
+            if ( constraintBuilder.isDirty() ) {
+                if ( constraintBuilder.isExisting() ) {
+                    alters.push(` DROP CONSTRAINT IF EXISTS ${constraintBuilder.name}`)
+                }
+
+                const constraintDefinition = this.renderConstraintDefinition(constraintBuilder)
+                if ( constraintDefinition ) {
+                    alters.push(` ADD CONSTRAINT ${constraintDefinition}`)
+                }
+            }
+        }
+
+        if ( builder.getRename() ) {
+            alters.push(` RENAME TO "${builder.getRename()}"`)
+        }
+
+        return 'ALTER TABLE ' + builder.name + '\n' + alters.join(',\n')
+    }
+
+    public renderDropIndex(builder: IndexBuilder): string {
+        return `DROP INDEX ${builder.isDroppingIfExists() ? 'IF EXISTS ' : ''}${builder.name}`
+    }
+
+    public renderTransaction(queries: string[]): string {
+        const parts = [
+            'BEGIN',
+            ...queries,
+            'COMMIT',
+        ]
+
+        return parts.join(';\n\n')
+    }
+
+    public renderRenameIndex(builder: IndexBuilder): string {
+        return `ALTER INDEX ${builder.name} RENAME TO ${builder.getRename()}`
+    }
+
+    public renderRecreateIndex(builder: IndexBuilder): string {
+        return `${this.renderDropIndex(builder)};\n\n${this.renderCreateIndex(builder)}`
+    }
+
+    public renderDropColumn(builder: ColumnBuilder): string {
+        const parts = [
+            `ALTER TABLE ${builder.parent.name} ${builder.parent.isSkippedIfExisting() ? 'IF EXISTS ' : ''}`,
+            ` DROP COLUMN ${builder.isSkippedIfExisting() ? 'IF EXISTS ' : ''}${builder.name}`,
+        ]
+
+        return parts.join('\n')
+    }
 }
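Since renderTransaction() above wraps its parts in BEGIN/COMMIT and joins them with ';\n\n', a committed schema ends up as one multi-statement script. A hedged sketch of the shape, with the individual statements elided (the dialect instance and the statement text here are assumptions):

    const sql = dialect.renderTransaction([
        'CREATE TABLE examples ( ... )',
        'CREATE INDEX examples_idx ...',
    ])
    // => 'BEGIN;\n\nCREATE TABLE examples ( ... );\n\nCREATE INDEX examples_idx ...;\n\nCOMMIT'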
@@ -1,6 +1,7 @@
 import {Constraint} from '../types'
 import {AbstractBuilder} from '../builder/AbstractBuilder'
 import {AppClass} from '../../lifecycle/AppClass'
+import {ColumnBuilder, IndexBuilder, TableBuilder} from '../schema/TableBuilder'

 /**
  * A value which can be escaped to be interpolated into an SQL query.
@@ -160,10 +161,141 @@ export abstract class SQLDialect extends AppClass {
      * This function should escape the values before they are included in the query string.
      *
      * @example
+     * ```ts
      * dialect.renderUpdateSet({field1: 'value', field2: 45})
      * // => "SET field1 = 'value', field2 = 45"
+     * ```
      *
      * @param data
      */
     public abstract renderUpdateSet(data: {[key: string]: EscapeValue}): string;
+
+    /**
+     * Given a table schema-builder, render a `CREATE TABLE...` query.
+     * @param builder
+     */
+    public abstract renderCreateTable(builder: TableBuilder): string;
+
+    /**
+     * Given a table schema-builder, render an `ALTER TABLE...` query.
+     * @param builder
+     */
+    public abstract renderAlterTable(builder: TableBuilder): string;
+
+    /**
+     * Given a table schema-builder, render a `DROP TABLE...` query.
+     * @param builder
+     */
+    public abstract renderDropTable(builder: TableBuilder): string;
+
+    /**
+     * Render the table-column definitions for the table defined by
+     * the given schema-builder.
+     *
+     * @example
+     * ```ts
+     * dialect.renderTableColumns(builder)
+     * // => ['col1 varchar(100) NULL', 'col2 serial NOT NULL']
+     * ```
+     *
+     * @param builder
+     */
+    public abstract renderTableColumns(builder: TableBuilder): string[];
+
+    /**
+     * Given an index schema-builder, render a `CREATE INDEX...` query.
+     * @param builder
+     */
+    public abstract renderCreateIndex(builder: IndexBuilder): string;
+
+    /**
+     * Given a column schema-builder, render an `ALTER TABLE... DROP COLUMN...` query.
+     * @param builder
+     */
+    public abstract renderDropColumn(builder: ColumnBuilder): string;
+
+    /**
+     * Given an index schema-builder, render a `DROP INDEX...` query.
+     * @param builder
+     */
+    public abstract renderDropIndex(builder: IndexBuilder): string;
+
+    /**
+     * Given an index schema-builder, render an `ALTER INDEX... RENAME...` query.
+     * @param builder
+     */
+    public abstract renderRenameIndex(builder: IndexBuilder): string;
+
+    /**
+     * Given an index schema-builder, render either an `ALTER INDEX...` query,
+     * or a `DROP INDEX...; CREATE INDEX...` query.
+     * @param builder
+     */
+    public abstract renderRecreateIndex(builder: IndexBuilder): string;
+
+    /**
+     * Given a series of fully-formed queries, render them as a single transaction.
+     * @example
+     * ```ts
+     * const queries = [
+     *     'SELECT * FROM a',
+     *     'UPDATE b SET col = 123',
+     * ]
+     *
+     * dialect.renderTransaction(queries)
+     * // => 'BEGIN; SELECT * FROM a; UPDATE b SET col = 123; COMMIT;'
+     * ```
+     * @param queries
+     */
+    public abstract renderTransaction(queries: string[]): string;
+
+    /**
+     * Given a table schema-builder, render a series of queries as a transaction
+     * that apply the given schema to database.
+     * @todo handle constraints better - ConstraintBuilder
+     * @param builder
+     */
+    public renderCommitSchemaTransaction(builder: TableBuilder): string {
+        if ( builder.isDropping() || builder.isDroppingIfExists() ) {
+            // If we're dropping the table, just return the DROP TABLE query
+            return this.renderTransaction([
+                this.renderDropTable(builder),
+            ])
+        }
+
+        // Render the queries to create/update/drop indexes
+        const indexes = Object.values(builder.getIndexes())
+            .filter(index => !index.isExisting() || index.isDirty())
+            .map(index => {
+                if ( index.isDropping() || index.isDroppingIfExists() ) {
+                    return this.renderDropIndex(index)
+                }
+
+                if ( index.isExisting() ) {
+                    // The index was changed in the schema, but exists in the DB
+                    return this.renderRecreateIndex(index)
+                }
+
+                return this.renderCreateIndex(index)
+            })
+
+        // Render the queries to rename indexes AFTER the above operations
+        const renamedIndexes = Object.values(builder.getIndexes())
+            .filter(idx => idx.getRename())
+            .map(x => this.renderRenameIndex(x))
+
+        let parts: string[] = []
+
+        // Render the CREATE/ALTER TABLE query
+        if ( !builder.isExisting() && builder.isDirty() ) {
+            parts.push(this.renderCreateTable(builder))
+        } else if ( builder.isExisting() && builder.isDirty() ) {
+            parts.push(this.renderAlterTable(builder))
+        }
+
+        // Render the various schema queries as a single transaction
+        parts = parts.concat(...indexes)
+        parts = parts.concat(...renamedIndexes)
+        return this.renderTransaction(parts)
+    }
 }
src/orm/directive/CreateMigrationDirective.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import {Directive, OptionDefinition} from '../../cli'
import {Injectable} from '../../di'
import {stringToPascal} from '../../util'
import {templateMigration} from '../template/migration'

/**
 * CLI directive that creates migration classes from template.
 */
@Injectable()
export class CreateMigrationDirective extends Directive {
    getDescription(): string {
        return 'create a new migration'
    }

    getKeywords(): string | string[] {
        return ['create-migration', 'make-migration']
    }

    getOptions(): OptionDefinition[] {
        return [
            '{description} | Description of what the migration does',
        ]
    }

    getHelpText(): string {
        return [
            'Creates a new migration file in `src/app/migrations`.',
            'To use, specify a string describing what the migration does. For example:',
            './ex create-migration "Add version column to sessions table"',
        ].join('\n\n')
    }

    async handle(): Promise<void> {
        const description = this.option('description')
        const className = `${stringToPascal(description)}Migration`
        const fileName = `${(new Date()).toISOString()}_${className}.migration.ts`
        const path = this.app().path('..', 'src', 'app', 'migrations', fileName)

        // Create the migrations directory, if it doesn't already exist
        await path.concat('..').mkdir()

        // Render the template
        const rendered = await templateMigration.render(className, className, path)
        await path.write(rendered)

        this.success(`Created migration: ${className}`)
    }
}
src/orm/directive/MigrateDirective.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
import {Directive, OptionDefinition} from '../../cli'
import {Container, Inject, Injectable} from '../../di'
import {EventBus} from '../../event/EventBus'
import {Migrator} from '../migrations/Migrator'
import {Migrations} from '../services/Migrations'
import {ApplyingMigrationEvent} from '../migrations/events/ApplyingMigrationEvent'
import {AppliedMigrationEvent} from '../migrations/events/AppliedMigrationEvent'
import {EventSubscription} from '../../event/types'
import {NothingToMigrateError} from '../migrations/NothingToMigrateError'

/**
 * CLI directive that applies migrations using the default Migrator.
 * @fixme Support dry run mode
 */
@Injectable()
export class MigrateDirective extends Directive {
    @Inject()
    protected readonly bus!: EventBus

    @Inject('injector')
    protected readonly injector!: Container

    /** Event bus subscriptions. */
    protected subscriptions: EventSubscription[] = []

    getKeywords(): string | string[] {
        return ['migrate']
    }

    getDescription(): string {
        return 'apply pending migrations'
    }

    getOptions(): OptionDefinition[] {
        return [
            '--package -p {name} | apply migrations for a specific namespace',
            '--identifier -i {name} | apply a specific migration, by identifier',
        ]
    }

    getHelpText(): string {
        return [
            'Migrations are single-run code patches used to track changes to things like database schemata.',
            '',
            'You can create migrations in your app using the ./ex command and they can be applied and rolled-back.',
            '',
            './ex migrate:create "Add version column to sessions table"',
            '',
            'Modules and packages can also register their own migrations. These are run by default.',
            '',
            'To run the migrations for a specific package, and no others, use the --package option. Example:',
            '',
            './ex migrate --package @extollo',
            '',
        ].join('\n')
    }

    async handle(): Promise<void> {
        await this.registerListeners()

        const namespace = this.option('package')
        const identifier = this.option('identifier')

        let identifiers
        if ( namespace ) {
            identifiers = (this.injector.make<Migrations>(Migrations))
                .all(namespace)
                .map(id => `${namespace}:${id}`)
        }

        if ( identifier ) {
            if ( !identifiers ) {
                identifiers = [identifier]
            }

            identifiers = identifiers.filter(x => x === identifier)
        }

        let error
        try {
            await (this.injector.make<Migrator>(Migrator)).migrate(identifiers)
        } catch (e) {
            if ( e instanceof NothingToMigrateError ) {
                this.info(e.message)
            } else {
                error = e
                this.error(e)
            }
        } finally {
            await this.removeListeners()
        }

        if ( error ) {
            throw error
        }
    }

    /**
     * Register event bus listeners to print messages for the user.
     * @protected
     */
    protected async registerListeners(): Promise<void> {
        this.subscriptions.push(await this.bus.subscribe(ApplyingMigrationEvent, event => {
            this.info(`Applying migration ${event.migration.identifier}...`)
        }))

        this.subscriptions.push(await this.bus.subscribe(AppliedMigrationEvent, event => {
            this.success(`Applied migration: ${event.migration.identifier}`)
        }))
    }

    /** Remove event bus listeners before finish. */
    protected async removeListeners(): Promise<void> {
        await Promise.all(this.subscriptions.map(x => x.unsubscribe()))
        this.subscriptions = []
    }
}
src/orm/directive/RollbackDirective.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import {Directive, OptionDefinition} from '../../cli'
import {Container, Inject, Injectable} from '../../di'
import {EventBus} from '../../event/EventBus'
import {Migrator} from '../migrations/Migrator'
import {Migrations} from '../services/Migrations'
import {RollingBackMigrationEvent} from '../migrations/events/RollingBackMigrationEvent'
import {RolledBackMigrationEvent} from '../migrations/events/RolledBackMigrationEvent'
import {EventSubscription} from '../../event/types'
import {NothingToMigrateError} from '../migrations/NothingToMigrateError'

/**
 * CLI directive that undoes applied migrations using the default Migrator.
 * @fixme Support dry run mode
 */
@Injectable()
export class RollbackDirective extends Directive {
    @Inject()
    protected readonly bus!: EventBus

    @Inject('injector')
    protected readonly injector!: Container

    @Inject()
    protected readonly migrations!: Migrations

    /** Event bus subscriptions. */
    protected subscriptions: EventSubscription[] = []

    getKeywords(): string | string[] {
        return ['rollback']
    }

    getDescription(): string {
        return 'roll-back applied migrations'
    }

    getOptions(): OptionDefinition[] {
        return [
            '--identifier -i {name} | roll-back a specific migration, by identifier',
        ]
    }

    getHelpText(): string {
        return [
            'Use this command to undo one or more migrations that were applied.',
            '',
            'By default, the command will undo all of the migrations applied the last time the migrate command was run.',
            '',
            'To undo a specific migration, pass its identifier using the --identifier option.',
            '',
        ].join('\n')
    }

    async handle(): Promise<void> {
        await this.registerListeners()

        const identifier = this.option('identifier')

        let identifiers
        if ( identifier ) {
            identifiers = [identifier]
        }

        let error
        try {
            await (this.injector.make<Migrator>(Migrator)).rollback(identifiers)
        } catch (e) {
            if ( e instanceof NothingToMigrateError ) {
                this.info(e.message)
            } else {
                error = e
                this.error(e)
            }
        } finally {
            await this.removeListeners()
        }

        if ( error ) {
            throw error
        }
    }

    /**
     * Register event-bus listeners to print messages for the user.
     * @protected
     */
    protected async registerListeners(): Promise<void> {
        this.subscriptions.push(await this.bus.subscribe(RollingBackMigrationEvent, event => {
            this.info(`Rolling-back migration ${event.migration.identifier}...`)
        }))

        this.subscriptions.push(await this.bus.subscribe(RolledBackMigrationEvent, event => {
            this.success(`Rolled-back migration: ${event.migration.identifier}`)
        }))
    }

    /** Remove event bus listeners before finish. */
    protected async removeListeners(): Promise<void> {
        await Promise.all(this.subscriptions.map(x => x.unsubscribe()))
        this.subscriptions = []
    }
}
@@ -18,9 +18,6 @@ export * from './model/ModelResultIterable'
 export * from './model/events'
 export * from './model/Model'

-export * from './services/Database'
-export * from './services/Models'
-
 export * from './support/SessionModel'
 export * from './support/ORMSession'
 export * from './support/CacheModel'
@@ -28,3 +25,25 @@ export * from './support/ORMCache'

 export * from './DatabaseService'
 export * from './types'
+
+export * from './schema/TableBuilder'
+export * from './schema/Schema'
+export * from './schema/PostgresSchema'
+
+export * from './migrations/NothingToMigrateError'
+export * from './migrations/events/ApplyingMigrationEvent'
+export * from './migrations/events/AppliedMigrationEvent'
+export * from './migrations/events/RollingBackMigrationEvent'
+export * from './migrations/events/RolledBackMigrationEvent'
+export * from './migrations/Migration'
+export * from './migrations/Migrator'
+export * from './migrations/MigratorFactory'
+export * from './migrations/DatabaseMigrator'
+
+export * from './services/Database'
+export * from './services/Models'
+export * from './services/Migrations'
+
+export * from './directive/CreateMigrationDirective'
+export * from './directive/MigrateDirective'
+export * from './directive/RollbackDirective'
src/orm/migrations/DatabaseMigrator.ts (new file, 179 lines)
@@ -0,0 +1,179 @@
import {Container, Inject, Injectable} from '../../di'
import {Migrator} from './Migrator'
import {DatabaseService} from '../DatabaseService'
import {FieldType} from '../types'
import {Migration} from './Migration'
import {Builder} from '../builder/Builder'

/**
 * Migrator implementation that tracks applied migrations in a database table.
 * @todo allow configuring more of this
 */
@Injectable()
export class DatabaseMigrator extends Migrator {
    @Inject()
    protected readonly db!: DatabaseService

    @Inject('injector')
    protected readonly injector!: Container

    /** True if we've initialized the migrator. */
    protected initialized = false

    public async initialize(): Promise<void> {
        await super.initialize()

        if ( this.initialized ) {
            return
        }

        const schema = this.db.get().schema()
        if ( !(await schema.hasTable('migrations')) ) {
            const table = await schema.table('migrations')

            table.primaryKey('id', FieldType.serial).required()

            table.column('identifier')
                .type(FieldType.varchar)
                .required()

            table.column('applygroup')
                .type(FieldType.integer)
                .required()

            table.column('applydate')
                .type(FieldType.timestamp)
                .required()

            await schema.commit(table)
        }

        this.initialized = true
    }

    async has(migration: Migration): Promise<boolean> {
        return this.builder()
            .connection('default')
            .select('id')
            .from('migrations')
            .where('identifier', '=', migration.identifier)
            .exists()
    }

    async markApplied(migrations: Migration | Migration[], applyDate: Date = new Date()): Promise<void> {
        if ( !Array.isArray(migrations) ) {
            migrations = [migrations]
        }

        const applyGroup = await this.getNextGroupIdentifier()
        const rows = migrations.map(migration => {
            return {
                applygroup: applyGroup,
                applydate: applyDate,
                identifier: migration.identifier,
            }
        })

        await this.builder()
            .connection('default')
            .table('migrations')
            .insert(rows)
    }

    async unmarkApplied(migrations: Migration | Migration[]): Promise<void> {
        if ( !Array.isArray(migrations) ) {
            migrations = [migrations]
        }

        const identifiers = migrations.map(migration => migration.identifier)

        await this.builder()
            .connection('default')
            .table('migrations')
            .whereIn('identifier', identifiers)
            .delete()
    }

    async getLastApplyGroup(): Promise<string[]> {
        const applyGroup = await this.builder()
            .connection('default')
            .select('applygroup')
            .from('migrations')
            .get()
            .max<number>('applygroup')

        return this.builder()
            .connection('default')
            .select('identifier')
            .from('migrations')
            .where('applygroup', '=', applyGroup)
            .get()
            .asyncPipe()
            .tap(coll => {
                return coll.pluck<string>('identifier')
            })
            .tap(coll => {
                return coll.all()
            })
            .resolve()
    }

    /**
     * Helper method to look up the next `applygroup` that should be used.
     * @protected
     */
    protected async getNextGroupIdentifier(): Promise<number> {
        const current = await this.builder()
            .connection('default')
            .select('applygroup')
            .from('migrations')
            .get()
            .max<number>('applygroup')

        return (current ?? 0) + 1
    }

    /**
     * Given a list of migration identifiers, filter out those that have been applied.
     * @override to make this more efficient
     * @param identifiers
     * @protected
     */
    protected async filterAppliedMigrations(identifiers: string[]): Promise<string[]> {
        const existing = await this.builder()
            .connection('default')
            .select('identifier')
            .from('migrations')
            .whereIn('identifier', identifiers)
            .get()
            .pluck<string>('identifier')

        return identifiers.filter(id => !existing.includes(id))
    }

    /**
     * Given a list of migration identifiers, filter out those that have not been applied.
     * @override to make this more efficient
     * @param identifiers
     * @protected
     */
    protected async filterPendingMigrations(identifiers: string[]): Promise<string[]> {
        const existing = await this.builder()
            .connection('default')
            .select('identifier')
            .from('migrations')
            .whereIn('identifier', identifiers)
            .get()
            .pluck<string>('identifier')

        return existing.all()
    }

    /**
     * Get a query builder instance.
     * @protected
     */
    protected builder(): Builder {
        return this.injector.make<Builder>(Builder)
    }
}
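The AsyncPipe mentioned in the commit title shows up in getLastApplyGroup() above. A condensed sketch of that pattern, under the assumption (taken from that method) that get() yields a collection-like result exposing asyncPipe(), that each tap() feeds its return value to the next step, and that resolve() awaits the final value:

    const identifiers = await this.builder()
        .connection('default')
        .select('identifier')
        .from('migrations')
        .where('applygroup', '=', applyGroup)
        .get()
        .asyncPipe()
        .tap(coll => coll.pluck<string>('identifier'))  // collection of rows -> collection of identifiers
        .tap(coll => coll.all())                        // collection -> plain array
        .resolve()                                      // await the piped chain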
src/orm/migrations/Migration.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import {Injectable} from '../../di'
import {Awaitable} from '../../util'

/**
 * Abstract base-class for one-time migrations.
 */
@Injectable()
export abstract class Migration {
    /** Set by the Migrations unit on load. */
    protected migrationIdentifier!: string

    /**
     * Sets the migration identifier.
     * This is used internally when the Migrations service loads
     * the migration files to determine the ID from the file-name.
     * It shouldn't be used externally.
     * @param name
     */
    public setMigrationIdentifier(name: string): void {
        this.migrationIdentifier = name
    }

    /**
     * Get the unique identifier of this migration.
     */
    public get identifier(): string {
        return this.migrationIdentifier
    }

    /**
     * Apply the migration.
     */
    abstract up(): Awaitable<void>

    /**
     * Undo the migration.
     */
    abstract down(): Awaitable<void>
}
src/orm/migrations/Migrator.ts (new file, 295 lines)
@@ -0,0 +1,295 @@
import {Container, Inject, Injectable} from '../../di'
import {Awaitable, collect, ErrorWithContext} from '../../util'
import {Migration} from './Migration'
import {Migrations} from '../services/Migrations'
import {EventBus} from '../../event/EventBus'
import {ApplyingMigrationEvent} from './events/ApplyingMigrationEvent'
import {AppliedMigrationEvent} from './events/AppliedMigrationEvent'
import {RollingBackMigrationEvent} from './events/RollingBackMigrationEvent'
import {RolledBackMigrationEvent} from './events/RolledBackMigrationEvent'
import {NothingToMigrateError} from './NothingToMigrateError'

/**
 * Manages single-run patches/migrations.
 */
@Injectable()
export abstract class Migrator {
    @Inject()
    protected readonly migrations!: Migrations

    @Inject()
    protected readonly bus!: EventBus

    @Inject('injector')
    protected readonly injector!: Container

    /**
     * Should resolve true if the given migration has already been applied.
     * @param migration
     */
    public abstract has(migration: Migration): Awaitable<boolean>

    /**
     * Should mark the given migrations as being applied.
     *
     * If a date is specified, then that is the timestamp when the migrations
     * were applied, otherwise, use `new Date()`.
     *
     * @param migrations
     * @param date
     */
    public abstract markApplied(migrations: Migration | Migration[], date?: Date): Awaitable<void>

    /**
     * Should un-mark the given migrations as being applied.
     * @param migration
     */
    public abstract unmarkApplied(migration: Migration | Migration[]): Awaitable<void>

    /**
     * Get the identifiers of the last group of migrations that were applied.
     */
    public abstract getLastApplyGroup(): Awaitable<string[]>

    /**
     * Do any initial setup required to get the migrator ready.
     * This can be overridden by implementation classes to do any necessary setup.
     */
    public initialize(): Awaitable<void> {} // eslint-disable-line @typescript-eslint/no-empty-function

    /**
     * Apply pending migrations.
     *
     * If identifiers are specified, only the pending migrations with those
     * identifiers are applied. If none are specified, all pending migrations
     * will be applied.
     *
     * @param identifiers
     */
    public async migrate(identifiers?: string[]): Promise<void> {
        await this.initialize()

        if ( !identifiers ) {
            identifiers = this.getAllMigrationIdentifiers()
        }

        identifiers = (await this.filterAppliedMigrations(identifiers)).sort()
        if ( !identifiers.length ) {
            throw new NothingToMigrateError()
        }

        const migrations = collect(identifiers)
            .map(id => {
                const migration = this.migrations.get(id)

                if ( !migration ) {
                    throw new ErrorWithContext(`Unable to find migration with identifier: ${id}`, {
                        identifier: id,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return migration
|
||||||
|
})
|
||||||
|
|
||||||
|
await migrations.promiseMap(migration => {
|
||||||
|
return this.apply(migration)
|
||||||
|
})
|
||||||
|
|
||||||
|
await this.markApplied(migrations.all())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rollback applied migrations.
|
||||||
|
*
|
||||||
|
* If specified, only applied migrations with the given identifiers will
|
||||||
|
* be rolled back. If not specified, then the last "batch" of applied
|
||||||
|
* migrations will be rolled back.
|
||||||
|
*
|
||||||
|
* @param identifiers
|
||||||
|
*/
|
||||||
|
public async rollback(identifiers?: string[]): Promise<void> {
|
||||||
|
await this.initialize()
|
||||||
|
|
||||||
|
if ( !identifiers ) {
|
||||||
|
identifiers = await this.getLastApplyGroup()
|
||||||
|
}
|
||||||
|
|
||||||
|
identifiers = (await this.filterPendingMigrations(identifiers)).sort()
|
||||||
|
if ( !identifiers.length ) {
|
||||||
|
throw new NothingToMigrateError()
|
||||||
|
}
|
||||||
|
|
||||||
|
const migrations = collect(identifiers)
|
||||||
|
.map(id => {
|
||||||
|
const migration = this.migrations.get(id)
|
||||||
|
|
||||||
|
if ( !migration ) {
|
||||||
|
throw new ErrorWithContext(`Unable to find migration with identifier: ${id}`, {
|
||||||
|
identifier: id,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return migration
|
||||||
|
})
|
||||||
|
|
||||||
|
await migrations.promiseMap(migration => {
|
||||||
|
return this.undo(migration)
|
||||||
|
})
|
||||||
|
|
||||||
|
await this.unmarkApplied(migrations.all())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply a single migration.
|
||||||
|
* @param migration
|
||||||
|
*/
|
||||||
|
public async apply(migration: Migration): Promise<void> {
|
||||||
|
await this.initialize()
|
||||||
|
|
||||||
|
await this.applying(migration)
|
||||||
|
|
||||||
|
await migration.up()
|
||||||
|
|
||||||
|
await this.applied(migration)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rollback a single migration.
|
||||||
|
* @param migration
|
||||||
|
*/
|
||||||
|
public async undo(migration: Migration): Promise<void> {
|
||||||
|
await this.initialize()
|
||||||
|
|
||||||
|
await this.rollingBack(migration)
|
||||||
|
|
||||||
|
await migration.down()
|
||||||
|
|
||||||
|
await this.rolledBack(migration)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all registered migrations, by their string-form identifiers.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected getAllMigrationIdentifiers(): string[] {
|
||||||
|
return collect<string>(this.migrations.namespaces())
|
||||||
|
.map(nsp => {
|
||||||
|
return this.migrations.all(nsp)
|
||||||
|
.map(id => `${nsp}:${id}`)
|
||||||
|
})
|
||||||
|
.tap(coll => {
|
||||||
|
// non-namespaced migrations
|
||||||
|
coll.push(this.migrations.all())
|
||||||
|
return coll
|
||||||
|
})
|
||||||
|
.reduce((current, item) => {
|
||||||
|
return current.concat(item)
|
||||||
|
}, [])
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given a list of migration identifiers, filter out those that have been applied.
|
||||||
|
* @param identifiers
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async filterAppliedMigrations(identifiers: string[]): Promise<string[]> {
|
||||||
|
return collect(identifiers)
|
||||||
|
.partialMap(identifier => {
|
||||||
|
const migration = this.migrations.get(identifier)
|
||||||
|
if ( migration ) {
|
||||||
|
return {
|
||||||
|
identifier,
|
||||||
|
migration,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.asyncPipe()
|
||||||
|
.tap(coll => {
|
||||||
|
return coll.promiseMap(async group => {
|
||||||
|
return {
|
||||||
|
...group,
|
||||||
|
has: await this.has(group.migration),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.tap(coll => {
|
||||||
|
return coll.filter(group => !group.has)
|
||||||
|
.pluck<string>('identifier')
|
||||||
|
.all()
|
||||||
|
})
|
||||||
|
.resolve()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given a list of migration identifiers, filter out those that have not been applied.
|
||||||
|
* @param identifiers
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async filterPendingMigrations(identifiers: string[]): Promise<string[]> {
|
||||||
|
return collect(identifiers)
|
||||||
|
.partialMap(identifier => {
|
||||||
|
const migration = this.migrations.get(identifier)
|
||||||
|
if ( migration ) {
|
||||||
|
return {
|
||||||
|
identifier,
|
||||||
|
migration,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.asyncPipe()
|
||||||
|
.tap(coll => {
|
||||||
|
return coll.promiseMap(async group => {
|
||||||
|
return {
|
||||||
|
...group,
|
||||||
|
has: await this.has(group.migration),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.tap(coll => {
|
||||||
|
return coll.filter(group => group.has)
|
||||||
|
.pluck<string>('identifier')
|
||||||
|
.all()
|
||||||
|
})
|
||||||
|
.resolve()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fire the ApplyingMigrationEvent.
|
||||||
|
* @param migration
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async applying(migration: Migration): Promise<void> {
|
||||||
|
const event = <ApplyingMigrationEvent> this.injector.make(ApplyingMigrationEvent, migration)
|
||||||
|
await this.bus.dispatch(event)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fire the AppliedMigrationEvent.
|
||||||
|
* @param migration
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async applied(migration: Migration): Promise<void> {
|
||||||
|
const event = <AppliedMigrationEvent> this.injector.make(AppliedMigrationEvent, migration)
|
||||||
|
await this.bus.dispatch(event)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fire the RollingBackMigrationEvent.
|
||||||
|
* @param migration
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async rollingBack(migration: Migration): Promise<void> {
|
||||||
|
const event = <RollingBackMigrationEvent> this.injector.make(RollingBackMigrationEvent, migration)
|
||||||
|
await this.bus.dispatch(event)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fire the RolledBackMigrationEvent.
|
||||||
|
* @param migration
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async rolledBack(migration: Migration): Promise<void> {
|
||||||
|
const event = <RolledBackMigrationEvent> this.injector.make(RolledBackMigrationEvent, migration)
|
||||||
|
await this.bus.dispatch(event)
|
||||||
|
}
|
||||||
|
}
|
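Since MigratorFactory (below) binds this abstract Migrator token in the container, calling code can depend on Migrator directly. A rough usage sketch, assuming access to the application container; the helper function is illustrative and the import paths assume the snippet sits at the project root, so it is not part of this change.

```typescript
// Sketch only: `container` is assumed to be the application's DI Container.
import {Migrator} from './src/orm/migrations/Migrator'
import {NothingToMigrateError} from './src/orm/migrations/NothingToMigrateError'

async function applyPendingMigrations(container: { make<T>(token: unknown): T }): Promise<void> {
    const migrator = container.make<Migrator>(Migrator)

    try {
        // With no identifiers, every pending migration is applied in sorted order.
        await migrator.migrate()
    } catch (e: unknown) {
        if ( e instanceof NothingToMigrateError ) {
            return // nothing pending -- not an error for the caller
        }

        throw e
    }
}

// Rolling back the most recent apply-group would look like:
//   await container.make<Migrator>(Migrator).rollback()
```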
81 src/orm/migrations/MigratorFactory.ts Normal file
@ -0,0 +1,81 @@
import {
    AbstractFactory,
    DependencyRequirement,
    PropertyDependency,
    isInstantiable,
    DEPENDENCY_KEYS_METADATA_KEY,
    DEPENDENCY_KEYS_PROPERTY_METADATA_KEY, Instantiable, Injectable, Inject,
} from '../../di'
import {Collection, ErrorWithContext} from '../../util'
import {Logging} from '../../service/Logging'
import {Config} from '../../service/Config'
import {Migrator} from './Migrator'
import {DatabaseMigrator} from './DatabaseMigrator'

/**
 * A dependency injection factory that matches the abstract Migrator class
 * and produces an instance of the configured migrator implementation.
 */
@Injectable()
export class MigratorFactory extends AbstractFactory<Migrator> {
    @Inject()
    protected readonly logging!: Logging

    @Inject()
    protected readonly config!: Config

    constructor() {
        super({})
    }

    produce(): Migrator {
        return new (this.getMigratorClass())()
    }

    match(something: unknown): boolean {
        return something === Migrator
    }

    getDependencyKeys(): Collection<DependencyRequirement> {
        const meta = Reflect.getMetadata(DEPENDENCY_KEYS_METADATA_KEY, this.getMigratorClass())
        if ( meta ) {
            return meta
        }

        return new Collection<DependencyRequirement>()
    }

    getInjectedProperties(): Collection<PropertyDependency> {
        const meta = new Collection<PropertyDependency>()
        let currentToken = this.getMigratorClass()

        do {
            const loadedMeta = Reflect.getMetadata(DEPENDENCY_KEYS_PROPERTY_METADATA_KEY, currentToken)
            if ( loadedMeta ) {
                meta.concat(loadedMeta)
            }
            currentToken = Object.getPrototypeOf(currentToken)
        } while (Object.getPrototypeOf(currentToken) !== Function.prototype && Object.getPrototypeOf(currentToken) !== Object.prototype)

        return meta
    }

    /**
     * Return the instantiable class of the configured migrator backend.
     * @protected
     * @return Instantiable<Migrator>
     */
    protected getMigratorClass(): Instantiable<Migrator> {
        const MigratorClass = this.config.get('database.migrations.driver', DatabaseMigrator)

        if ( !isInstantiable(MigratorClass) || !(MigratorClass.prototype instanceof Migrator) ) {
            const e = new ErrorWithContext('Provided migration driver class does not extend from @extollo/lib.Migrator')
            e.context = {
                configKey: 'database.migrations.driver',
                class: MigratorClass.toString(),
            }
            throw e
        }

        return MigratorClass
    }
}
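Because the factory falls back to DatabaseMigrator, the backend is swappable purely through configuration. A sketch of what the relevant config entry could look like; the file location, the surrounding shape of the `database` config, and the `@extollo/lib` re-export are assumptions.

```typescript
// e.g. app/configs/database.config.ts (path is an assumption)
import {DatabaseMigrator} from '@extollo/lib' // assumes the class is re-exported by the package

export default {
    connections: {
        // ...connection configs consumed by the Database unit...
    },

    migrations: {
        // Any instantiable class extending Migrator is accepted;
        // MigratorFactory validates this and defaults to DatabaseMigrator.
        driver: DatabaseMigrator,
    },
}
```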
14 src/orm/migrations/NothingToMigrateError.ts Normal file
@ -0,0 +1,14 @@
import {ErrorWithContext} from '../../util'

/**
 * Error thrown when the migrator is run, but no migrations need
 * to be applied/rolled-back.
 */
export class NothingToMigrateError extends ErrorWithContext {
    constructor(
        message = 'There is nothing to migrate',
        context?: {[key: string]: any},
    ) {
        super(message, context)
    }
}
8 src/orm/migrations/events/AppliedMigrationEvent.ts Normal file
@ -0,0 +1,8 @@
import {Injectable} from '../../../di'
import {MigrationEvent} from './MigrationEvent'

/**
 * Event fired after a migration is applied.
 */
@Injectable()
export class AppliedMigrationEvent extends MigrationEvent {}
8 src/orm/migrations/events/ApplyingMigrationEvent.ts Normal file
@ -0,0 +1,8 @@
import {Injectable} from '../../../di'
import {MigrationEvent} from './MigrationEvent'

/**
 * Event fired before a migration is applied.
 */
@Injectable()
export class ApplyingMigrationEvent extends MigrationEvent {}
49 src/orm/migrations/events/MigrationEvent.ts Normal file
@ -0,0 +1,49 @@
import {Event} from '../../../event/Event'
import {Migration} from '../Migration'
import {Inject, Injectable} from '../../../di'
import {Migrations} from '../../services/Migrations'
import {ErrorWithContext} from '../../../util'

/**
 * Generic base-class for migration-related events.
 */
@Injectable()
export abstract class MigrationEvent extends Event {
    @Inject()
    protected readonly migrations!: Migrations

    /** The migration relevant to this event. */
    private internalMigration: Migration

    /**
     * Get the relevant migration.
     */
    public get migration(): Migration {
        return this.internalMigration
    }

    constructor(
        migration: Migration,
    ) {
        super()
        this.internalMigration = migration
    }

    dehydrate(): {identifier: string} {
        return {
            identifier: this.migration.identifier,
        }
    }

    rehydrate(state: {identifier: string}): void {
        const migration = this.migrations.get(state.identifier)

        if ( !migration ) {
            throw new ErrorWithContext(`Unable to find migration with identifier: ${state.identifier}`, {
                identifier: state.identifier,
            })
        }

        this.internalMigration = migration
    }
}
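Since MigrationEvent only serializes the migration's identifier, a dehydrate/rehydrate round trip relies on the Migrations service to resolve the instance again. A small sketch of that round trip; the function, the identifier value, and the import paths (which assume the snippet lives alongside these event files) are illustrative.

```typescript
// Sketch: round-tripping a migration event's serialized state.
import {Container} from '../../../di'
import {Migration} from '../Migration'
import {AppliedMigrationEvent} from './AppliedMigrationEvent'

function roundTrip(injector: Container, migration: Migration): void {
    const event = <AppliedMigrationEvent> injector.make(AppliedMigrationEvent, migration)

    // Only the identifier survives serialization, e.g. { identifier: 'auth:create-users-table' }
    // (the namespaced form produced by the Migrations service; the exact value is illustrative).
    const state = event.dehydrate()

    // rehydrate() re-resolves the Migration by identifier via the Migrations service and
    // throws an ErrorWithContext if it is no longer registered.
    event.rehydrate(state)
}
```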
8 src/orm/migrations/events/RolledBackMigrationEvent.ts Normal file
@ -0,0 +1,8 @@
import {Injectable} from '../../../di'
import {MigrationEvent} from './MigrationEvent'

/**
 * Event fired after a migration has been rolled-back.
 */
@Injectable()
export class RolledBackMigrationEvent extends MigrationEvent {}
8 src/orm/migrations/events/RollingBackMigrationEvent.ts Normal file
@ -0,0 +1,8 @@
import {Injectable} from '../../../di'
import {MigrationEvent} from './MigrationEvent'

/**
 * Event fired before a migration is rolled back.
 */
@Injectable()
export class RollingBackMigrationEvent extends MigrationEvent {}
345
src/orm/schema/PostgresSchema.ts
Normal file
345
src/orm/schema/PostgresSchema.ts
Normal file
@ -0,0 +1,345 @@
|
|||||||
|
import {Schema} from './Schema'
|
||||||
|
import {Awaitable, collect, Collection} from '../../util'
|
||||||
|
import {ConstraintType, TableBuilder} from './TableBuilder'
|
||||||
|
import {PostgresConnection} from '../connection/PostgresConnection'
|
||||||
|
import {Builder} from '../builder/Builder'
|
||||||
|
import {raw} from '../dialect/SQLDialect'
|
||||||
|
import {QueryRow} from '../types'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A PostgreSQL-compatible schema implementation.
|
||||||
|
*/
|
||||||
|
export class PostgresSchema extends Schema {
|
||||||
|
constructor(
|
||||||
|
connection: PostgresConnection,
|
||||||
|
public readonly schema: string = 'public',
|
||||||
|
) {
|
||||||
|
super(connection)
|
||||||
|
}
|
||||||
|
|
||||||
|
hasColumn(table: string, name: string): Awaitable<boolean> {
|
||||||
|
return (new Builder()).connection(this.connection)
|
||||||
|
.select(raw('*'))
|
||||||
|
.from('information_schema.columns')
|
||||||
|
.where('table_schema', '=', this.schema)
|
||||||
|
.where('table_name', '=', table)
|
||||||
|
.where('column_name', '=', name)
|
||||||
|
.exists()
|
||||||
|
}
|
||||||
|
|
||||||
|
async hasColumns(table: string, name: string[]): Promise<boolean> {
|
||||||
|
const num = await (new Builder()).connection(this.connection)
|
||||||
|
.select(raw('*'))
|
||||||
|
.from('information_schema.columns')
|
||||||
|
.where('table_schema', '=', this.schema)
|
||||||
|
.where('table_name', '=', table)
|
||||||
|
.whereIn('column_name', name)
|
||||||
|
.get()
|
||||||
|
.count()
|
||||||
|
|
||||||
|
return num === name.length
|
||||||
|
}
|
||||||
|
|
||||||
|
hasTable(name: string): Awaitable<boolean> {
|
||||||
|
return (new Builder()).connection(this.connection)
|
||||||
|
.select(raw('*'))
|
||||||
|
.from('information_schema.tables')
|
||||||
|
.where('table_schema', '=', this.schema)
|
||||||
|
.where('table_name', '=', name)
|
||||||
|
.exists()
|
||||||
|
}
|
||||||
|
|
||||||
|
async table(table: string): Promise<TableBuilder> {
|
||||||
|
return this.populateTable(new TableBuilder(table))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If the table for the given TableBuilder already exists in the
|
||||||
|
* database, fill in the columns, constraints, and indexes.
|
||||||
|
* @param table
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async populateTable(table: TableBuilder): Promise<TableBuilder> {
|
||||||
|
if ( await this.hasTable(table.name) ) {
|
||||||
|
// Load the existing columns
|
||||||
|
const cols = await this.getColumns(table.name)
|
||||||
|
cols.each(col => {
|
||||||
|
table.column(col.column_name)
|
||||||
|
.type(col.data_type)
|
||||||
|
.pipe()
|
||||||
|
.when(col.is_nullable, builder => {
|
||||||
|
builder.isNullable()
|
||||||
|
return builder
|
||||||
|
})
|
||||||
|
.when(col.column_default, builder => {
|
||||||
|
builder.default(raw(col.column_default))
|
||||||
|
return builder
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Load the existing constraints
|
||||||
|
const constraints = await this.getConstraints(table.name)
|
||||||
|
|
||||||
|
// Apply the unique constraints
|
||||||
|
const uniques = constraints.where('constraint_type', '=', 'u')
|
||||||
|
.sortBy('constraint_name')
|
||||||
|
.groupBy('constraint_name')
|
||||||
|
|
||||||
|
for ( const key in uniques ) {
|
||||||
|
if ( !Object.prototype.hasOwnProperty.call(uniques, key) ) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
table.constraint(key)
|
||||||
|
.type(ConstraintType.Unique)
|
||||||
|
.pipe()
|
||||||
|
.peek(constraint => {
|
||||||
|
collect<{column_name: string}>(uniques[key]) // eslint-disable-line camelcase
|
||||||
|
.pluck<string>('column_name')
|
||||||
|
.each(column => constraint.field(column))
|
||||||
|
})
|
||||||
|
.get()
|
||||||
|
.flagAsExistingInSchema()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply the primary key constraints
|
||||||
|
constraints.where('constraint_type', '=', 'p')
|
||||||
|
.pipe()
|
||||||
|
.when(c => c.count() > 0, pk => {
|
||||||
|
pk.each(constraint => {
|
||||||
|
table.column(constraint.column_name)
|
||||||
|
.primary()
|
||||||
|
})
|
||||||
|
|
||||||
|
return pk
|
||||||
|
})
|
||||||
|
|
||||||
|
// Apply the non-null constraints
|
||||||
|
// Builder columns are non-null by default, so mark the others as nullable
|
||||||
|
const nonNullable = constraints.filter(x => !x.constraint_type)
|
||||||
|
.where('is_nullable', '=', 'NO')
|
||||||
|
|
||||||
|
collect<string>(Object.keys(table.getColumns()))
|
||||||
|
.map(column => {
|
||||||
|
return {
|
||||||
|
column,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.whereNotIn('column', nonNullable.pluck('column_name'))
|
||||||
|
.pluck<string>('column')
|
||||||
|
.each(column => {
|
||||||
|
table.column(column)
|
||||||
|
.nullable()
|
||||||
|
})
|
||||||
|
|
||||||
|
// Look up and apply the check constraints
|
||||||
|
const checkConstraints = await this.getCheckConstraints(table.name)
|
||||||
|
|
||||||
|
checkConstraints.each(constraint => {
|
||||||
|
table.constraint(constraint.constraint_name)
|
||||||
|
.type(ConstraintType.Check)
|
||||||
|
.expression(constraint.check_clause)
|
||||||
|
.flagAsExistingInSchema()
|
||||||
|
})
|
||||||
|
|
||||||
|
// Mark the columns as existing in the database
|
||||||
|
cols.each(col => {
|
||||||
|
table.column(col.column_name)
|
||||||
|
.flagAsExistingInSchema()
|
||||||
|
})
|
||||||
|
|
||||||
|
// Look up table indexes
|
||||||
|
const indexes = await this.getIndexes(table.name)
|
||||||
|
const groupedIndexes = indexes.groupBy('index_name')
|
||||||
|
|
||||||
|
for ( const key in groupedIndexes ) {
|
||||||
|
if ( !Object.prototype.hasOwnProperty.call(groupedIndexes, key) ) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
table.index(key)
|
||||||
|
.pipe()
|
||||||
|
.peek(idx => {
|
||||||
|
collect<{column_name: string}>(groupedIndexes[key]) // eslint-disable-line camelcase
|
||||||
|
.pluck<string>('column_name')
|
||||||
|
.each(col => idx.field(col))
|
||||||
|
})
|
||||||
|
.when(groupedIndexes[key]?.[0]?.indisprimary, idx => {
|
||||||
|
idx.primary()
|
||||||
|
})
|
||||||
|
.when(groupedIndexes[key]?.[0]?.indisunique, idx => {
|
||||||
|
idx.unique()
|
||||||
|
})
|
||||||
|
.get()
|
||||||
|
.flagAsExistingInSchema()
|
||||||
|
}
|
||||||
|
|
||||||
|
table.flagAsExistingInSchema()
|
||||||
|
}
|
||||||
|
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Query the database to look up all indexes on a table, by column.
|
||||||
|
* @see https://stackoverflow.com/a/2213199/4971138
|
||||||
|
* @param table
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async getIndexes(table: string): Promise<Collection<QueryRow>> {
|
||||||
|
const rawQuery = `
|
||||||
|
select
|
||||||
|
t.relname as table_name,
|
||||||
|
i.relname as index_name,
|
||||||
|
a.attname as column_name,
|
||||||
|
ix.*
|
||||||
|
from pg_class t
|
||||||
|
left join pg_attribute a
|
||||||
|
on a.attrelid = t.oid
|
||||||
|
left join pg_index ix
|
||||||
|
on t.oid = ix.indrelid
|
||||||
|
left join pg_class i
|
||||||
|
on i.oid = ix.indexrelid
|
||||||
|
left join pg_namespace n
|
||||||
|
on n.oid = i.relnamespace
|
||||||
|
where
|
||||||
|
a.attnum = any(ix.indkey)
|
||||||
|
and t.relkind = 'r'
|
||||||
|
and t.relname = '${table}'
|
||||||
|
and n.nspname = '${this.schema}'
|
||||||
|
order by
|
||||||
|
t.relname,
|
||||||
|
i.relname;
|
||||||
|
`
|
||||||
|
|
||||||
|
return (new Builder()).connection(this.connection)
|
||||||
|
.raw(rawQuery)
|
||||||
|
.get()
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Query the database to look up all constraints on a table, by column.
|
||||||
|
* @see https://dba.stackexchange.com/a/290854
|
||||||
|
* @param table
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async getConstraints(table: string): Promise<Collection<QueryRow>> {
|
||||||
|
const rawQuery = `
|
||||||
|
SELECT * FROM (
|
||||||
|
SELECT
|
||||||
|
pgc.contype AS constraint_type,
|
||||||
|
pgc.conname AS constraint_name,
|
||||||
|
ccu.table_schema AS table_schema,
|
||||||
|
kcu.table_name AS table_name,
|
||||||
|
CASE WHEN (pgc.contype = 'f') THEN kcu.COLUMN_NAME ELSE ccu.COLUMN_NAME END AS column_name,
|
||||||
|
CASE WHEN (pgc.contype = 'f') THEN ccu.TABLE_NAME ELSE (null) END AS reference_table,
|
||||||
|
CASE WHEN (pgc.contype = 'f') THEN ccu.COLUMN_NAME ELSE (null) END AS reference_col,
|
||||||
|
CASE WHEN (pgc.contype = 'p') THEN 'yes' ELSE 'no' END AS auto_inc,
|
||||||
|
CASE WHEN (pgc.contype = 'p') THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||||
|
'integer' AS data_type,
|
||||||
|
'0' AS numeric_scale,
|
||||||
|
'32' AS numeric_precision
|
||||||
|
FROM
|
||||||
|
pg_constraint AS pgc
|
||||||
|
JOIN pg_namespace nsp
|
||||||
|
ON nsp.oid = pgc.connamespace
|
||||||
|
JOIN pg_class cls
|
||||||
|
ON pgc.conrelid = cls.oid
|
||||||
|
JOIN information_schema.key_column_usage kcu
|
||||||
|
ON kcu.constraint_name = pgc.conname
|
||||||
|
LEFT JOIN information_schema.constraint_column_usage ccu
|
||||||
|
ON pgc.conname = ccu.CONSTRAINT_NAME
|
||||||
|
AND nsp.nspname = ccu.CONSTRAINT_SCHEMA
|
||||||
|
WHERE
|
||||||
|
kcu.table_name = '${table}'
|
||||||
|
UNION
|
||||||
|
SELECT
|
||||||
|
NULL AS constraint_type,
|
||||||
|
NULL AS constraint_name,
|
||||||
|
table_schema,
|
||||||
|
table_name,
|
||||||
|
column_name,
|
||||||
|
NULL AS reference_table,
|
||||||
|
NULL AS reference_col,
|
||||||
|
'no' AS auto_inc,
|
||||||
|
is_nullable,
|
||||||
|
data_type,
|
||||||
|
numeric_scale,
|
||||||
|
numeric_precision
|
||||||
|
FROM information_schema.columns cols
|
||||||
|
WHERE
|
||||||
|
table_schema = '${this.schema}'
|
||||||
|
AND table_name = '${table}'
|
||||||
|
) AS child
|
||||||
|
ORDER BY table_name DESC
|
||||||
|
`
|
||||||
|
|
||||||
|
return (new Builder()).connection(this.connection)
|
||||||
|
.raw(rawQuery)
|
||||||
|
.get()
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://dataedo.com/kb/query/postgresql/list-table-check-constraints
|
||||||
|
* @param table
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async getCheckConstraints(table: string): Promise<Collection<QueryRow>> {
|
||||||
|
const rawQuery = `
|
||||||
|
SELECT
|
||||||
|
tc.table_schema,
|
||||||
|
tc.table_name,
|
||||||
|
ARRAY_AGG(col.column_name) AS columns,
|
||||||
|
tc.constraint_name,
|
||||||
|
cc.check_clause
|
||||||
|
FROM information_schema.table_constraints tc
|
||||||
|
JOIN information_schema.check_constraints cc
|
||||||
|
ON tc.constraint_schema = cc.constraint_schema
|
||||||
|
AND tc.constraint_name = cc.constraint_name
|
||||||
|
JOIN pg_namespace nsp
|
||||||
|
ON nsp.nspname = cc.constraint_schema
|
||||||
|
JOIN pg_constraint pgc
|
||||||
|
ON pgc.conname = cc.constraint_name
|
||||||
|
AND pgc.connamespace = nsp.oid
|
||||||
|
AND pgc.contype = 'c'
|
||||||
|
JOIN information_schema.columns col
|
||||||
|
ON col.table_schema = tc.table_schema
|
||||||
|
AND col.table_name = tc.table_name
|
||||||
|
AND col.ordinal_position = ANY(pgc.conkey)
|
||||||
|
WHERE
|
||||||
|
tc.constraint_schema NOT IN ('pg_catalog', 'information_schema')
|
||||||
|
AND tc.table_schema = '${this.schema}'
|
||||||
|
AND tc.table_name = '${table}'
|
||||||
|
GROUP BY
|
||||||
|
tc.table_schema,
|
||||||
|
tc.table_name,
|
||||||
|
tc.constraint_name,
|
||||||
|
cc.check_clause
|
||||||
|
ORDER BY
|
||||||
|
tc.table_schema,
|
||||||
|
tc.table_name
|
||||||
|
`
|
||||||
|
|
||||||
|
return (new Builder()).connection(this.connection)
|
||||||
|
.raw(rawQuery)
|
||||||
|
.get()
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Query the database to look up all columns on a table.
|
||||||
|
* @param table
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected async getColumns(table: string): Promise<Collection<QueryRow>> {
|
||||||
|
return (new Builder()).connection(this.connection)
|
||||||
|
.select(raw('*'))
|
||||||
|
.from('information_schema.columns')
|
||||||
|
.where('table_schema', '=', this.schema)
|
||||||
|
.where('table_name', '=', table)
|
||||||
|
.get()
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
}
|
@ -1,14 +1,51 @@
|
|||||||
import {Connection} from '../connection/Connection'
|
import {Connection} from '../connection/Connection'
|
||||||
import {Awaitable} from '../../util'
|
import {Awaitable} from '../../util'
|
||||||
|
import {TableBuilder} from './TableBuilder'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Represents a SQL-schema implementation.
|
||||||
|
*/
|
||||||
export abstract class Schema {
|
export abstract class Schema {
|
||||||
constructor(
|
constructor(
|
||||||
|
/** The SQL connection to execute against. */
|
||||||
protected readonly connection: Connection,
|
protected readonly connection: Connection,
|
||||||
) { }
|
) { }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve true if the schema has a table with the given name.
|
||||||
|
* @param name
|
||||||
|
*/
|
||||||
public abstract hasTable(name: string): Awaitable<boolean>
|
public abstract hasTable(name: string): Awaitable<boolean>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve true if the schema table with the given name has a column with the given name.
|
||||||
|
* @param table
|
||||||
|
* @param name
|
||||||
|
*/
|
||||||
public abstract hasColumn(table: string, name: string): Awaitable<boolean>
|
public abstract hasColumn(table: string, name: string): Awaitable<boolean>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve true if the schema table with the given name has all the specified columns.
|
||||||
|
* @param table
|
||||||
|
* @param name
|
||||||
|
*/
|
||||||
public abstract hasColumns(table: string, name: string[]): Awaitable<boolean>
|
public abstract hasColumns(table: string, name: string[]): Awaitable<boolean>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a TableBuilder instance for a table on the schema.
|
||||||
|
* @param table
|
||||||
|
*/
|
||||||
|
public abstract table(table: string): Awaitable<TableBuilder>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply the table to the schema.
|
||||||
|
* @param schema
|
||||||
|
*/
|
||||||
|
public async commit(schema: TableBuilder): Promise<void> {
|
||||||
|
const query = this.connection
|
||||||
|
.dialect()
|
||||||
|
.renderCommitSchemaTransaction(schema)
|
||||||
|
|
||||||
|
await this.connection.query(query)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,109 +1,770 @@
|
|||||||
import {Pipe} from '../../util'
|
import {collect, Maybe, ParameterizedCallback, Pipe} from '../../util'
|
||||||
|
import {FieldType} from '../types'
|
||||||
|
import {EscapeValue, QuerySafeValue, raw} from '../dialect/SQLDialect'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base class with shared logic for the various schema
|
||||||
|
* builders (table, column, index).
|
||||||
|
*/
|
||||||
export abstract class SchemaBuilderBase {
|
export abstract class SchemaBuilderBase {
|
||||||
|
/**
|
||||||
|
* Whether or not the schema item should be dropped.
|
||||||
|
* - `exists` - drop if exists
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
protected shouldDrop: 'yes'|'no'|'exists' = 'no'
|
protected shouldDrop: 'yes'|'no'|'exists' = 'no'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The name the schema item should have if renaming.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
protected shouldRenameTo?: string
|
protected shouldRenameTo?: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If true, apply IF NOT EXISTS syntax.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected shouldSkipIfExists = false
|
||||||
|
|
||||||
|
/** True if the schema has been modified since created/loaded. */
|
||||||
|
protected dirty = false
|
||||||
|
|
||||||
|
/** True if this resource exists, in some form, in the schema. */
|
||||||
|
protected existsInSchema = false
|
||||||
|
|
||||||
|
/** If the resource exists in the schema, the unaltered values it has. */
|
||||||
|
public originalFromSchema?: SchemaBuilderBase
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
protected readonly name: string,
|
/** The name of the schema item. */
|
||||||
|
public readonly name: string,
|
||||||
) { }
|
) { }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clone the properties of this resource to a different instance.
|
||||||
|
* @param newBuilder
|
||||||
|
*/
|
||||||
|
public cloneTo(newBuilder: SchemaBuilderBase): SchemaBuilderBase {
|
||||||
|
newBuilder.shouldDrop = this.shouldDrop
|
||||||
|
newBuilder.shouldRenameTo = this.shouldRenameTo
|
||||||
|
newBuilder.shouldSkipIfExists = this.shouldSkipIfExists
|
||||||
|
newBuilder.dirty = this.dirty
|
||||||
|
newBuilder.existsInSchema = this.existsInSchema
|
||||||
|
return newBuilder
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if this resource should be dropped. */
|
||||||
|
public isDropping(): boolean {
|
||||||
|
return this.shouldDrop === 'yes'
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if this resource should be dropped with IF EXISTS syntax. */
|
||||||
|
public isDroppingIfExists(): boolean {
|
||||||
|
return this.shouldDrop === 'exists'
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if this resource should be created with IF NOT EXISTS syntax. */
|
||||||
|
public isSkippedIfExisting(): boolean {
|
||||||
|
return this.shouldSkipIfExists
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if the resource already exists in some form in the schema. */
|
||||||
|
public isExisting(): boolean {
|
||||||
|
return this.existsInSchema
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if the resource has been modified since created/loaded. */
|
||||||
|
public isDirty(): boolean {
|
||||||
|
return this.dirty
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the name this resource should be renamed to, if it exists.
|
||||||
|
*/
|
||||||
|
public getRename(): Maybe<string> {
|
||||||
|
return this.shouldRenameTo
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Mark the resource to be removed. */
|
||||||
public drop(): this {
|
public drop(): this {
|
||||||
|
this.dirty = true
|
||||||
this.shouldDrop = 'yes'
|
this.shouldDrop = 'yes'
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Mark the resource to be removed, if it exists. */
|
||||||
public dropIfExists(): this {
|
public dropIfExists(): this {
|
||||||
|
this.dirty = true
|
||||||
this.shouldDrop = 'exists'
|
this.shouldDrop = 'exists'
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rename the resource to a different name.
|
||||||
|
* @param to
|
||||||
|
*/
|
||||||
public rename(to: string): this {
|
public rename(to: string): this {
|
||||||
|
this.dirty = true
|
||||||
this.shouldRenameTo = to
|
this.shouldRenameTo = to
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark the resource to use IF NOT EXISTS syntax.
|
||||||
|
*/
|
||||||
|
public ifNotExists(): this {
|
||||||
|
this.shouldSkipIfExists = true
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Used internally.
|
||||||
|
* Mark that the resource exists in the schema in some form,
|
||||||
|
* and reset the `dirty` flag.
|
||||||
|
*/
|
||||||
|
public flagAsExistingInSchema(): this {
|
||||||
|
this.existsInSchema = true
|
||||||
|
this.dirty = false
|
||||||
|
this.originalFromSchema = this.cloneTo(this.cloneInstance())
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get a Pipe containing this instance. */
|
||||||
pipe(): Pipe<this> {
|
pipe(): Pipe<this> {
|
||||||
return Pipe.wrap<this>(this)
|
return Pipe.wrap<this>(this)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a new instance of the concrete implementation of this class.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected abstract cloneInstance(): this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builder to specify the schema of a table column.
|
||||||
|
*/
|
||||||
export class ColumnBuilder extends SchemaBuilderBase {
|
export class ColumnBuilder extends SchemaBuilderBase {
|
||||||
|
/** The data type of the column. */
|
||||||
|
protected targetType?: FieldType
|
||||||
|
|
||||||
}
|
/** True if the column should allow NULL values. */
|
||||||
|
protected shouldBeNullable = false
|
||||||
|
|
||||||
export class IndexBuilder extends SchemaBuilderBase {
|
/** The default value of the column, if one should exist. */
|
||||||
|
protected defaultValue?: EscapeValue
|
||||||
|
|
||||||
protected fields: Set<string> = new Set<string>()
|
/** The data length of this column, if set */
|
||||||
|
protected targetLength?: number
|
||||||
protected removedFields: Set<string> = new Set<string>()
|
|
||||||
|
|
||||||
protected shouldBeUnique = false
|
|
||||||
|
|
||||||
|
/** True if this is a primary key constraint. */
|
||||||
protected shouldBePrimary = false
|
protected shouldBePrimary = false
|
||||||
|
|
||||||
protected field(name: string): this {
|
/** True if this column should contain distinct values. */
|
||||||
|
protected shouldBeUnique = false
|
||||||
|
|
||||||
|
public originalFromSchema?: ColumnBuilder
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
name: string,
|
||||||
|
|
||||||
|
/** The table this column belongs to. */
|
||||||
|
public readonly parent: TableBuilder,
|
||||||
|
) {
|
||||||
|
super(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
public cloneTo(newBuilder: ColumnBuilder): ColumnBuilder {
|
||||||
|
super.cloneTo(newBuilder)
|
||||||
|
newBuilder.targetType = this.targetType
|
||||||
|
newBuilder.shouldBeNullable = this.shouldBeNullable
|
||||||
|
newBuilder.defaultValue = this.defaultValue
|
||||||
|
newBuilder.targetLength = this.targetLength
|
||||||
|
newBuilder.shouldBePrimary = this.shouldBePrimary
|
||||||
|
newBuilder.shouldBeUnique = this.shouldBeUnique
|
||||||
|
return newBuilder
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the field type of the column, if it exists. */
|
||||||
|
public getType(): Maybe<FieldType> {
|
||||||
|
return this.targetType
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the data-type length of the column, if it exists. */
|
||||||
|
public getLength(): Maybe<number> {
|
||||||
|
return this.targetLength
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the default value of the column, if it exists. */
|
||||||
|
public getDefaultValue(): Maybe<EscapeValue> {
|
||||||
|
return this.defaultValue
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if the column should allow NULL values. */
|
||||||
|
public isNullable(): boolean {
|
||||||
|
return this.shouldBeNullable
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if the column is a primary key. */
|
||||||
|
public isPrimary(): boolean {
|
||||||
|
return this.shouldBePrimary
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if the column should require unique values. */
|
||||||
|
public isUnique(): boolean {
|
||||||
|
return this.shouldBeUnique
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specify the data type of the column.
|
||||||
|
* @param type
|
||||||
|
*/
|
||||||
|
public type(type: FieldType): this {
|
||||||
|
if ( this.targetType === type ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.targetType = type
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make the column nullable.
|
||||||
|
*/
|
||||||
|
public nullable(): this {
|
||||||
|
if ( this.shouldBeNullable ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.shouldBeNullable = true
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make the column non-nullable.
|
||||||
|
*/
|
||||||
|
public required(): this {
|
||||||
|
if ( !this.shouldBeNullable ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.shouldBeNullable = false
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specify the default value of the column.
|
||||||
|
* @param value
|
||||||
|
*/
|
||||||
|
public default(value: EscapeValue): this {
|
||||||
|
if ( this.defaultValue === value ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.defaultValue = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the length of this column's data type.
|
||||||
|
* @param value
|
||||||
|
*/
|
||||||
|
public length(value: number): this {
|
||||||
|
if ( this.targetLength === value ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.targetLength = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make this a primary-key column.
|
||||||
|
*/
|
||||||
|
primary(): this {
|
||||||
|
if ( this.shouldBePrimary ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.shouldBePrimary = true
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make this column require distinct values.
|
||||||
|
*/
|
||||||
|
unique(): this {
|
||||||
|
if ( this.shouldBeUnique ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.shouldBeUnique = true
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
protected cloneInstance(): this {
|
||||||
|
return new ColumnBuilder(this.name, this.parent) as this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Valid constraint types. */
|
||||||
|
export enum ConstraintType {
|
||||||
|
Unique = 'un',
|
||||||
|
Check = 'ck',
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builder to specify the schema of a table constraint.
|
||||||
|
*/
|
||||||
|
export class ConstraintBuilder extends SchemaBuilderBase {
|
||||||
|
public originalFromSchema?: ConstraintBuilder
|
||||||
|
|
||||||
|
/** The fields included in this constraint. */
|
||||||
|
protected fields: Set<string> = new Set<string>()
|
||||||
|
|
||||||
|
/** The type of this constraint. */
|
||||||
|
protected constraintType: ConstraintType = ConstraintType.Unique
|
||||||
|
|
||||||
|
/** The expression defining this constraint, if applicable. */
|
||||||
|
protected constraintExpression?: QuerySafeValue
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
name: string,
|
||||||
|
|
||||||
|
/** The table this constraint belongs to. */
|
||||||
|
public readonly parent: TableBuilder,
|
||||||
|
) {
|
||||||
|
super(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the type of this constraint. */
|
||||||
|
public getType(): ConstraintType {
|
||||||
|
return this.constraintType
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the fields included in this constraint. */
|
||||||
|
public getFields(): string[] {
|
||||||
|
return [...this.fields]
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the expression used to evaluate this constraint, if it exists. */
|
||||||
|
public getExpression(): Maybe<QuerySafeValue> {
|
||||||
|
return this.constraintExpression
|
||||||
|
}
|
||||||
|
|
||||||
|
public cloneTo(newBuilder: ConstraintBuilder): ConstraintBuilder {
|
||||||
|
super.cloneTo(newBuilder)
|
||||||
|
newBuilder.fields = new Set<string>([...this.fields])
|
||||||
|
newBuilder.constraintType = this.constraintType
|
||||||
|
return newBuilder
|
||||||
|
}
|
||||||
|
|
||||||
|
protected cloneInstance(): this {
|
||||||
|
return new ConstraintBuilder(this.name, this.parent) as this
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Add a field to this constraint. */
|
||||||
|
public field(name: string): this {
|
||||||
|
if ( this.fields.has(name) ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
this.fields.add(name)
|
this.fields.add(name)
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Remove a field from this constraint. */
|
||||||
|
public removeField(name: string): this {
|
||||||
|
if ( !this.fields.has(name) ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.fields.delete(name)
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Specify the type of this constraint. */
|
||||||
|
public type(type: ConstraintType): this {
|
||||||
|
if ( this.constraintType === type ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.constraintType = type
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
    /** Specify the expression used to evaluate this constraint, if applicable. */
    public expression(sql: string | QuerySafeValue): this {
        if ( String(this.constraintExpression) === String(sql) ) {
            return this
        }

        this.dirty = true

        if ( sql instanceof QuerySafeValue ) {
            this.constraintExpression = sql
            return this
        }

        this.constraintExpression = raw(sql)
        return this
    }
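For context, the check() and unique() helpers defined further down in TableBuilder build on constraint(), generating names like `widgets_1_ck` / `widgets_1_un` automatically. A small sketch of using them; the table, column names, and the wrapper function are illustrative, and the import path assumes the snippet lives next to Schema.ts.

```typescript
// Sketch: declaring auto-named constraints on an existing table builder.
import {Schema} from './Schema'

async function addWidgetConstraints(schema: Schema): Promise<void> {
    const table = await schema.table('widgets')

    // Adds a ConstraintType.Check constraint with a generated name.
    table.check('quantity >= 0')

    // Adds a ConstraintType.Unique constraint covering exactly these fields,
    // unless an identical one already exists on the builder.
    table.unique('name', 'owner_id')

    await schema.commit(table)
}
```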
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builder to specify the schema of a table index.
|
||||||
|
*/
|
||||||
|
export class IndexBuilder extends SchemaBuilderBase {
|
||||||
|
/** The fields included in the index. */
|
||||||
|
protected fields: Set<string> = new Set<string>()
|
||||||
|
|
||||||
|
/** Fields to remove from the index. */
|
||||||
|
protected removedFields: Set<string> = new Set<string>()
|
||||||
|
|
||||||
|
/** True if this is a unique index. */
|
||||||
|
protected shouldBeUnique = false
|
||||||
|
|
||||||
|
/** True if this is a primary key index. */
|
||||||
|
protected shouldBePrimary = false
|
||||||
|
|
||||||
|
public originalFromSchema?: IndexBuilder
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
name: string,
|
||||||
|
|
||||||
|
/** The table this index belongs to. */
|
||||||
|
public readonly parent: TableBuilder,
|
||||||
|
) {
|
||||||
|
super(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
public cloneTo(newBuilder: IndexBuilder): IndexBuilder {
|
||||||
|
super.cloneTo(newBuilder)
|
||||||
|
newBuilder.fields = new Set<string>([...this.fields])
|
||||||
|
newBuilder.removedFields = new Set<string>([...this.removedFields])
|
||||||
|
newBuilder.shouldBeUnique = this.shouldBeUnique
|
||||||
|
newBuilder.shouldBePrimary = this.shouldBePrimary
|
||||||
|
return newBuilder
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get the fields in this index. */
|
||||||
|
public getFields(): string[] {
|
||||||
|
return [...this.fields]
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if this index is a unique index. */
|
||||||
|
public isUnique(): boolean {
|
||||||
|
return this.shouldBeUnique
|
||||||
|
}
|
||||||
|
|
||||||
|
/** True if this index is the primary key index. */
|
||||||
|
public isPrimary(): boolean {
|
||||||
|
return this.shouldBePrimary
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add the given field to this index.
|
||||||
|
* @param name
|
||||||
|
*/
|
||||||
|
public field(name: string): this {
|
||||||
|
if ( this.fields.has(name) ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
|
this.fields.add(name)
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove the given field from this index.
|
||||||
|
* @param name
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
protected removeField(name: string): this {
|
protected removeField(name: string): this {
|
||||||
|
if ( !this.fields.has(name) ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
this.removedFields.add(name)
|
this.removedFields.add(name)
|
||||||
this.fields.delete(name)
|
this.fields.delete(name)
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make this a primary-key index.
|
||||||
|
*/
|
||||||
primary(): this {
|
primary(): this {
|
||||||
|
if ( this.shouldBePrimary ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
this.shouldBePrimary = true
|
this.shouldBePrimary = true
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make this a unique index.
|
||||||
|
*/
|
||||||
unique(): this {
|
unique(): this {
|
||||||
|
if ( this.shouldBeUnique ) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dirty = true
|
||||||
this.shouldBeUnique = true
|
this.shouldBeUnique = true
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
protected cloneInstance(): this {
|
||||||
|
return new IndexBuilder(this.name, this.parent) as this
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builder to specify the schema of a table.
|
||||||
|
*/
|
||||||
export class TableBuilder extends SchemaBuilderBase {
|
export class TableBuilder extends SchemaBuilderBase {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mapping of column name to column schemata.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
protected columns: {[key: string]: ColumnBuilder} = {}
|
protected columns: {[key: string]: ColumnBuilder} = {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mapping of index name to index schemata.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
protected indexes: {[key: string]: IndexBuilder} = {}
|
protected indexes: {[key: string]: IndexBuilder} = {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mapping of constraint name to constraint schemata.
|
||||||
|
* @protected
|
||||||
|
*/
|
||||||
|
protected constraints: {[key: string]: ConstraintBuilder} = {}
|
||||||
|
|
||||||
|
public originalFromSchema?: TableBuilder
|
||||||
|
|
||||||
|
public cloneTo(newBuilder: TableBuilder): TableBuilder {
|
||||||
|
super.cloneTo(newBuilder)
|
||||||
|
newBuilder.columns = {...this.columns}
|
||||||
|
newBuilder.indexes = {...this.indexes}
|
||||||
|
newBuilder.constraints = {...this.constraints}
|
||||||
|
return newBuilder
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the columns defined on this table.
|
||||||
|
*/
|
||||||
|
public getColumns(): {[key: string]: ColumnBuilder} {
|
||||||
|
return {
|
||||||
|
...this.columns,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the indices defined on this table.
|
||||||
|
*/
|
||||||
|
public getIndexes(): {[key: string]: IndexBuilder} {
|
||||||
|
return {
|
||||||
|
...this.indexes,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the constraints defined on this table.
|
||||||
|
*/
|
||||||
|
public getConstraints(): {[key: string]: ConstraintBuilder} {
|
||||||
|
return {
|
||||||
|
...this.constraints,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark a column to be dropped.
|
||||||
|
* @param name
|
||||||
|
*/
|
||||||
public dropColumn(name: string): this {
|
public dropColumn(name: string): this {
|
||||||
|
this.dirty = true
|
||||||
this.column(name).drop()
|
this.column(name).drop()
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark a column to be renamed.
|
||||||
|
* @param from
|
||||||
|
* @param to
|
||||||
|
*/
|
||||||
public renameColumn(from: string, to: string): this {
|
public renameColumn(from: string, to: string): this {
|
||||||
this.column(from).rename(to)
|
this.column(from).rename(to)
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark an index to be dropped.
|
||||||
|
* @param name
|
||||||
|
*/
|
||||||
public dropIndex(name: string): this {
|
public dropIndex(name: string): this {
|
||||||
|
this.dirty = true
|
||||||
this.index(name).drop()
|
this.index(name).drop()
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark an index to be renamed.
|
||||||
|
* @param from
|
||||||
|
* @param to
|
||||||
|
*/
|
||||||
public renameIndex(from: string, to: string): this {
|
public renameIndex(from: string, to: string): this {
|
||||||
this.index(from).rename(to)
|
this.index(from).rename(to)
|
||||||
return this
|
        return this
    }

-   public column(name: string) {
+   /**
+    * Add a column to this table.
+    * @param name
+    * @param callback
+    */
+   public column(name: string, callback?: ParameterizedCallback<ColumnBuilder>): ColumnBuilder {
        if ( !this.columns[name] ) {
-           this.columns[name] = new ColumnBuilder(name)
+           this.dirty = true
+           this.columns[name] = new ColumnBuilder(name, this)
        }

        if ( callback ) {
            callback(this.columns[name])
        }

        return this.columns[name]
    }

-   public index(name: string) {
+   /**
+    * Add an index to this table.
+    * @param name
+    * @param callback
+    */
+   public index(name: string, callback?: ParameterizedCallback<IndexBuilder>): IndexBuilder {
        if ( !this.indexes[name] ) {
-           this.indexes[name] = new IndexBuilder(name)
+           this.dirty = true
+           this.indexes[name] = new IndexBuilder(name, this)
        }

        if ( callback ) {
            callback(this.indexes[name])
        }

        return this.indexes[name]
    }

    /**
     * Add a constraint to this table.
     * @param name
     * @param callback
     */
    public constraint(name: string, callback?: ParameterizedCallback<ConstraintBuilder>): ConstraintBuilder {
        if ( !this.constraints[name] ) {
            this.dirty = true
            this.constraints[name] = new ConstraintBuilder(name, this)
        }

        if ( callback ) {
            callback(this.constraints[name])
        }

        return this.constraints[name]
    }

    /**
     * Generate a programmatically-incrementing constraint name.
     * @param suffix
     * @protected
     */
    protected getNextAvailableConstraintName(suffix: ConstraintType): string {
        let current = 1
        let name = `${this.name}_${current}_${suffix}`

        while ( this.constraints[name] ) {
            current += 1
            name = `${this.name}_${current}_${suffix}`
        }

        return name
    }

    /**
     * Add a new check constraint with the given expression.
     * @param expression
     */
    public check(expression: string | QuerySafeValue): this {
        const name = this.getNextAvailableConstraintName(ConstraintType.Check)
        this.constraint(name)
            .type(ConstraintType.Check)
            .expression(expression)

        return this
    }

    /**
     * Add a new unique constraint for the given fields.
     * @param fields
     */
    public unique(...fields: string[]): this {
        // First, check if an existing constraint exists with these fields
        for ( const key in this.constraints ) {
            if ( !Object.prototype.hasOwnProperty.call(this.constraints, key) ) {
                continue
            }

            if ( this.constraints[key].getType() !== ConstraintType.Unique ) {
                continue
            }

            const existingFields = collect<string>(this.constraints[key].getFields())
            const intersection = existingFields.intersect(fields)

            if ( existingFields.length === fields.length && intersection.length === fields.length ) {
                return this
            }
        }

        // If an existing constraint can't satisfy this, create a new one
        const name = this.getNextAvailableConstraintName(ConstraintType.Unique)
        this.constraint(name)
            .type(ConstraintType.Unique)
            .pipe()
            .peek(constraint => {
                fields.forEach(field => constraint.field(field))
            })

        return this
    }

    /**
     * Add a primary key (column & index) to this table.
     * @param name
     * @param type
     */
    public primaryKey(name: string, type: FieldType = FieldType.serial): ColumnBuilder {
        this.dirty = true

        return this.column(name)
            .type(type)
            .primary()
    }

    protected cloneInstance(): this {
        return new TableBuilder(this.name) as this
    }
}
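As a quick orientation, here is a rough sketch of how the new builder helpers might be called from a migration. The `table` variable and the "posts"/"title" names are invented; only the method calls themselves come from the code above.

```ts
// Hypothetical usage of the new TableBuilder helpers; `table` is assumed to be
// a TableBuilder for an imaginary "posts" table.
table.primaryKey('post_id')                                       // serial primary-key column

table.column('title', column => column.type(FieldType.varchar))   // callback form of column()

table.unique('title')             // auto-named unique constraint via getNextAvailableConstraintName()
table.check('LENGTH(title) > 0')  // auto-named check constraint with a raw expression
```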
@ -1,10 +1,11 @@
-import {Inject, Singleton} from '../../di'
+import {Container, Inject, Singleton} from '../../di'
import {DatabaseService} from '../DatabaseService'
import {PostgresConnection} from '../connection/PostgresConnection'
import {ErrorWithContext} from '../../util'
import {Unit} from '../../lifecycle/Unit'
import {Config} from '../../service/Config'
import {Logging} from '../../service/Logging'
import {MigratorFactory} from '../migrations/MigratorFactory'

/**
 * Application unit responsible for loading and creating database connections from config.
@ -12,13 +13,16 @@ import {Logging} from '../../service/Logging'
@Singleton()
export class Database extends Unit {
    @Inject()
-   protected readonly config!: Config;
+   protected readonly config!: Config

    @Inject()
-   protected readonly dbService!: DatabaseService;
+   protected readonly dbService!: DatabaseService

    @Inject()
-   protected readonly logging!: Logging;
+   protected readonly logging!: Logging

    @Inject('injector')
    protected readonly injector!: Container

    /**
     * Load the `database.connections` config and register Connection instances for each config.
@ -28,6 +32,9 @@ export class Database extends Unit {
        const connections = this.config.get('database.connections')
        const promises = []

        // Register the migrator factory
        this.injector.registerFactory(this.injector.make(MigratorFactory))

        for ( const key in connections ) {
            if ( !Object.prototype.hasOwnProperty.call(connections, key) ) {
                continue
src/orm/services/Migrations.ts (new file, 98 lines)
@ -0,0 +1,98 @@
import {Inject, Singleton} from '../../di'
import {CanonicalInstantiable} from '../../service/CanonicalInstantiable'
import {Migration} from '../migrations/Migration'
import {CanonicalDefinition, CanonicalResolver} from '../../service/Canonical'
import {Migrator} from '../migrations/Migrator'
import {UniversalPath} from '../../util'
import {lib} from '../../lib'
import {CommandLine} from '../../cli'
import {MigrateDirective} from '../directive/MigrateDirective'
import {RollbackDirective} from '../directive/RollbackDirective'
import {CreateMigrationDirective} from '../directive/CreateMigrationDirective'

/**
 * Service unit that loads and instantiates migration classes.
 */
@Singleton()
export class Migrations extends CanonicalInstantiable<Migration> {
    @Inject()
    protected readonly migrator!: Migrator

    @Inject()
    protected readonly cli!: CommandLine

    protected appPath = ['migrations']

    protected canonicalItem = 'migration'

    protected suffix = '.migration.js'

    async up(): Promise<void> {
        if ( await this.path.exists() ) {
            await super.up()
        } else {
            this.logging.debug(`Base migration path does not exist, or has no files: ${this.path}`)
        }

        // Register the migrations for @extollo/lib
        const basePath = lib().concat('migrations')
        const resolver = await this.buildMigrationNamespaceResolver('@extollo', basePath)
        this.registerNamespace('@extollo', resolver)

        // Register the migrate CLI directives
        this.cli.registerDirective(MigrateDirective)
        this.cli.registerDirective(RollbackDirective)
        this.cli.registerDirective(CreateMigrationDirective)
    }

    async initCanonicalItem(definition: CanonicalDefinition): Promise<Migration> {
        const instance = await super.initCanonicalItem(definition)

        if ( !(instance instanceof Migration) ) {
            throw new TypeError(`Invalid migration: ${definition.originalName}. Migrations must extend from @extollo/lib.Migration.`)
        }

        instance.setMigrationIdentifier(definition.canonicalName)

        return instance
    }

    /**
     * Creates a CanonicalResolver for a directory that contains migration files.
     *
     * @example
     * ```ts
     * const path = universalPath('path', 'to', 'migrations', 'folder')
     * const namespace = '@mypackage'
     *
     * const resolver = await migrations.buildMigrationNamespaceResolver(namespace, path)
     * migrations.registerNamespace(namespace, resolver)
     * ```
     * @param name
     * @param basePath
     */
    public async buildMigrationNamespaceResolver(name: string, basePath: UniversalPath): Promise<CanonicalResolver<Migration>> {
        if ( !name.startsWith('@') ) {
            name = `@${name}`
        }

        const namespace: {[key: string]: Migration} = {}

        for await ( const entry of basePath.walk() ) {
            if ( !entry.endsWith(this.suffix) ) {
                this.logging.debug(`buildMigrationNamespaceResolver - Skipping file with invalid suffix: ${entry}`)
                continue
            }

            const definition = await this.buildCanonicalDefinition(entry, basePath)
            this.logging.verbose(`buildMigrationNamespaceResolver - Discovered canonical ${this.canonicalItem} "${definition.canonicalName}" from ${entry}`)
            namespace[definition.canonicalName] = await this.initCanonicalItem(definition)
            namespace[definition.canonicalName].setMigrationIdentifier(`${name}:${namespace[definition.canonicalName].identifier}`)
        }

        return {
            get: (key: string) => namespace[key],
            all: () => Object.keys(namespace),
        }
    }
}
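For illustration, a sketch of how a third-party package's migrations directory might be registered under its own namespace using the resolver helper above. The package path and namespace are made up; `universalPath`, `registerNamespace`, and `all()` come from code elsewhere in this commit.

```ts
// Hypothetical: expose a package's migrations under the '@my-package' namespace.
const packagePath = universalPath('node_modules', 'my-package', 'migrations')

const resolver = await migrations.buildMigrationNamespaceResolver('@my-package', packagePath)
migrations.registerNamespace('@my-package', resolver)

// Because the resolver implements the complex (get/all) shape, the discovered
// canonical names can later be enumerated with migrations.all('@my-package').
```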
src/orm/template/migration.ts (new file, 40 lines)
@ -0,0 +1,40 @@
import {Template} from '../../cli'

/**
 * Template for creating new migration classes in app/migrations.
 */
const templateMigration: Template = {
    name: 'migration',
    fileSuffix: '.migration.ts',
    baseAppPath: ['migrations'],
    description: 'Create a new class that applies a one-time migration',
    render: (name: string) => {
        return `import {Injectable, Migration} from '@extollo/lib'

/**
 * ${name}
 * ----------------------------------
 * Put some description here.
 */
@Injectable()
export default class ${name} extends Migration {
    /**
     * Apply the migration.
     */
    async up(): Promise<void> {

    }

    /**
     * Undo the migration.
     */
    async down(): Promise<void> {

    }
}
`
    },
}

export { templateMigration }
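To show what the template produces, a small sketch; the class name passed to render() here is arbitrary.

```ts
// Render the stub for a hypothetical migration class name.
const source = templateMigration.render('CreateProfilesTable')

// `source` now contains the boilerplate above with the name interpolated, i.e.
// "export default class CreateProfilesTable extends Migration { ... }".
```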
@ -152,3 +152,56 @@ export enum FieldType {
    other = 'other',
}

/**
 * Given a FieldType, get the inverse (that is, the code-form name).
 * @example
 * ```ts
 * console.log(FieldType.varchar) // => "character varying"
 * console.log(inverseFieldType(FieldType.varchar)) // => "varchar"
 * console.log(inverseFieldType('character varying')) // => "varchar"
 * ```
 * @param type
 */
export function inverseFieldType(type: FieldType): string {
    return ({
        bigint: 'bigint',
        bigserial: 'bigserial',
        bit: 'bit',
        'bit varying': 'varbit',
        boolean: 'boolean',
        box: 'box',
        bytea: 'bytea',
        character: 'character',
        char: 'character',
        'character varying': 'varchar',
        cidr: 'cidr',
        circle: 'circle',
        date: 'date',
        'double precision': 'float8',
        inet: 'inet',
        integer: 'integer',
        interval: 'interval',
        json: 'json',
        line: 'line',
        lseg: 'lseg',
        macaddr: 'macaddr',
        money: 'money',
        numeric: 'numeric',
        path: 'path',
        point: 'point',
        polygon: 'polygon',
        real: 'real',
        smallint: 'smallint',
        smallserial: 'smallserial',
        serial: 'serial',
        text: 'text',
        time: 'time',
        timestamp: 'timestamp',
        tsquery: 'tsquery',
        tsvector: 'tsvector',
        txidSnapshot: 'txidSnapshot',
        uuid: 'uuid',
        xml: 'xml',
        other: 'other',
    })[type]
}
@ -20,7 +20,17 @@ export interface CanonicalDefinition {
/**
 * Type alias for a function that resolves a canonical name to a canonical item, if one exists.
 */
-export type CanonicalResolver<T> = (key: string) => T | undefined
+export type CanonicalResolverFunction<T> = (key: string) => T | undefined

/**
 * Interface for a canonical resolver that provides additional information.
 */
export interface ComplexCanonicalResolver<T> {
    get: CanonicalResolverFunction<T>,
    all: () => string[],
}

export type CanonicalResolver<T> = CanonicalResolverFunction<T> | ComplexCanonicalResolver<T>

/**
 * Base type for a canonical name reference.
@ -105,10 +115,33 @@ export abstract class Canonical<T> extends Unit {
    /**
     * Return an array of all loaded canonical names.
     */
-   public all(): string[] {
+   public all(namespace?: string): string[] {
        if ( namespace ) {
            const resolver = this.loadedNamespaces[namespace]
            if ( !resolver ) {
                throw new ErrorWithContext(`Unable to find namespace for ${this.canonicalItem}: ${namespace}`, {
                    canonicalItem: this.canonicalItem,
                    namespace,
                })
            }

            if ( typeof resolver === 'function' ) {
                return []
            } else {
                return resolver.all()
            }
        }

        return Object.keys(this.loadedItems)
    }

    /**
     * Return an array of all loaded canonical namespaces.
     */
    public namespaces(): string[] {
        return Object.keys(this.loadedNamespaces)
    }

    /**
     * Get a Universal path to the base directory where this unit loads its canonical files from.
     */
@ -127,7 +160,8 @@
        const [namespace, ...rest] = key.split(':')
        key = rest.join(':')

-       if ( !this.loadedNamespaces[namespace] ) {
+       const resolver = this.loadedNamespaces[namespace]
+       if ( !resolver ) {
            throw new ErrorWithContext(`Unable to find namespace for ${this.canonicalItem}: ${namespace}`, {
                canonicalItem: this.canonicalItem,
                namespace,
@ -135,7 +169,11 @@
            })
        }

-       return this.loadedNamespaces[namespace](key)
+       if ( typeof resolver === 'function' ) {
+           return resolver(key)
+       } else {
+           return resolver.get(key)
+       }
    }

    return this.loadedItems[key]
@ -208,10 +246,11 @@
    /**
     * Given the path to a file in the canonical items directory, create a CanonicalDefinition record from that file.
     * @param filePath
     * @param basePath
     * @protected
     */
-   protected async buildCanonicalDefinition(filePath: string): Promise<CanonicalDefinition> {
+   protected async buildCanonicalDefinition(filePath: string, basePath?: UniversalPath): Promise<CanonicalDefinition> {
-       const originalName = filePath.replace(this.path.toLocal, '').substr(1)
+       const originalName = filePath.replace((basePath || this.path).toLocal, '').substr(1)
        const pathRegex = new RegExp(nodePath.sep, 'g')
        const canonicalName = originalName.replace(pathRegex, ':')
            .split('')
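A short sketch of the two resolver shapes a namespace may now register; the item type and keys are invented for the example, while the `CanonicalResolver` types come from the code above.

```ts
// A plain function resolver: can resolve keys, but cannot enumerate them.
const fnResolver: CanonicalResolver<string> = key => (key === 'greeting' ? 'hello' : undefined)

// A complex resolver: adds all(), which is what Canonical.all('<namespace>') delegates to.
const complexResolver: CanonicalResolver<string> = {
    get: key => (key === 'greeting' ? 'hello' : undefined),
    all: () => ['greeting'],
}

// With a function-style resolver, all('<namespace>') has nothing to enumerate and returns [].
```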
@ -3,7 +3,7 @@
 * @extends Error
 */
import {Canonical, CanonicalDefinition} from './Canonical'
-import {Instantiable, isInstantiable} from '../di'
+import {isInstantiable} from '../di'

/**
 * Error thrown when the export of a canonical file is determined to be invalid.
@ -17,8 +17,8 @@ export class InvalidCanonicalExportError extends Error {
/**
 * Variant of the Canonical unit whose files export classes which are instantiated using the global container.
 */
-export class CanonicalInstantiable<T> extends Canonical<Instantiable<T>> {
+export class CanonicalInstantiable<T> extends Canonical<T> {
-   public async initCanonicalItem(definition: CanonicalDefinition): Promise<Instantiable<T>> {
+   public async initCanonicalItem(definition: CanonicalDefinition): Promise<T> {
        if ( isInstantiable(definition.imported.default) ) {
            return this.app().make(definition.imported.default)
        }
@ -7,6 +7,7 @@ import {
} from './Collection'
import {Iterable, StopIteration} from './Iterable'
import {applyWhere, WhereOperator} from './where'
import {AsyncPipe, Pipe} from '../support/Pipe'

type AsyncCollectionComparable<T> = CollectionItem<T>[] | Collection<T> | AsyncCollection<T>
type AsyncKeyFunction<T, T2> = (item: CollectionItem<T>, index: number) => CollectionItem<T2> | Promise<CollectionItem<T2>>
type AsyncCollectionFunction<T, T2> = (items: AsyncCollection<T>) => T2
@ -318,6 +319,24 @@ export class AsyncCollection<T> {
        return new Collection<T2>(newItems)
    }

    /**
     * Create a new collection by mapping the items in this collection using the given function,
     * excluding any for which the function resolves undefined.
     * @param func
     */
    async partialMap<T2>(func: AsyncKeyFunction<T, T2 | undefined>): Promise<Collection<NonNullable<T2>>> {
        const newItems: CollectionItem<NonNullable<T2>>[] = []

        await this.each(async (item, index) => {
            const result = await func(item, index)
            if ( typeof result !== 'undefined' ) {
                newItems.push(result as NonNullable<T2>)
            }
        })

        return new Collection<NonNullable<T2>>(newItems)
    }

    /**
     * Returns true if the given operator returns true for every item in the collection.
     * @param {AsyncKeyFunction} func
@ -783,10 +802,24 @@ export class AsyncCollection<T> {
     * Return the value of the function, passing this collection to it.
     * @param {AsyncCollectionFunction} func
     */
-   pipe<T2>(func: AsyncCollectionFunction<T, T2>): any {
+   pipeTo<T2>(func: AsyncCollectionFunction<T, T2>): any {
        return func(this)
    }

    /**
     * Return a new Pipe of this collection.
     */
    pipe(): Pipe<AsyncCollection<T>> {
        return Pipe.wrap(this)
    }

    /**
     * Return a new AsyncPipe of this collection.
     */
    asyncPipe(): AsyncPipe<AsyncCollection<T>> {
        return AsyncPipe.wrap(this)
    }

    /* async pop(): Promise<MaybeCollectionItem<T>> {
        const nextItem = await this.storedItems.next()
        if ( !nextItem.done ) {
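A rough sketch of the new AsyncCollection helpers in use; `records` and its element shape are assumptions made for the example, everything else comes from the methods above.

```ts
// Assume `records` is an AsyncCollection<{ id: number, email?: string }>.

// partialMap() maps and filters in one pass: results that resolve to undefined
// are dropped and the element type is narrowed to NonNullable<T2>.
const emails = await records.partialMap(record => record.email)

// asyncPipe() wraps the collection itself, so it can be threaded through the
// lazy AsyncPipe introduced later in this commit.
const mapped = await records.asyncPipe()
    .tap(collection => collection.partialMap(record => record.email))
    .resolve()
```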
@ -1,11 +1,11 @@
-import {Pipe} from '../support/Pipe'
+import {AsyncPipe, Pipe} from '../support/Pipe'

type CollectionItem<T> = T
type MaybeCollectionItem<T> = CollectionItem<T> | undefined
type KeyFunction<T, T2> = (item: CollectionItem<T>, index: number) => CollectionItem<T2>
type KeyReducerFunction<T, T2> = (current: any, item: CollectionItem<T>, index: number) => T2
type CollectionFunction<T, T2> = (items: Collection<T>) => T2
-type KeyOperator<T, T2> = string | KeyFunction<T, T2>
+type KeyOperator<T, T2> = keyof T | KeyFunction<T, T2>
type AssociatedCollectionItem<T2, T> = { key: T2, item: CollectionItem<T> }
type CollectionComparable<T> = CollectionItem<T>[] | Collection<T>
type DeterminesEquality<T> = (item: CollectionItem<T>, other: any) => boolean
@ -313,6 +313,24 @@ class Collection<T> {
        return new Collection<T2>(newItems)
    }

    /**
     * Create a new collection by mapping the items in this collection using the given function,
     * excluding any for which the function returns undefined.
     * @param func
     */
    partialMap<T2>(func: KeyFunction<T, T2 | undefined>): Collection<NonNullable<T2>> {
        const newItems: CollectionItem<NonNullable<T2>>[] = []

        this.each(((item, index) => {
            const result = func(item, index)
            if ( typeof result !== 'undefined' ) {
                newItems.push(result as NonNullable<T2>)
            }
        }))

        return new Collection<NonNullable<T2>>(newItems)
    }

    /**
     * Convert this collection to an object keyed by the given field.
     *
@ -354,10 +372,10 @@ class Collection<T> {
        this.allAssociated(key).forEach(assoc => {
            i += 1

-           if ( typeof value === 'string' ) {
-               obj[assoc.key] = (assoc.item as any)[value]
-           } else {
+           if ( typeof value === 'function' ) {
                obj[assoc.key] = value(assoc.item, i)
+           } else {
+               obj[assoc.key] = (assoc.item[value] as any) as T2
            }
        })

@ -805,6 +823,13 @@ class Collection<T> {
        return Pipe.wrap(this)
    }

    /**
     * Return a new AsyncPipe of this collection.
     */
    asyncPipe(): AsyncPipe<Collection<T>> {
        return AsyncPipe.wrap(this)
    }

    /**
     * Remove the last item from this collection.
     */
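The synchronous partialMap() behaves the same way; a tiny sketch with made-up values, using the collect() helper seen elsewhere in this commit.

```ts
const evens = collect([1, 2, 3, 4])
    .partialMap(n => (n % 2 === 0 ? n * 10 : undefined))

// => Collection containing [20, 40]; undefined results are dropped and the
//    element type is narrowed to NonNullable<number>.
```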
@ -13,6 +13,7 @@ export * from './error/ErrorWithContext'

export * from './logging/Logger'
export * from './logging/StandardLogger'
export * from './logging/FileLogger'
export * from './logging/types'

export * from './support/BehaviorSubject'
src/util/logging/FileLogger.ts (new file, 44 lines)
@ -0,0 +1,44 @@
import {Logger} from './Logger'
import {LogMessage} from './types'
import {Injectable} from '../../di'
import {universalPath} from '../support/path'
import {appPath, env} from '../../lifecycle/Application'
import {Writable} from 'stream'

/**
 * A Logger implementation that writes to a UniversalPath.
 */
@Injectable()
export class FileLogger extends Logger {
    private resolvedPath?: Writable

    /**
     * Get the re-usable write stream to the log file.
     * @protected
     */
    protected async getWriteStream(): Promise<Writable> {
        if ( !this.resolvedPath ) {
            let basePath = env('EXTOLLO_LOGGING_FILE')
            if ( basePath && !Array.isArray(basePath) ) {
                basePath = [basePath]
            }

            const resolvedPath = basePath ? universalPath(...basePath) : appPath('..', '..', 'extollo.log')

            if ( !(await resolvedPath.exists()) ) {
                await resolvedPath.concat('..').mkdir()
                await resolvedPath.write('')
            }

            this.resolvedPath = await resolvedPath.writeStream()
        }

        return this.resolvedPath
    }

    public async write(message: LogMessage): Promise<void> {
        const text = `${message.level} ${this.formatDate(message.date)} (${message.callerName || 'Unknown'}) ${message.output}`
        const stream = await this.getWriteStream()
        stream.write(text + '\n')
    }
}
@ -1,5 +1,6 @@
import {LoggingLevel, LogMessage} from './types'
import * as color from 'colors/safe'
import {Awaitable} from '../support/types'

/**
 * Base class for an application logger.
@ -10,7 +11,7 @@ export abstract class Logger {
     * @param {LogMessage} message
     * @return Promise<void>
     */
-   public abstract write(message: LogMessage): Promise<void> | void;
+   public abstract write(message: LogMessage): Awaitable<void>;

    /**
     * Format the date object to the string output format.
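As a sketch of the Awaitable-based contract in practice, a hypothetical in-memory logger; only Logger, LogMessage, and formatDate() are taken from the code above, the class itself and its import paths are assumptions.

```ts
import {Logger} from './Logger'      // assumed to sit next to the other loggers
import {LogMessage} from './types'

export class MemoryLogger extends Logger {
    private readonly lines: string[] = []

    // write() can be synchronous here, since Awaitable<void> covers both
    // void and Promise<void> return values.
    public write(message: LogMessage): void {
        this.lines.push(`${message.level} ${this.formatDate(message.date)} ${message.output}`)
    }
}
```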
@ -1,12 +1,14 @@
/**
 * A closure that maps a given pipe item to a different type.
 */
import {Awaitable} from './types'

export type PipeOperator<T, T2> = (subject: T) => T2

/**
 * A closure that maps a given pipe item to an item of the same type.
 */
-export type ReflexivePipeOperator<T> = (subject: T) => T
+export type ReflexivePipeOperator<T> = (subject: T) => T|void

/**
 * A condition or condition-resolving function for pipe methods.
@ -77,6 +79,15 @@ export class Pipe<T> {
        return new Pipe(op(this.subject))
    }

    /**
     * Like tap, but always returns the original pipe.
     * @param op
     */
    peek<T2>(op: PipeOperator<T, T2>): this {
        op(this.subject)
        return this
    }

    /**
     * If `check` is truthy, apply the given operator to the item in the pipe and return the result.
     * Otherwise, just return the current pipe unchanged.
@ -86,7 +97,7 @@ export class Pipe<T> {
     */
    when(check: PipeCondition<T>, op: ReflexivePipeOperator<T>): Pipe<T> {
        if ( (typeof check === 'function' && check(this.subject)) || check ) {
-           return Pipe.wrap(op(this.subject))
+           Pipe.wrap(op(this.subject))
        }

        return this
@ -104,7 +115,8 @@ export class Pipe<T> {
            return this
        }

-       return Pipe.wrap(op(this.subject))
+       Pipe.wrap(op(this.subject))
+       return this
    }

    /**
@ -127,4 +139,134 @@ export class Pipe<T> {
    get(): T {
        return this.subject
    }

    /**
     * Get an AsyncPipe with the current item in the pipe.
     */
    async(): AsyncPipe<T> {
        return AsyncPipe.wrap<T>(this.subject)
    }
}

/**
 * A subject function that yields the value in the AsyncPipe.
 */
export type AsyncPipeResolver<T> = () => Awaitable<T>

/**
 * A closure that maps a given pipe item to a different type.
 */
export type AsyncPipeOperator<T, T2> = (subject: T) => Awaitable<T2>

/**
 * A closure that maps a given pipe item to an item of the same type.
 */
export type ReflexiveAsyncPipeOperator<T> = (subject: T) => Awaitable<T|void>

/**
 * A condition or condition-resolving function for pipe methods.
 */
export type AsyncPipeCondition<T> = boolean | ((subject: T) => Awaitable<boolean>)

/**
 * An asynchronous version of the Pipe helper.
 */
export class AsyncPipe<T> {
    /**
     * Get an AsyncPipe with the given value in it.
     * @param subject
     */
    static wrap<subjectType>(subject: subjectType): AsyncPipe<subjectType> {
        return new AsyncPipe<subjectType>(() => subject)
    }

    constructor(
        /** The current value resolver of the pipe. */
        private subject: AsyncPipeResolver<T>,
    ) {}

    /**
     * Apply a transformative operator to the pipe.
     * @param op
     */
    tap<T2>(op: AsyncPipeOperator<T, T2>): AsyncPipe<T2> {
        return new AsyncPipe<T2>(async () => op(await this.subject()))
    }

    /**
     * Apply an operator to the pipe, but return the reference
     * to the current pipe. The operator is resolved when the
     * overall pipe is resolved.
     * @param op
     */
    peek<T2>(op: AsyncPipeOperator<T, T2>): AsyncPipe<T> {
        return new AsyncPipe<T>(async () => {
            const subject = await this.subject()
            await op(subject)
            return subject
        })
    }

    /**
     * Apply an operator to the pipe, if the check condition passes.
     * @param check
     * @param op
     */
    when(check: AsyncPipeCondition<T>, op: ReflexiveAsyncPipeOperator<T>): AsyncPipe<T> {
        return new AsyncPipe<T>(async () => {
            let subject

            if ( typeof check === 'function' ) {
                check = await check(subject = await this.subject())
            }

            subject = subject ?? await this.subject()
            if ( check ) {
                return ((await op(subject)) ?? subject) as T
            }

            return subject as T
        })
    }

    /**
     * Apply an operator to the pipe, if the check condition fails.
     * @param check
     * @param op
     */
    unless(check: AsyncPipeCondition<T>, op: ReflexiveAsyncPipeOperator<T>): AsyncPipe<T> {
        if ( typeof check === 'function' ) {
            return this.when(async (subject: T) => !(await check(subject)), op)
        }

        return this.when(!check, op)
    }

    /**
     * Alias of `unless()`.
     * @param check
     * @param op
     */
    whenNot(check: AsyncPipeCondition<T>, op: ReflexiveAsyncPipeOperator<T>): AsyncPipe<T> {
        return this.unless(check, op)
    }

    /**
     * Get the transformed value from the pipe.
     */
    async resolve(): Promise<T> {
        return this.subject()
    }

    /**
     * Resolve the value and return it in a sync `Pipe` instance.
     */
    async sync(): Promise<Pipe<T>> {
        return Pipe.wrap<T>(await this.subject())
    }

    /** Get the transformed value from the pipe. Allows awaiting the pipe directly. */
    then(): Promise<T> {
        return this.resolve()
    }
}
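To show how the lazy AsyncPipe composes, a rough sketch; the User shape and loadUser() are invented for the example, and only the AsyncPipe methods defined above are relied on.

```ts
interface User { id: number, banned: boolean }

// Hypothetical async loader used only for this sketch.
const loadUser = async (id: number): Promise<User> => ({ id, banned: false })

const user = await AsyncPipe.wrap(42)                        // start from a plain value
    .tap(id => loadUser(id))                                 // async transform
    .peek(loaded => console.log('loaded user', loaded.id))   // side effect; subject passes through
    .when(loaded => loaded.banned, loaded => ({ ...loaded, banned: false }))
    .resolve()                                               // nothing executes until resolve()
```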
@ -1,6 +1,28 @@
import * as nodeUUID from 'uuid'
import {ErrorWithContext} from '../error/ErrorWithContext'
import {JSONState} from './Rehydratable'
import {KeyValue} from './types'

/**
 * Create an array of key-value pairs for the keys in a uniform object.
 * @param obj
 */
export function objectToKeyValue<T>(obj: {[key: string]: T}): KeyValue<T>[] {
    const values: KeyValue<T>[] = []

    for ( const key in obj ) {
        if ( !Object.prototype.hasOwnProperty.call(obj, key) ) {
            continue
        }

        values.push({
            key,
            value: obj[key],
        })
    }

    return values
}

/**
 * Make a deep copy of an object.
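A quick sketch of objectToKeyValue() with made-up data:

```ts
objectToKeyValue({ us: 'United States', ca: 'Canada' })
// => [ { key: 'us', value: 'United States' }, { key: 'ca', value: 'Canada' } ]
```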
@ -47,3 +47,17 @@ export function padCenter(string: string, length: number, padWith = ' '): string
    return string
}

/**
 * Convert a string to PascalCase.
 * @param input
 */
export function stringToPascal(input: string): string {
    return input.split(/[\s_]+/i)
        .map(part => {
            return part[0].toUpperCase() + part.substr(1)
        })
        .join('')
        .split(/\W+/i)
        .join('')
}
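And stringToPascal() on a couple of invented inputs:

```ts
stringToPascal('create users table')   // => 'CreateUsersTable'
stringToPascal('file_logger')          // => 'FileLogger'
```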
@ -3,3 +3,9 @@ export type Awaitable<T> = T | Promise<T>

/** Type alias for something that may be undefined. */
export type Maybe<T> = T | undefined

/** Type alias for a callback that accepts a typed argument. */
export type ParameterizedCallback<T> = ((arg: T) => any)

/** A key-value form of a given type. */
export type KeyValue<T> = {key: string, value: T}