diff --git a/.gitignore b/.gitignore
index ce3080911..fff3d9393 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@ dist
 node_modules
 .vscode
 .idea
+.DS_Store
\ No newline at end of file
diff --git a/src/dialect/postgres/postgres-dialect-config.ts b/src/dialect/postgres/postgres-dialect-config.ts
index 88facba7c..f364a599c 100644
--- a/src/dialect/postgres/postgres-dialect-config.ts
+++ b/src/dialect/postgres/postgres-dialect-config.ts
@@ -39,6 +39,21 @@ export interface PostgresDialectConfig {
   * Called every time a connection is acquired from the pool.
   */
  onReserveConnection?: (connection: DatabaseConnection) => Promise<void>
+
+  /**
+   * Used to define custom type parsers, keyed by the Postgres type OID.
+   *
+   * @example
+   * ```
+   * new PostgresDialect({
+   *   pool: new Pool({ connectionString: 'postgres://localhost:5432/mydb' }),
+   *   types: {
+   *     [types.builtins.DATE]: (v) => v,
+   *   }
+   * })
+   * ```
+   */
+  types?: Record<number, (value: string) => any>
 }
 
 /**
@@ -61,6 +76,7 @@
   ): Promise<PostgresQueryResult<R>>
   query<R>(cursor: PostgresCursor<R>): PostgresCursor<R>
   release(): void
+  setTypeParser(typeOrOid: number, parser: (val: string) => any): void
 }
 
 export interface PostgresCursor<T> {
diff --git a/src/dialect/postgres/postgres-driver.ts b/src/dialect/postgres/postgres-driver.ts
index 5fe16648f..8a405c174 100644
--- a/src/dialect/postgres/postgres-driver.ts
+++ b/src/dialect/postgres/postgres-driver.ts
@@ -35,9 +35,18 @@ export class PostgresDriver implements Driver {
 
   async acquireConnection(): Promise<DatabaseConnection> {
     const client = await this.#pool!.connect()
+
     let connection = this.#connections.get(client)
 
     if (!connection) {
+      if (this.#config.types) {
+        // Object.entries yields string keys; pg's setTypeParser expects a numeric OID.
+        for (const [typeId, parserFn] of Object.entries(this.#config.types)) {
+          const id = Number(typeId)
+          client.setTypeParser(id, parserFn)
+        }
+      }
+
       connection = new PostgresConnection(client, {
         cursor: this.#config.cursor ?? null,
       })
diff --git a/src/helpers/postgres.ts b/src/helpers/postgres.ts
index 8cbb39f4b..db7593f97 100644
--- a/src/helpers/postgres.ts
+++ b/src/helpers/postgres.ts
@@ -227,3 +227,30 @@ export type MergeAction = 'INSERT' | 'UPDATE' | 'DELETE'
 export function mergeAction(): RawBuilder<MergeAction> {
   return sql`merge_action()`
 }
+
+/**
+ * We can't use `new Date(v).toISOString()` because `Date` has less precision
+ * (milliseconds) compared to Postgres' (microseconds).
+ *
+ * @see https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME-OUTPUT
+ * @private
+ */
+function postgresTimestampToIsoString(value: string): string {
+  return value.replace(' ', 'T').replace(/([+-]\d{2})$/, '$1:00')
+}
+
+// Reference: https://github.com/brianc/node-pg-types/blob/master/lib/builtins.js
+const TIMESTAMP = 1114
+const TIMESTAMPTZ = 1184
+const DATE = 1082
+
+/**
+ * Sensible type parsers that are optimised for:
+ * - No loss of data
+ * - Matching the output of `jsonArrayFrom`/`jsonObjectFrom`
+ */
+export const SENSIBLE_TYPES: Record<number, (value: string) => any> = {
+  [TIMESTAMP]: (v) => postgresTimestampToIsoString(v),
+  [TIMESTAMPTZ]: (v) => postgresTimestampToIsoString(v),
+  [DATE]: (v) => v,
+}
diff --git a/test/node/src/sensible-pg-defaults.test.ts b/test/node/src/sensible-pg-defaults.test.ts
new file mode 100644
index 000000000..f5496c7b0
--- /dev/null
+++ b/test/node/src/sensible-pg-defaults.test.ts
@@ -0,0 +1,130 @@
+import { Pool } from 'pg'
+import { Generated, Kysely, PostgresDialect, sql } from '../../../'
+import { jsonObjectFrom, SENSIBLE_TYPES } from '../../../helpers/postgres'
+
+import {
+  destroyTest,
+  initTest,
+  TestContext,
+  expect,
+  Database,
+  insertDefaultDataSet,
+  clearDatabase,
+  DIALECTS,
+  DIALECT_CONFIGS,
+  PLUGINS,
+} from './test-setup.js'
+
+interface Values {
+  id: Generated<number>
+  bigint: string
+  timestamptz: string
+  timestamp: string
+  date: string
+  time: string
+  timetz: string
+  array: number[]
+}
+
+if (DIALECTS.includes('postgres')) {
+  const dialect = 'postgres'
+
+  describe(`${dialect} sensible defaults`, () => {
+    let ctx: TestContext
+    let db: Kysely<Database & { values: Values }>
+
+    before(async function () {
+      ctx = await initTest(this, dialect, {})
+
+      await ctx.db.schema
+        .createTable('values')
+        .addColumn('id', 'serial', (col) => col.primaryKey())
+        .addColumn('bigint', 'bigint', (col) => col.notNull())
+        .addColumn('timestamptz', 'timestamptz', (col) => col.notNull())
+        .addColumn('timestamp', 'timestamp', (col) => col.notNull())
+        .addColumn('date', 'date', (col) => col.notNull())
+        .addColumn('time', 'time', (col) => col.notNull())
+        .addColumn('timetz', 'timetz', (col) => col.notNull())
+        .addColumn('array', sql`integer[]`, (col) => col.notNull())
+        .execute()
+
+      db = new Kysely<Database & { values: Values }>({
+        dialect: new PostgresDialect({
+          pool: async () => new Pool(DIALECT_CONFIGS.postgres),
+          types: SENSIBLE_TYPES,
+        }),
+        plugins: PLUGINS,
+      })
+    })
+
+    beforeEach(async () => {
+      await insertDefaultDataSet(ctx)
+
+      await db
+        .insertInto('values')
+        .values({
+          bigint: '9223372036854775807',
+          timestamptz: '2025-08-10 14:44:40.687342+02',
+          timestamp: '2025-08-10 14:44:40.687342Z',
+          date: '2025-08-10',
+          time: '14:44:40.687342',
+          timetz: '14:44:40.687342+02',
+          array: [1, 2, 3],
+        })
+        .execute()
+    })
+
+    afterEach(async () => {
+      await db.deleteFrom('values').execute()
+      await clearDatabase(ctx)
+    })
+
+    after(async () => {
+      await ctx.db.schema.dropTable('values').ifExists().execute()
+      await destroyTest(ctx)
+    })
+
+    it('regular selects should return the same values as JSON serialized values', async () => {
+      const columns: (keyof Values)[] = [
+        'timestamptz',
+        'timestamp',
+        'date',
+        'timetz',
+        'time',
+        'array',
+      ]
+      const rawValues = await db
+        .selectFrom('values')
+        .select(columns)
+        .executeTakeFirstOrThrow()
+      const { value: jsonValues } = await db
+        .selectNoFrom((eb) =>
+          jsonObjectFrom(eb.selectFrom('values').select(columns))
+            .$notNull()
+            .as('value'),
+        )
+        .executeTakeFirstOrThrow()
+
+      expect(rawValues).to.eql(jsonValues)
+    })
+
+    it('to prevent data loss, some types should not have the same value as their JSON serialized equivalent', async () => {
+      const columns: (keyof Values)[] = ['bigint']
+      const rawValues = await db
+        .selectFrom('values')
+        .select(columns)
+        .executeTakeFirstOrThrow()
+      const { value: jsonValues } = await db
+        .selectNoFrom((eb) =>
+          jsonObjectFrom(eb.selectFrom('values').select(columns))
+            .$notNull()
+            .as('value'),
+        )
+        .executeTakeFirstOrThrow()
+
+      expect(rawValues.bigint).to.eql('9223372036854775807')
+      // JSON numbers are doubles: 2^63 - 1 rounds to the nearest representable value.
+      expect(jsonValues.bigint).to.eql(9223372036854776000)
+    })
+  })
+}
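Usage note (not part of the diff): a minimal sketch of how the new `types` option and the `SENSIBLE_TYPES` helper would be wired up together, assuming the published `kysely/helpers/postgres` entry point; the connection string and the `DB` schema are illustrative.

```ts
import { Kysely, PostgresDialect } from 'kysely'
import { SENSIBLE_TYPES } from 'kysely/helpers/postgres'
import { Pool } from 'pg'

// Illustrative application schema, not from the diff.
interface DB {
  events: { id: number; created_at: string }
}

const db = new Kysely<DB>({
  dialect: new PostgresDialect({
    pool: new Pool({ connectionString: 'postgres://localhost:5432/mydb' }),
    // The driver applies these to each acquired connection via
    // client.setTypeParser(), so timestamp/timestamptz/date columns come
    // back as ISO-8601 strings matching jsonObjectFrom()/jsonArrayFrom().
    types: SENSIBLE_TYPES,
  }),
})
```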
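The precision rationale in the helper's doc comment can also be sanity-checked in isolation; the snippet below inlines the same two `replace` calls as the `@private` `postgresTimestampToIsoString`:

```ts
// Mirrors the private postgresTimestampToIsoString() from the diff.
const toIso = (v: string) =>
  v.replace(' ', 'T').replace(/([+-]\d{2})$/, '$1:00')

console.log(toIso('2025-08-10 14:44:40.687342+02'))
// '2025-08-10T14:44:40.687342+02:00' (microsecond digits preserved)

console.log(new Date('2025-08-10 14:44:40.687342+02').toISOString())
// '2025-08-10T12:44:40.687Z' on Node/V8: Date stores milliseconds,
// so the microsecond digits are lost.
```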