diff --git a/.eslintignore b/.eslintignore index d88c5d722..c13a17faa 100644 --- a/.eslintignore +++ b/.eslintignore @@ -6,3 +6,5 @@ examples **/*.mjs **/*.cjs **/playground +integration-tests/tests/prisma/*/client +integration-tests/tests/prisma/*/drizzle diff --git a/.eslintrc.yaml b/.eslintrc.yaml index bc71e00bc..906d73ffa 100644 --- a/.eslintrc.yaml +++ b/.eslintrc.yaml @@ -10,7 +10,17 @@ plugins: - import - unused-imports - no-instanceof - - drizzle + - drizzle-internal +overrides: + - files: + - '**/tests/**/*.ts' + - '**/type-tests/**/*.ts' + rules: + import/extensions: 'off' + no-instanceof: 'off' + - files: 'eslint-plugin-drizzle/**/*' + rules: + import/extensions: 'off' rules: '@typescript-eslint/consistent-type-imports': - error @@ -24,6 +34,10 @@ rules: import/no-useless-path-segments: error import/newline-after-import: error import/no-duplicates: error + import/extensions: + - error + - always + - ignorePackages: true '@typescript-eslint/no-explicit-any': 'off' '@typescript-eslint/no-non-null-assertion': 'off' '@typescript-eslint/no-namespace': 'off' @@ -61,7 +75,7 @@ rules: 'unicorn/relative-url-style': 'off' 'eqeqeq': 'error' 'no-instanceof/no-instanceof': 'error' - 'drizzle/require-entity-kind': 'error' + 'drizzle-internal/require-entity-kind': 'error' 'unicorn/prefer-string-replace-all': 'off' 'unicorn/no-process-exit': 'off' '@typescript-eslint/ban-ts-comment': 'off' diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 9a0e35b0f..966e3d12d 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -12,7 +12,7 @@ on: jobs: analyze: name: Analyze - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 permissions: actions: read contents: read diff --git a/.github/workflows/release-feature-branch.yaml b/.github/workflows/release-feature-branch.yaml index 63d7c9e30..a130f78b9 100644 --- a/.github/workflows/release-feature-branch.yaml +++ b/.github/workflows/release-feature-branch.yaml @@ -61,7 +61,7 @@ jobs: 
--health-timeout 5s --health-retries 5 ports: - - 5432:5432 + - 55432:5432 mysql: image: mysql:8 env: @@ -73,13 +73,13 @@ jobs: --health-timeout 5s --health-retries 5 ports: - - 3306:3306 + - 33306:3306 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: 18 + node-version: '18.18' registry-url: 'https://registry.npmjs.org' - uses: pnpm/action-setup@v3 @@ -131,15 +131,25 @@ jobs: - name: Build if: steps.checks.outputs.has_new_release == 'true' run: | + ( + cd drizzle-orm + pnpm prisma generate --schema src/prisma/schema.prisma + ) + ( + cd integration-tests + pnpm prisma generate --schema tests/prisma/pg/schema.prisma + pnpm prisma generate --schema tests/prisma/mysql/schema.prisma + pnpm prisma generate --schema tests/prisma/sqlite/schema.prisma + ) pnpm build - name: Run tests if: steps.checks.outputs.has_new_release == 'true' env: - PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:5432/drizzle + PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55432/drizzle PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle - MYSQL_CONNECTION_STRING: mysql://root:root@localhost:3306/drizzle + MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} diff --git a/.github/workflows/release-latest.yaml b/.github/workflows/release-latest.yaml index c5026bd42..3e94649f9 100644 --- a/.github/workflows/release-latest.yaml +++ b/.github/workflows/release-latest.yaml @@ -54,7 +54,7 @@ jobs: --health-timeout 5s --health-retries 5 ports: - - 5432:5432 + - 55432:5432 mysql: image: mysql:8 env: @@ -66,13 +66,13 @@ jobs: --health-timeout 5s --health-retries 5 ports: - - 3306:3306 + - 33306:3306 steps: - uses: 
actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: 18 + node-version: '18.18' registry-url: 'https://registry.npmjs.org' - uses: pnpm/action-setup@v3 @@ -134,15 +134,25 @@ jobs: - name: Build if: steps.checks.outputs.has_new_release == 'true' run: | + ( + cd drizzle-orm + pnpm prisma generate --schema src/prisma/schema.prisma + ) + ( + cd integration-tests + pnpm prisma generate --schema tests/prisma/pg/schema.prisma + pnpm prisma generate --schema tests/prisma/mysql/schema.prisma + pnpm prisma generate --schema tests/prisma/sqlite/schema.prisma + ) pnpm build - name: Run tests if: steps.checks.outputs.has_new_release == 'true' env: - PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:5432/drizzle + PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55432/drizzle PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle - MYSQL_CONNECTION_STRING: mysql://root:root@localhost:3306/drizzle + MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} diff --git a/.github/workflows/unpublish-release-feature-branch.yaml b/.github/workflows/unpublish-release-feature-branch.yaml index cf7f247fd..1f0d30624 100644 --- a/.github/workflows/unpublish-release-feature-branch.yaml +++ b/.github/workflows/unpublish-release-feature-branch.yaml @@ -19,7 +19,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 18 + node-version: '18.18' registry-url: 'https://registry.npmjs.org' - name: Unpublish diff --git a/.gitignore b/.gitignore index 8982b9105..45788cac5 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,5 @@ dist.new .rollup.cache dist-dts rollup.config-*.mjs +*.log +.DS_Store diff --git 
a/changelogs/drizzle-orm/0.31.0-beta.md b/changelogs/drizzle-orm/0.31.0-beta.md index 275d5d5d8..48ab86b8f 100644 --- a/changelogs/drizzle-orm/0.31.0-beta.md +++ b/changelogs/drizzle-orm/0.31.0-beta.md @@ -136,3 +136,5 @@ db.select({ innerProduct: sql`(${maxInnerProduct(items.embedding, [3,1,2])}) * - // and more! ``` + +- 🛠️ Fixed RQB behavior for tables with same names in different schemas diff --git a/changelogs/drizzle-orm/0.31.3.md b/changelogs/drizzle-orm/0.31.3.md new file mode 100644 index 000000000..4c10b6daa --- /dev/null +++ b/changelogs/drizzle-orm/0.31.3.md @@ -0,0 +1,17 @@ +### Bug fixed + +- 🛠️ Fixed RQB behavior for tables with same names in different schemas +- 🛠️ Fixed [BUG]: Mismatched type hints when using RDS Data API - #2097 + +### New Prisma-Drizzle extension + +```ts +import { PrismaClient } from '@prisma/client'; +import { drizzle } from 'drizzle-orm/prisma/pg'; +import { User } from './drizzle'; + +const prisma = new PrismaClient().$extends(drizzle()); +const users = await prisma.$drizzle.select().from(User); +``` + +For more info, check docs: https://orm.drizzle.team/docs/prisma diff --git a/dprint.json b/dprint.json index 98a398c2e..385b610b6 100644 --- a/dprint.json +++ b/dprint.json @@ -21,11 +21,13 @@ "**/*snapshot.json", "**/_journal.json", "**/tsup.config*.mjs", - "**/.sst" + "**/.sst", + "integration-tests/tests/prisma/*/client", + "integration-tests/tests/prisma/*/drizzle" ], "plugins": [ - "https://plugins.dprint.dev/typescript-0.83.0.wasm", - "https://plugins.dprint.dev/json-0.19.2.wasm", - "https://plugins.dprint.dev/markdown-0.15.2.wasm" + "https://plugins.dprint.dev/typescript-0.91.1.wasm", + "https://plugins.dprint.dev/json-0.19.3.wasm", + "https://plugins.dprint.dev/markdown-0.17.1.wasm" ] } diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json index 6a5e0e951..9eae4cdb6 100644 --- a/drizzle-orm/package.json +++ b/drizzle-orm/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-orm", - "version": "0.31.2", + 
"version": "0.31.3", "description": "Drizzle ORM package for SQL databases", "type": "module", "scripts": { @@ -51,6 +51,8 @@ "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1", + "@prisma/client": "*", + "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/react": ">=18", @@ -67,8 +69,7 @@ "postgres": ">=3", "react": ">=18", "sql.js": ">=1", - "sqlite3": ">=5", - "@tidbcloud/serverless": "*" + "sqlite3": ">=5" }, "peerDependenciesMeta": { "mysql2": { @@ -160,6 +161,7 @@ "@opentelemetry/api": "^1.4.1", "@originjs/vite-plugin-commonjs": "^1.0.3", "@planetscale/database": "^1.16.0", + "@prisma/client": "5.14.0", "@tidbcloud/serverless": "^0.1.1", "@types/better-sqlite3": "^7.6.4", "@types/node": "^20.2.5", @@ -177,13 +179,14 @@ "mysql2": "^3.3.3", "pg": "^8.11.0", "postgres": "^3.3.5", + "prisma": "5.14.0", "react": "^18.2.0", "sql.js": "^1.8.0", "sqlite3": "^5.1.2", "tslib": "^2.5.2", "tsx": "^3.12.7", - "vite-tsconfig-paths": "^4.2.0", - "vitest": "^0.31.4", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "^1.6.0", "zod": "^3.20.2", "zx": "^7.2.2" } diff --git a/drizzle-orm/scripts/build.ts b/drizzle-orm/scripts/build.ts index 058a719b9..393719f31 100755 --- a/drizzle-orm/scripts/build.ts +++ b/drizzle-orm/scripts/build.ts @@ -50,10 +50,10 @@ await fs.remove('dist.new'); await Promise.all([ (async () => { - await $`tsup`; + await $`tsup`.stdio('pipe', 'pipe', 'pipe'); })(), (async () => { - await $`tsc -p tsconfig.dts.json`; + await $`tsc -p tsconfig.dts.json`.stdio('pipe', 'pipe', 'pipe'); await cpy('dist-dts/**/*.d.ts', 'dist.new', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); @@ -64,8 +64,8 @@ await Promise.all([ ]); await Promise.all([ - $`tsup src/version.ts --no-config --dts --format esm --outDir dist.new`, - $`tsup src/version.ts --no-config --dts --format cjs --outDir dist.new`, + $`tsup src/version.ts --no-config --dts --format esm --outDir 
dist.new`.stdio('pipe', 'pipe', 'pipe'), + $`tsup src/version.ts --no-config --dts --format cjs --outDir dist.new`.stdio('pipe', 'pipe', 'pipe'), ]); await $`scripts/fix-imports.ts`; diff --git a/drizzle-orm/src/aws-data-api/pg/driver.ts b/drizzle-orm/src/aws-data-api/pg/driver.ts index cab5cc6e4..5174c24d0 100644 --- a/drizzle-orm/src/aws-data-api/pg/driver.ts +++ b/drizzle-orm/src/aws-data-api/pg/driver.ts @@ -56,17 +56,14 @@ export class AwsPgDialect extends PgDialect { { table, values, onConflict, returning }: PgInsertConfig>, ): SQL { const columns: Record = table[Table.Symbol.Columns]; - const colEntries: [string, PgColumn][] = Object.entries(columns); for (const value of values) { - for (const [fieldName, col] of colEntries) { + for (const fieldName of Object.keys(columns)) { const colValue = value[fieldName]; if ( is(colValue, Param) && colValue.value !== undefined && is(colValue.encoder, PgArray) && Array.isArray(colValue.value) ) { - value[fieldName] = sql`cast(${col.mapToDriverValue(colValue.value)} as ${ - sql.raw(colValue.encoder.getSQLType()) - })`; + value[fieldName] = sql`cast(${colValue} as ${sql.raw(colValue.encoder.getSQLType())})`; } } } @@ -83,9 +80,7 @@ export class AwsPgDialect extends PgDialect { currentColumn && is(colValue, Param) && colValue.value !== undefined && is(colValue.encoder, PgArray) && Array.isArray(colValue.value) ) { - set[colName] = sql`cast(${currentColumn?.mapToDriverValue(colValue.value)} as ${ - sql.raw(colValue.encoder.getSQLType()) - })`; + set[colName] = sql`cast(${colValue} as ${sql.raw(colValue.encoder.getSQLType())})`; } } return super.buildUpdateSet(table, set); diff --git a/drizzle-orm/src/aws-data-api/pg/session.ts b/drizzle-orm/src/aws-data-api/pg/session.ts index 353a77cf3..4fc43ddf6 100644 --- a/drizzle-orm/src/aws-data-api/pg/session.ts +++ b/drizzle-orm/src/aws-data-api/pg/session.ts @@ -10,11 +10,11 @@ import type { Logger } from '~/logger.ts'; import { type PgDialect, PgPreparedQuery, + type 
PgQueryResultHKT, PgSession, PgTransaction, type PgTransactionConfig, type PreparedQueryConfig, - type QueryResultHKT, } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; @@ -265,6 +265,6 @@ export class AwsDataApiTransaction< export type AwsDataApiPgQueryResult = ExecuteStatementCommandOutput & { rows: T[] }; -export interface AwsDataApiPgQueryResultHKT extends QueryResultHKT { +export interface AwsDataApiPgQueryResultHKT extends PgQueryResultHKT { type: AwsDataApiPgQueryResult; } diff --git a/drizzle-orm/src/d1/session.ts b/drizzle-orm/src/d1/session.ts index e2e184fa9..0f2989c12 100644 --- a/drizzle-orm/src/d1/session.ts +++ b/drizzle-orm/src/d1/session.ts @@ -149,7 +149,7 @@ function d1ToRawMapping(results: any) { } export class D1PreparedQuery extends SQLitePreparedQuery< - { type: 'async'; run: D1Result; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } + { type: 'async'; run: D1Response; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static readonly [entityKind]: string = 'D1PreparedQuery'; @@ -177,7 +177,7 @@ export class D1PreparedQuery): Promise { + run(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? 
{}); this.logger.logQuery(this.query.sql, params); return this.stmt.bind(...params).run(); diff --git a/drizzle-orm/src/mysql-core/db.ts b/drizzle-orm/src/mysql-core/db.ts index 9b39e68e8..8df6ff343 100644 --- a/drizzle-orm/src/mysql-core/db.ts +++ b/drizzle-orm/src/mysql-core/db.ts @@ -18,18 +18,18 @@ import { RelationalQueryBuilder } from './query-builders/query.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import type { Mode, + MySqlQueryResultHKT, + MySqlQueryResultKind, MySqlSession, MySqlTransaction, MySqlTransactionConfig, PreparedQueryHKTBase, - QueryResultHKT, - QueryResultKind, } from './session.ts'; import type { WithSubqueryWithSelection } from './subquery.ts'; import type { MySqlTable } from './table.ts'; export class MySqlDatabase< - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record = {}, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, @@ -452,7 +452,7 @@ export class MySqlDatabase< execute( query: SQLWrapper, - ): Promise> { + ): Promise> { return this.session.execute(query.getSQL()); } @@ -470,7 +470,7 @@ export class MySqlDatabase< export type MySQLWithReplicas = Q & { $primary: Q }; export const withReplicas = < - HKT extends QueryResultHKT, + HKT extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record, TSchema extends TablesRelationalConfig, diff --git a/drizzle-orm/src/mysql-core/dialect.ts b/drizzle-orm/src/mysql-core/dialect.ts index 923749966..215021898 100644 --- a/drizzle-orm/src/mysql-core/dialect.ts +++ b/drizzle-orm/src/mysql-core/dialect.ts @@ -19,7 +19,7 @@ import { import { Param, SQL, sql, View } from '~/sql/sql.ts'; import type { Name, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; -import { getTableName, Table } from '~/table.ts'; +import { getTableName, getTableUniqueName, Table } 
from '~/table.ts'; import { orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { MySqlColumn } from './columns/common.ts'; @@ -616,7 +616,7 @@ export class MySqlDialect { } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); - const relationTableName = relation.referencedTable[Table.Symbol.Name]; + const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( @@ -913,7 +913,7 @@ export class MySqlDialect { } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); - const relationTableName = relation.referencedTable[Table.Symbol.Name]; + const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( diff --git a/drizzle-orm/src/mysql-core/query-builders/delete.ts b/drizzle-orm/src/mysql-core/query-builders/delete.ts index 4deffbe03..e9a48da8e 100644 --- a/drizzle-orm/src/mysql-core/query-builders/delete.ts +++ b/drizzle-orm/src/mysql-core/query-builders/delete.ts @@ -1,13 +1,13 @@ import { entityKind } from '~/entity.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { - AnyQueryResultHKT, + AnyMySqlQueryResultHKT, + MySqlPreparedQueryConfig, + MySqlQueryResultHKT, + MySqlQueryResultKind, MySqlSession, - PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind, - QueryResultHKT, - QueryResultKind, } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; @@ -33,7 +33,7 @@ export type MySqlDeleteWithout< export type MySqlDelete< TTable extends MySqlTable = MySqlTable, - 
TQueryResult extends QueryResultHKT = AnyQueryResultHKT, + TQueryResult extends MySqlQueryResultHKT = AnyMySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = MySqlDeleteBase; @@ -46,8 +46,8 @@ export interface MySqlDeleteConfig { export type MySqlDeletePrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], - PreparedQueryConfig & { - execute: QueryResultKind; + MySqlPreparedQueryConfig & { + execute: MySqlQueryResultKind; iterator: never; }, true @@ -63,11 +63,11 @@ type AnyMySqlDeleteBase = MySqlDeleteBase; export interface MySqlDeleteBase< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, -> extends QueryPromise> { +> extends QueryPromise> { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; @@ -79,13 +79,13 @@ export interface MySqlDeleteBase< export class MySqlDeleteBase< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, -> extends QueryPromise> implements SQLWrapper { +> extends QueryPromise> implements SQLWrapper { static readonly [entityKind]: string = 'MySqlDelete'; private config: MySqlDeleteConfig; diff --git a/drizzle-orm/src/mysql-core/query-builders/insert.ts b/drizzle-orm/src/mysql-core/query-builders/insert.ts index 3aa51329f..9b1b5c94e 100644 --- a/drizzle-orm/src/mysql-core/query-builders/insert.ts +++ b/drizzle-orm/src/mysql-core/query-builders/insert.ts @@ -1,13 +1,13 @@ import { entityKind, is } from '~/entity.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { - 
AnyQueryResultHKT, + AnyMySqlQueryResultHKT, + MySqlPreparedQueryConfig, + MySqlQueryResultHKT, + MySqlQueryResultKind, MySqlSession, - PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind, - QueryResultHKT, - QueryResultKind, } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; @@ -34,7 +34,7 @@ export type MySqlInsertValue = export class MySqlInsertBuilder< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, > { static readonly [entityKind]: string = 'MySqlInsertBuilder'; @@ -96,8 +96,8 @@ export type MySqlInsertDynamic = MySqlInsert< export type MySqlInsertPrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], - PreparedQueryConfig & { - execute: QueryResultKind; + MySqlPreparedQueryConfig & { + execute: MySqlQueryResultKind; iterator: never; }, true @@ -109,7 +109,7 @@ export type MySqlInsertOnDuplicateKeyUpdateConfig = { export type MySqlInsert< TTable extends MySqlTable = MySqlTable, - TQueryResult extends QueryResultHKT = AnyQueryResultHKT, + TQueryResult extends MySqlQueryResultHKT = AnyMySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = MySqlInsertBase; @@ -117,11 +117,11 @@ export type AnyMySqlInsert = MySqlInsertBase; export interface MySqlInsertBase< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, -> extends QueryPromise>, SQLWrapper { +> extends QueryPromise>, SQLWrapper { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; @@ -133,14 +133,14 @@ export interface MySqlInsertBase< export class MySqlInsertBase< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends 
MySqlQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, -> extends QueryPromise> implements SQLWrapper { +> extends QueryPromise> implements SQLWrapper { static readonly [entityKind]: string = 'MySqlInsert'; declare protected $table: TTable; diff --git a/drizzle-orm/src/mysql-core/query-builders/query.ts b/drizzle-orm/src/mysql-core/query-builders/query.ts index 8efeb0692..955f73428 100644 --- a/drizzle-orm/src/mysql-core/query-builders/query.ts +++ b/drizzle-orm/src/mysql-core/query-builders/query.ts @@ -11,7 +11,13 @@ import { import type { Query, QueryWithTypings, SQL } from '~/sql/sql.ts'; import type { KnownKeysOnly } from '~/utils.ts'; import type { MySqlDialect } from '../dialect.ts'; -import type { Mode, MySqlSession, PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind } from '../session.ts'; +import type { + Mode, + MySqlPreparedQueryConfig, + MySqlSession, + PreparedQueryHKTBase, + PreparedQueryKind, +} from '../session.ts'; import type { MySqlTable } from '../table.ts'; export class RelationalQueryBuilder< @@ -102,7 +108,7 @@ export class MySqlRelationalQuery< } return rows as TResult; }, - ) as PreparedQueryKind; + ) as PreparedQueryKind; } private _getQuery() { diff --git a/drizzle-orm/src/mysql-core/query-builders/select.ts b/drizzle-orm/src/mysql-core/query-builders/select.ts index 59dbe914e..a5a0ca69a 100644 --- a/drizzle-orm/src/mysql-core/query-builders/select.ts +++ b/drizzle-orm/src/mysql-core/query-builders/select.ts @@ -1,7 +1,7 @@ import { entityKind, is } from '~/entity.ts'; import type { MySqlColumn } from '~/mysql-core/columns/index.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; -import type { MySqlSession, PreparedQueryConfig, 
PreparedQueryHKTBase } from '~/mysql-core/session.ts'; +import type { MySqlPreparedQueryConfig, MySqlSession, PreparedQueryHKTBase } from '~/mysql-core/session.ts'; import type { SubqueryWithSelection } from '~/mysql-core/subquery.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; @@ -950,7 +950,7 @@ export class MySqlSelectBase< } const fieldsList = orderSelectedFields(this.config.fields); const query = this.session.prepareQuery< - PreparedQueryConfig & { execute: SelectResult[] }, + MySqlPreparedQueryConfig & { execute: SelectResult[] }, TPreparedQueryHKT >(this.dialect.sqlToQuery(this.getSQL()), fieldsList); query.joinsNotNullableMap = this.joinsNotNullableMap; diff --git a/drizzle-orm/src/mysql-core/query-builders/select.types.ts b/drizzle-orm/src/mysql-core/query-builders/select.types.ts index bfefd7613..5f490a2d9 100644 --- a/drizzle-orm/src/mysql-core/query-builders/select.types.ts +++ b/drizzle-orm/src/mysql-core/query-builders/select.types.ts @@ -22,7 +22,7 @@ import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from '~/table.ts'; import type { Assume, ValidateShape } from '~/utils.ts'; -import type { PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind } from '../session.ts'; +import type { MySqlPreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind } from '../session.ts'; import type { MySqlViewBase } from '../view-base.ts'; import type { MySqlViewWithSelection } from '../view.ts'; import type { MySqlSelectBase, MySqlSelectQueryBuilderBase } from './select.ts'; @@ -236,7 +236,7 @@ export type MySqlSelectWithout< export type MySqlSelectPrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], - PreparedQueryConfig & { + MySqlPreparedQueryConfig & { execute: T['_']['result']; iterator: T['_']['result'][number]; }, diff --git 
a/drizzle-orm/src/mysql-core/query-builders/update.ts b/drizzle-orm/src/mysql-core/query-builders/update.ts index 9667e492f..08dbf53a9 100644 --- a/drizzle-orm/src/mysql-core/query-builders/update.ts +++ b/drizzle-orm/src/mysql-core/query-builders/update.ts @@ -2,13 +2,13 @@ import type { GetColumnData } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { - AnyQueryResultHKT, + AnyMySqlQueryResultHKT, + MySqlPreparedQueryConfig, + MySqlQueryResultHKT, + MySqlQueryResultKind, MySqlSession, - PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind, - QueryResultHKT, - QueryResultKind, } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; @@ -35,7 +35,7 @@ export type MySqlUpdateSetSource = export class MySqlUpdateBuilder< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, > { static readonly [entityKind]: string = 'MySqlUpdateBuilder'; @@ -73,8 +73,8 @@ export type MySqlUpdateWithout< export type MySqlUpdatePrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], - PreparedQueryConfig & { - execute: QueryResultKind; + MySqlPreparedQueryConfig & { + execute: MySqlQueryResultKind; iterator: never; }, true @@ -88,7 +88,7 @@ export type MySqlUpdateDynamic = MySqlUpdate< export type MySqlUpdate< TTable extends MySqlTable = MySqlTable, - TQueryResult extends QueryResultHKT = AnyQueryResultHKT, + TQueryResult extends MySqlQueryResultHKT = AnyMySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = MySqlUpdateBase; @@ -96,11 +96,11 @@ export type AnyMySqlUpdateBase = MySqlUpdateBase; export interface MySqlUpdateBase< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends 
PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, -> extends QueryPromise>, SQLWrapper { +> extends QueryPromise>, SQLWrapper { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; @@ -112,14 +112,14 @@ export interface MySqlUpdateBase< export class MySqlUpdateBase< TTable extends MySqlTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, -> extends QueryPromise> implements SQLWrapper { +> extends QueryPromise> implements SQLWrapper { static readonly [entityKind]: string = 'MySqlUpdate'; private config: MySqlUpdateConfig; diff --git a/drizzle-orm/src/mysql-core/session.ts b/drizzle-orm/src/mysql-core/session.ts index 528782d7b..2dd1e6dcc 100644 --- a/drizzle-orm/src/mysql-core/session.ts +++ b/drizzle-orm/src/mysql-core/session.ts @@ -9,39 +9,40 @@ import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; export type Mode = 'default' | 'planetscale'; -export interface QueryResultHKT { - readonly $brand: 'MySqlQueryRowHKT'; +export interface MySqlQueryResultHKT { + readonly $brand: 'MySqlQueryResultHKT'; readonly row: unknown; readonly type: unknown; } -export interface AnyQueryResultHKT extends QueryResultHKT { +export interface AnyMySqlQueryResultHKT extends MySqlQueryResultHKT { readonly type: any; } -export type QueryResultKind = (TKind & { +export type MySqlQueryResultKind = (TKind & { readonly row: TRow; })['type']; -export interface PreparedQueryConfig { +export interface MySqlPreparedQueryConfig { execute: unknown; iterator: unknown; } -export interface PreparedQueryHKT { +export interface MySqlPreparedQueryHKT { readonly $brand: 
'MySqlPreparedQueryHKT'; readonly config: unknown; readonly type: unknown; } export type PreparedQueryKind< - TKind extends PreparedQueryHKT, - TConfig extends PreparedQueryConfig, + TKind extends MySqlPreparedQueryHKT, + TConfig extends MySqlPreparedQueryConfig, TAssume extends boolean = false, -> = Equal extends true ? Assume<(TKind & { readonly config: TConfig })['type'], PreparedQuery> +> = Equal extends true + ? Assume<(TKind & { readonly config: TConfig })['type'], MySqlPreparedQuery> : (TKind & { readonly config: TConfig })['type']; -export abstract class PreparedQuery { +export abstract class MySqlPreparedQuery { static readonly [entityKind]: string = 'MySqlPreparedQuery'; /** @internal */ @@ -59,7 +60,7 @@ export interface MySqlTransactionConfig { } export abstract class MySqlSession< - TQueryResult extends QueryResultHKT = QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT = MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, @@ -68,14 +69,14 @@ export abstract class MySqlSession< constructor(protected dialect: MySqlDialect) {} - abstract prepareQuery( + abstract prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], ): PreparedQueryKind; execute(query: SQL): Promise { - return this.prepareQuery( + return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, ).execute(); @@ -114,7 +115,7 @@ export abstract class MySqlSession< } export abstract class MySqlTransaction< - TQueryResult extends QueryResultHKT, + TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, @@ -141,6 +142,6 @@ export abstract class MySqlTransaction< ): Promise; } -export interface PreparedQueryHKTBase extends PreparedQueryHKT { - type: PreparedQuery>; 
+export interface PreparedQueryHKTBase extends MySqlPreparedQueryHKT { + type: MySqlPreparedQuery>; } diff --git a/drizzle-orm/src/mysql-proxy/session.ts b/drizzle-orm/src/mysql-proxy/session.ts index 973cb2393..c5ab0295d 100644 --- a/drizzle-orm/src/mysql-proxy/session.ts +++ b/drizzle-orm/src/mysql-proxy/session.ts @@ -6,13 +6,13 @@ import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import { MySqlTransaction } from '~/mysql-core/index.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import type { + MySqlPreparedQueryConfig, + MySqlPreparedQueryHKT, + MySqlQueryResultHKT, MySqlTransactionConfig, - PreparedQueryConfig, - PreparedQueryHKT, PreparedQueryKind, - QueryResultHKT, } from '~/mysql-core/session.ts'; -import { MySqlSession, PreparedQuery as PreparedQueryBase } from '~/mysql-core/session.ts'; +import { MySqlPreparedQuery as PreparedQueryBase, MySqlSession } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; import type { Query, SQL } from '~/sql/sql.ts'; @@ -43,7 +43,7 @@ export class MySqlRemoteSession< this.logger = options.logger ?? 
new NoopLogger(); } - prepareQuery( + prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], @@ -85,7 +85,7 @@ export class MySqlProxyTransaction< } } -export class PreparedQuery extends PreparedQueryBase { +export class PreparedQuery extends PreparedQueryBase { static readonly [entityKind]: string = 'MySqlProxyPreparedQuery'; constructor( @@ -128,10 +128,10 @@ export class PreparedQuery extends PreparedQueryB } } -export interface MySqlRemoteQueryResultHKT extends QueryResultHKT { +export interface MySqlRemoteQueryResultHKT extends MySqlQueryResultHKT { type: MySqlRawQueryResult; } -export interface MySqlRemotePreparedQueryHKT extends PreparedQueryHKT { - type: PreparedQuery>; +export interface MySqlRemotePreparedQueryHKT extends MySqlPreparedQueryHKT { + type: PreparedQuery>; } diff --git a/drizzle-orm/src/mysql2/session.ts b/drizzle-orm/src/mysql2/session.ts index f29e11d6f..be7005c9c 100644 --- a/drizzle-orm/src/mysql2/session.ts +++ b/drizzle-orm/src/mysql2/session.ts @@ -17,14 +17,14 @@ import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import { type Mode, + MySqlPreparedQuery, + type MySqlPreparedQueryConfig, + type MySqlPreparedQueryHKT, + type MySqlQueryResultHKT, MySqlSession, MySqlTransaction, type MySqlTransactionConfig, - PreparedQuery, - type PreparedQueryConfig, - type PreparedQueryHKT, type PreparedQueryKind, - type QueryResultHKT, } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; @@ -38,7 +38,7 @@ export type MySqlQueryResult< T = any, > = [T extends ResultSetHeader ? 
T : T[], FieldPacket[]]; -export class MySql2PreparedQuery extends PreparedQuery { +export class MySql2PreparedQuery extends MySqlPreparedQuery { static readonly [entityKind]: string = 'MySql2PreparedQuery'; private rawQuery: QueryOptions; @@ -156,7 +156,7 @@ export interface MySql2SessionOptions { export class MySql2Session< TFullSchema extends Record, TSchema extends TablesRelationalConfig, -> extends MySqlSession { +> extends MySqlSession { static readonly [entityKind]: string = 'MySql2Session'; private logger: Logger; @@ -173,7 +173,7 @@ export class MySql2Session< this.mode = options.mode; } - prepareQuery( + prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], @@ -289,10 +289,10 @@ function isPool(client: MySql2Client): client is Pool { return 'getConnection' in client; } -export interface MySql2QueryResultHKT extends QueryResultHKT { +export interface MySql2QueryResultHKT extends MySqlQueryResultHKT { type: MySqlRawQueryResult; } -export interface MySql2PreparedQueryHKT extends PreparedQueryHKT { - type: MySql2PreparedQuery>; +export interface MySql2PreparedQueryHKT extends MySqlPreparedQueryHKT { + type: MySql2PreparedQuery>; } diff --git a/drizzle-orm/src/neon-http/driver.ts b/drizzle-orm/src/neon-http/driver.ts index ab78d7356..81a66c69b 100644 --- a/drizzle-orm/src/neon-http/driver.ts +++ b/drizzle-orm/src/neon-http/driver.ts @@ -36,6 +36,7 @@ export class NeonHttpDriver { types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); types.setTypeParser(types.builtins.DATE, (val) => val); + types.setTypeParser(types.builtins.INTERVAL, (val) => val); } } diff --git a/drizzle-orm/src/neon-http/session.ts b/drizzle-orm/src/neon-http/session.ts index 5df6cff92..6d7685116 100644 --- a/drizzle-orm/src/neon-http/session.ts +++ b/drizzle-orm/src/neon-http/session.ts @@ -6,7 +6,7 @@ import { NoopLogger } from '~/logger.ts'; 
import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery as PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; @@ -194,6 +194,6 @@ export class NeonTransaction< export type NeonHttpQueryResult = Omit, 'rows'> & { rows: T[] }; -export interface NeonHttpQueryResultHKT extends QueryResultHKT { +export interface NeonHttpQueryResultHKT extends PgQueryResultHKT { type: NeonHttpQueryResult; } diff --git a/drizzle-orm/src/neon-serverless/session.ts b/drizzle-orm/src/neon-serverless/session.ts index f1ded44ec..82c405333 100644 --- a/drizzle-orm/src/neon-serverless/session.ts +++ b/drizzle-orm/src/neon-serverless/session.ts @@ -13,7 +13,7 @@ import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; @@ -188,6 +188,6 @@ export class NeonTransaction< } } -export interface NeonQueryResultHKT extends QueryResultHKT { +export interface NeonQueryResultHKT extends PgQueryResultHKT { 
type: QueryResult>; } diff --git a/drizzle-orm/src/node-postgres/session.ts b/drizzle-orm/src/node-postgres/session.ts index 0a5e59975..91a21312a 100644 --- a/drizzle-orm/src/node-postgres/session.ts +++ b/drizzle-orm/src/node-postgres/session.ts @@ -5,7 +5,7 @@ import { type Logger, NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; @@ -192,6 +192,6 @@ export class NodePgTransaction< } } -export interface NodePgQueryResultHKT extends QueryResultHKT { +export interface NodePgQueryResultHKT extends PgQueryResultHKT { type: QueryResult>; } diff --git a/drizzle-orm/src/pg-core/db.ts b/drizzle-orm/src/pg-core/db.ts index 01b17f75a..4e8d2f354 100644 --- a/drizzle-orm/src/pg-core/db.ts +++ b/drizzle-orm/src/pg-core/db.ts @@ -8,12 +8,12 @@ import { QueryBuilder, } from '~/pg-core/query-builders/index.ts'; import type { + PgQueryResultHKT, + PgQueryResultKind, PgSession, PgTransaction, PgTransactionConfig, PreparedQueryConfig, - QueryResultHKT, - QueryResultKind, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; @@ -31,7 +31,7 @@ import type { WithSubqueryWithSelection } from './subquery.ts'; import type { PgMaterializedView } from './view.ts'; export class PgDatabase< - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TFullSchema extends Record = 
Record, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, > { @@ -589,10 +589,12 @@ export class PgDatabase< execute = Record>( query: SQLWrapper, - ): PgRaw> { + ): PgRaw> { const sql = query.getSQL(); const builtQuery = this.dialect.sqlToQuery(sql); - const prepared = this.session.prepareQuery }>( + const prepared = this.session.prepareQuery< + PreparedQueryConfig & { execute: PgQueryResultKind } + >( builtQuery, undefined, undefined, @@ -617,7 +619,7 @@ export class PgDatabase< export type PgWithReplicas = Q & { $primary: Q }; export const withReplicas = < - HKT extends QueryResultHKT, + HKT extends PgQueryResultHKT, TFullSchema extends Record, TSchema extends TablesRelationalConfig, Q extends PgDatabase, diff --git a/drizzle-orm/src/pg-core/dialect.ts b/drizzle-orm/src/pg-core/dialect.ts index fe94a926e..8538ec3f4 100644 --- a/drizzle-orm/src/pg-core/dialect.ts +++ b/drizzle-orm/src/pg-core/dialect.ts @@ -47,7 +47,7 @@ import { type SQLChunk, } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; -import { getTableName, Table } from '~/table.ts'; +import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { PgSession } from './session.ts'; @@ -1219,7 +1219,7 @@ export class PgDialect { } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); - const relationTableName = relation.referencedTable[Table.Symbol.Name]; + const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( diff --git a/drizzle-orm/src/pg-core/query-builders/delete.ts b/drizzle-orm/src/pg-core/query-builders/delete.ts index 4e763c043..dc127f167 100644 --- a/drizzle-orm/src/pg-core/query-builders/delete.ts +++ 
b/drizzle-orm/src/pg-core/query-builders/delete.ts @@ -2,10 +2,10 @@ import { entityKind } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgPreparedQuery, + PgQueryResultHKT, + PgQueryResultKind, PgSession, PreparedQueryConfig, - QueryResultHKT, - QueryResultKind, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; @@ -37,7 +37,7 @@ export type PgDeleteWithout< export type PgDelete< TTable extends PgTable = PgTable, - TQueryResult extends QueryResultHKT = QueryResultHKT, + TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TReturning extends Record | undefined = Record | undefined, > = PgDeleteBase; @@ -81,7 +81,7 @@ export type PgDeleteReturning< export type PgDeletePrepare = PgPreparedQuery< PreparedQueryConfig & { - execute: T['_']['returning'] extends undefined ? QueryResultKind + execute: T['_']['returning'] extends undefined ? PgQueryResultKind : T['_']['returning'][]; } >; @@ -96,13 +96,13 @@ export type AnyPgDeleteBase = PgDeleteBase; export interface PgDeleteBase< TTable extends PgTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends - QueryPromise : TReturning[]>, - RunnableQuery : TReturning[], 'pg'>, + QueryPromise : TReturning[]>, + RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { @@ -112,20 +112,20 @@ export interface PgDeleteBase< readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; - readonly result: TReturning extends undefined ? QueryResultKind : TReturning[]; + readonly result: TReturning extends undefined ? 
PgQueryResultKind : TReturning[]; }; } export class PgDeleteBase< TTable extends PgTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, -> extends QueryPromise : TReturning[]> +> extends QueryPromise : TReturning[]> implements - RunnableQuery : TReturning[], 'pg'>, + RunnableQuery : TReturning[], 'pg'>, SQLWrapper { static readonly [entityKind]: string = 'PgDelete'; @@ -222,7 +222,7 @@ export class PgDeleteBase< return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery< PreparedQueryConfig & { - execute: TReturning extends undefined ? QueryResultKind : TReturning[]; + execute: TReturning extends undefined ? PgQueryResultKind : TReturning[]; } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true); }); diff --git a/drizzle-orm/src/pg-core/query-builders/insert.ts b/drizzle-orm/src/pg-core/query-builders/insert.ts index 64d72b125..c27f8ce9b 100644 --- a/drizzle-orm/src/pg-core/query-builders/insert.ts +++ b/drizzle-orm/src/pg-core/query-builders/insert.ts @@ -3,10 +3,10 @@ import type { PgDialect } from '~/pg-core/dialect.ts'; import type { IndexColumn } from '~/pg-core/indexes.ts'; import type { PgPreparedQuery, + PgQueryResultHKT, + PgQueryResultKind, PgSession, PreparedQueryConfig, - QueryResultHKT, - QueryResultKind, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; @@ -36,7 +36,7 @@ export type PgInsertValue = } & {}; -export class PgInsertBuilder { +export class PgInsertBuilder { static readonly [entityKind]: string = 'PgInsertBuilder'; constructor( @@ -112,7 +112,7 @@ export interface PgInsertOnConflictDoUpdateConfig { export type PgInsertPrepare = PgPreparedQuery< 
PreparedQueryConfig & { - execute: T['_']['returning'] extends undefined ? QueryResultKind + execute: T['_']['returning'] extends undefined ? PgQueryResultKind : T['_']['returning'][]; } >; @@ -127,19 +127,19 @@ export type AnyPgInsert = PgInsertBase; export type PgInsert< TTable extends PgTable = PgTable, - TQueryResult extends QueryResultHKT = QueryResultHKT, + TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TReturning extends Record | undefined = Record | undefined, > = PgInsertBase; export interface PgInsertBase< TTable extends PgTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends - QueryPromise : TReturning[]>, - RunnableQuery : TReturning[], 'pg'>, + QueryPromise : TReturning[]>, + RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { @@ -149,21 +149,21 @@ export interface PgInsertBase< readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; - readonly result: TReturning extends undefined ? QueryResultKind : TReturning[]; + readonly result: TReturning extends undefined ? 
PgQueryResultKind : TReturning[]; }; } export class PgInsertBase< TTable extends PgTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, -> extends QueryPromise : TReturning[]> +> extends QueryPromise : TReturning[]> implements - RunnableQuery : TReturning[], 'pg'>, + RunnableQuery : TReturning[], 'pg'>, SQLWrapper { static readonly [entityKind]: string = 'PgInsert'; @@ -317,7 +317,7 @@ export class PgInsertBase< return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery< PreparedQueryConfig & { - execute: TReturning extends undefined ? QueryResultKind : TReturning[]; + execute: TReturning extends undefined ? PgQueryResultKind : TReturning[]; } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true); }); diff --git a/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts b/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts index e091e9545..d2bedac68 100644 --- a/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts +++ b/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts @@ -2,10 +2,10 @@ import { entityKind } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgPreparedQuery, + PgQueryResultHKT, + PgQueryResultKind, PgSession, PreparedQueryConfig, - QueryResultHKT, - QueryResultKind, } from '~/pg-core/session.ts'; import type { PgMaterializedView } from '~/pg-core/view.ts'; import { QueryPromise } from '~/query-promise.ts'; @@ -14,21 +14,21 @@ import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; // eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface 
PgRefreshMaterializedView +export interface PgRefreshMaterializedView extends - QueryPromise>, - RunnableQuery, 'pg'>, + QueryPromise>, + RunnableQuery, 'pg'>, SQLWrapper { readonly _: { readonly dialect: 'pg'; - readonly result: QueryResultKind; + readonly result: PgQueryResultKind; }; } -export class PgRefreshMaterializedView - extends QueryPromise> - implements RunnableQuery, 'pg'>, SQLWrapper +export class PgRefreshMaterializedView + extends QueryPromise> + implements RunnableQuery, 'pg'>, SQLWrapper { static readonly [entityKind]: string = 'PgRefreshMaterializedView'; @@ -76,7 +76,7 @@ export class PgRefreshMaterializedView /** @internal */ _prepare(name?: string): PgPreparedQuery< PreparedQueryConfig & { - execute: QueryResultKind; + execute: PgQueryResultKind; } > { return tracer.startActiveSpan('drizzle.prepareQuery', () => { @@ -86,7 +86,7 @@ export class PgRefreshMaterializedView prepare(name: string): PgPreparedQuery< PreparedQueryConfig & { - execute: QueryResultKind; + execute: PgQueryResultKind; } > { return this._prepare(name); diff --git a/drizzle-orm/src/pg-core/query-builders/update.ts b/drizzle-orm/src/pg-core/query-builders/update.ts index 4a7dd50a8..ab579621f 100644 --- a/drizzle-orm/src/pg-core/query-builders/update.ts +++ b/drizzle-orm/src/pg-core/query-builders/update.ts @@ -3,10 +3,10 @@ import { entityKind } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgPreparedQuery, + PgQueryResultHKT, + PgQueryResultKind, PgSession, PreparedQueryConfig, - QueryResultHKT, - QueryResultKind, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; @@ -35,7 +35,7 @@ export type PgUpdateSetSource = } & {}; -export class PgUpdateBuilder { +export class PgUpdateBuilder { static readonly [entityKind]: string = 'PgUpdateBuilder'; declare readonly _: { @@ -105,7 +105,7 @@ export type PgUpdateReturning< export type 
PgUpdatePrepare = PgPreparedQuery< PreparedQueryConfig & { - execute: T['_']['returning'] extends undefined ? QueryResultKind + execute: T['_']['returning'] extends undefined ? PgQueryResultKind : T['_']['returning'][]; } >; @@ -118,7 +118,7 @@ export type PgUpdateDynamic = PgUpdate< export type PgUpdate< TTable extends PgTable = PgTable, - TQueryResult extends QueryResultHKT = QueryResultHKT, + TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TReturning extends Record | undefined = Record | undefined, > = PgUpdateBase; @@ -126,13 +126,13 @@ type AnyPgUpdate = PgUpdateBase; export interface PgUpdateBase< TTable extends PgTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends - QueryPromise : TReturning[]>, - RunnableQuery : TReturning[], 'pg'>, + QueryPromise : TReturning[]>, + RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { @@ -142,21 +142,21 @@ export interface PgUpdateBase< readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; - readonly result: TReturning extends undefined ? QueryResultKind : TReturning[]; + readonly result: TReturning extends undefined ? 
PgQueryResultKind : TReturning[]; }; } export class PgUpdateBase< TTable extends PgTable, - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, -> extends QueryPromise : TReturning[]> +> extends QueryPromise : TReturning[]> implements - RunnableQuery : TReturning[], 'pg'>, + RunnableQuery : TReturning[], 'pg'>, SQLWrapper { static readonly [entityKind]: string = 'PgUpdate'; diff --git a/drizzle-orm/src/pg-core/schema.ts b/drizzle-orm/src/pg-core/schema.ts index 35f674729..210ae7407 100644 --- a/drizzle-orm/src/pg-core/schema.ts +++ b/drizzle-orm/src/pg-core/schema.ts @@ -1,10 +1,11 @@ import { entityKind, is } from '~/entity.ts'; +import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { pgEnum } from './columns/enum.ts'; import { pgEnumWithSchema } from './columns/enum.ts'; import { type PgTableFn, pgTableWithSchema } from './table.ts'; import { type pgMaterializedView, pgMaterializedViewWithSchema, type pgView, pgViewWithSchema } from './view.ts'; -export class PgSchema { +export class PgSchema implements SQLWrapper { static readonly [entityKind]: string = 'PgSchema'; constructor( public readonly schemaName: TName, @@ -25,6 +26,14 @@ export class PgSchema { enum: typeof pgEnum = ((name, values) => { return pgEnumWithSchema(name, values, this.schemaName); }); + + getSQL(): SQL { + return new SQL([sql.identifier(this.schemaName)]); + } + + shouldOmitSQLParens(): boolean { + return true; + } } export function isPgSchema(obj: unknown): obj is PgSchema { diff --git a/drizzle-orm/src/pg-core/session.ts b/drizzle-orm/src/pg-core/session.ts index 61ac9f5bb..434ebc086 100644 --- a/drizzle-orm/src/pg-core/session.ts +++ b/drizzle-orm/src/pg-core/session.ts @@ -46,7 +46,7 @@ export 
interface PgTransactionConfig { } export abstract class PgSession< - TQueryResult extends QueryResultHKT = QueryResultHKT, + TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > { @@ -93,7 +93,7 @@ export abstract class PgSession< } export abstract class PgTransaction< - TQueryResult extends QueryResultHKT, + TQueryResult extends PgQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends PgDatabase { @@ -140,12 +140,12 @@ export abstract class PgTransaction< ): Promise; } -export interface QueryResultHKT { - readonly $brand: 'QueryRowHKT'; +export interface PgQueryResultHKT { + readonly $brand: 'PgQueryResultHKT'; readonly row: unknown; readonly type: unknown; } -export type QueryResultKind = (TKind & { +export type PgQueryResultKind = (TKind & { readonly row: TRow; })['type']; diff --git a/drizzle-orm/src/pg-proxy/session.ts b/drizzle-orm/src/pg-proxy/session.ts index 386d830f7..eb6a1b1a3 100644 --- a/drizzle-orm/src/pg-proxy/session.ts +++ b/drizzle-orm/src/pg-proxy/session.ts @@ -4,7 +4,7 @@ import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery as PreparedQueryBase, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { QueryWithTypings } from '~/sql/sql.ts'; @@ -138,7 +138,7 @@ export class PreparedQuery extends PreparedQueryB } } -export interface PgRemoteQueryResultHKT extends QueryResultHKT { +export interface 
PgRemoteQueryResultHKT extends PgQueryResultHKT { type: Assume[]; diff --git a/drizzle-orm/src/pglite/session.ts b/drizzle-orm/src/pglite/session.ts index 3559f4110..c7a1dbb5d 100644 --- a/drizzle-orm/src/pglite/session.ts +++ b/drizzle-orm/src/pglite/session.ts @@ -4,7 +4,7 @@ import { type Logger, NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; @@ -168,6 +168,6 @@ export class PgliteTransaction< } } -export interface PgliteQueryResultHKT extends QueryResultHKT { +export interface PgliteQueryResultHKT extends PgQueryResultHKT { type: Results>; } diff --git a/drizzle-orm/src/planetscale-serverless/session.ts b/drizzle-orm/src/planetscale-serverless/session.ts index f640cf071..60b7d83d8 100644 --- a/drizzle-orm/src/planetscale-serverless/session.ts +++ b/drizzle-orm/src/planetscale-serverless/session.ts @@ -5,18 +5,18 @@ import { NoopLogger } from '~/logger.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import { + MySqlPreparedQuery, + type MySqlPreparedQueryConfig, + type MySqlPreparedQueryHKT, + type MySqlQueryResultHKT, MySqlSession, MySqlTransaction, - PreparedQuery, - type PreparedQueryConfig, - type PreparedQueryHKT, - type QueryResultHKT, } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; 
import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; -export class PlanetScalePreparedQuery extends PreparedQuery { +export class PlanetScalePreparedQuery extends MySqlPreparedQuery { static readonly [entityKind]: string = 'PlanetScalePreparedQuery'; private rawQuery = { as: 'object' } as const; @@ -64,7 +64,7 @@ export interface PlanetscaleSessionOptions { export class PlanetscaleSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, -> extends MySqlSession { +> extends MySqlSession { static readonly [entityKind]: string = 'PlanetscaleSession'; private logger: Logger; @@ -82,11 +82,11 @@ export class PlanetscaleSession< this.logger = options.logger ?? new NoopLogger(); } - prepareQuery( + prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], - ): PreparedQuery { + ): MySqlPreparedQuery { return new PlanetScalePreparedQuery(this.client, query.sql, query.params, this.logger, fields, customResultMapper); } @@ -161,10 +161,10 @@ export class PlanetScaleTransaction< } } -export interface PlanetscaleQueryResultHKT extends QueryResultHKT { +export interface PlanetscaleQueryResultHKT extends MySqlQueryResultHKT { type: ExecutedQuery; } -export interface PlanetScalePreparedQueryHKT extends PreparedQueryHKT { - type: PlanetScalePreparedQuery>; +export interface PlanetScalePreparedQueryHKT extends MySqlPreparedQueryHKT { + type: PlanetScalePreparedQuery>; } diff --git a/drizzle-orm/src/postgres-js/session.ts b/drizzle-orm/src/postgres-js/session.ts index e93c3c862..05179ebdb 100644 --- a/drizzle-orm/src/postgres-js/session.ts +++ b/drizzle-orm/src/postgres-js/session.ts @@ -5,7 +5,7 @@ import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from 
'~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query } from '~/sql/sql.ts'; @@ -192,6 +192,6 @@ export class PostgresJsTransaction< } } -export interface PostgresJsQueryResultHKT extends QueryResultHKT { +export interface PostgresJsQueryResultHKT extends PgQueryResultHKT { type: RowList[]>; } diff --git a/drizzle-orm/src/prisma/mysql/driver.ts b/drizzle-orm/src/prisma/mysql/driver.ts new file mode 100644 index 000000000..586832948 --- /dev/null +++ b/drizzle-orm/src/prisma/mysql/driver.ts @@ -0,0 +1,42 @@ +import type { PrismaClient } from '@prisma/client/extension'; + +import { Prisma } from '@prisma/client'; + +import { entityKind } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { MySqlDatabase, MySqlDialect } from '~/mysql-core/index.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import type { PrismaMySqlPreparedQueryHKT, PrismaMySqlQueryResultHKT } from './session.ts'; +import { PrismaMySqlSession } from './session.ts'; + +export class PrismaMySqlDatabase + extends MySqlDatabase> +{ + static readonly [entityKind]: string = 'PrismaMySqlDatabase'; + + constructor(client: PrismaClient, logger: Logger | undefined) { + const dialect = new MySqlDialect(); + super(dialect, new PrismaMySqlSession(dialect, client, { logger }), undefined, 'default'); + } +} + +export type PrismaMySqlConfig = Omit; + +export function drizzle(config: PrismaMySqlConfig = {}) { + let logger: Logger | undefined; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = 
config.logger; + } + + return Prisma.defineExtension((client) => { + return client.$extends({ + name: 'drizzle', + client: { + $drizzle: new PrismaMySqlDatabase(client, logger), + }, + }); + }); +} diff --git a/drizzle-orm/src/prisma/mysql/index.ts b/drizzle-orm/src/prisma/mysql/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/prisma/mysql/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/prisma/mysql/session.ts b/drizzle-orm/src/prisma/mysql/session.ts new file mode 100644 index 000000000..a6b12a0c3 --- /dev/null +++ b/drizzle-orm/src/prisma/mysql/session.ts @@ -0,0 +1,92 @@ +import type { PrismaClient } from '@prisma/client/extension'; + +import { entityKind } from '~/entity.ts'; +import { type Logger, NoopLogger } from '~/logger.ts'; +import type { + MySqlDialect, + MySqlPreparedQueryConfig, + MySqlPreparedQueryHKT, + MySqlQueryResultHKT, + MySqlTransaction, + MySqlTransactionConfig, +} from '~/mysql-core/index.ts'; +import { MySqlPreparedQuery, MySqlSession } from '~/mysql-core/index.ts'; +import { fillPlaceholders } from '~/sql/sql.ts'; +import type { Query, SQL } from '~/sql/sql.ts'; +import type { Assume } from '~/utils.ts'; + +export class PrismaMySqlPreparedQuery extends MySqlPreparedQuery { + override iterator(_placeholderValues?: Record | undefined): AsyncGenerator { + throw new Error('Method not implemented.'); + } + static readonly [entityKind]: string = 'PrismaMySqlPreparedQuery'; + + constructor( + private readonly prisma: PrismaClient, + private readonly query: Query, + private readonly logger: Logger, + ) { + super(); + } + + override execute(placeholderValues?: Record): Promise { + const params = fillPlaceholders(this.query.params, placeholderValues ?? 
{}); + this.logger.logQuery(this.query.sql, params); + return this.prisma.$queryRawUnsafe(this.query.sql, ...params); + } +} + +export interface PrismaMySqlSessionOptions { + logger?: Logger; +} + +export class PrismaMySqlSession extends MySqlSession { + static readonly [entityKind]: string = 'PrismaMySqlSession'; + + private readonly logger: Logger; + + constructor( + dialect: MySqlDialect, + private readonly prisma: PrismaClient, + private readonly options: PrismaMySqlSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? new NoopLogger(); + } + + override execute(query: SQL): Promise { + return this.prepareQuery(this.dialect.sqlToQuery(query)).execute(); + } + + override all(_query: SQL): Promise { + throw new Error('Method not implemented.'); + } + + override prepareQuery( + query: Query, + ): MySqlPreparedQuery { + return new PrismaMySqlPreparedQuery(this.prisma, query, this.logger); + } + + override transaction( + _transaction: ( + tx: MySqlTransaction< + PrismaMySqlQueryResultHKT, + PrismaMySqlPreparedQueryHKT, + Record, + Record + >, + ) => Promise, + _config?: MySqlTransactionConfig, + ): Promise { + throw new Error('Method not implemented.'); + } +} + +export interface PrismaMySqlQueryResultHKT extends MySqlQueryResultHKT { + type: []; +} + +export interface PrismaMySqlPreparedQueryHKT extends MySqlPreparedQueryHKT { + type: PrismaMySqlPreparedQuery>; +} diff --git a/drizzle-orm/src/prisma/pg/driver.ts b/drizzle-orm/src/prisma/pg/driver.ts new file mode 100644 index 000000000..23678f09f --- /dev/null +++ b/drizzle-orm/src/prisma/pg/driver.ts @@ -0,0 +1,40 @@ +import type { PrismaClient } from '@prisma/client/extension'; + +import { Prisma } from '@prisma/client'; + +import { entityKind } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { PgDatabase, PgDialect } from '~/pg-core/index.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import type { 
PrismaPgQueryResultHKT } from './session.ts'; +import { PrismaPgSession } from './session.ts'; + +export class PrismaPgDatabase extends PgDatabase> { + static readonly [entityKind]: string = 'PrismaPgDatabase'; + + constructor(client: PrismaClient, logger: Logger | undefined) { + const dialect = new PgDialect(); + super(dialect, new PrismaPgSession(dialect, client, { logger }), undefined); + } +} + +export type PrismaPgConfig = Omit; + +export function drizzle(config: PrismaPgConfig = {}) { + let logger: Logger | undefined; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + return Prisma.defineExtension((client) => { + return client.$extends({ + name: 'drizzle', + client: { + $drizzle: new PrismaPgDatabase(client, logger), + }, + }); + }); +} diff --git a/drizzle-orm/src/prisma/pg/index.ts b/drizzle-orm/src/prisma/pg/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/prisma/pg/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/prisma/pg/session.ts b/drizzle-orm/src/prisma/pg/session.ts new file mode 100644 index 000000000..077326ef3 --- /dev/null +++ b/drizzle-orm/src/prisma/pg/session.ts @@ -0,0 +1,78 @@ +import type { PrismaClient } from '@prisma/client/extension'; + +import { entityKind } from '~/entity.ts'; +import { type Logger, NoopLogger } from '~/logger.ts'; +import type { + PgDialect, + PgQueryResultHKT, + PgTransaction, + PgTransactionConfig, + PreparedQueryConfig, +} from '~/pg-core/index.ts'; +import { PgPreparedQuery, PgSession } from '~/pg-core/index.ts'; +import type { Query, SQL } from '~/sql/sql.ts'; +import { fillPlaceholders } from '~/sql/sql.ts'; + +export class PrismaPgPreparedQuery extends PgPreparedQuery { + static readonly [entityKind]: string = 'PrismaPgPreparedQuery'; + + constructor( + private readonly prisma: PrismaClient, + query: Query, + private 
readonly logger: Logger, + ) { + super(query); + } + + override execute(placeholderValues?: Record): Promise { + const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); + this.logger.logQuery(this.query.sql, params); + return this.prisma.$queryRawUnsafe(this.query.sql, ...params); + } + + override all(): Promise { + throw new Error('Method not implemented.'); + } + + override isResponseInArrayMode(): boolean { + return false; + } +} + +export interface PrismaPgSessionOptions { + logger?: Logger; +} + +export class PrismaPgSession extends PgSession { + static readonly [entityKind]: string = 'PrismaPgSession'; + + private readonly logger: Logger; + + constructor( + dialect: PgDialect, + private readonly prisma: PrismaClient, + private readonly options: PrismaPgSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? new NoopLogger(); + } + + override execute(query: SQL): Promise { + return this.prepareQuery(this.dialect.sqlToQuery(query)).execute(); + } + + override prepareQuery(query: Query): PgPreparedQuery { + return new PrismaPgPreparedQuery(this.prisma, query, this.logger); + } + + override transaction( + _transaction: (tx: PgTransaction, Record>) => Promise, + _config?: PgTransactionConfig, + ): Promise { + throw new Error('Method not implemented.'); + } +} + +export interface PrismaPgQueryResultHKT extends PgQueryResultHKT { + type: []; +} diff --git a/drizzle-orm/src/prisma/schema.prisma b/drizzle-orm/src/prisma/schema.prisma new file mode 100644 index 000000000..e9b2f3ce6 --- /dev/null +++ b/drizzle-orm/src/prisma/schema.prisma @@ -0,0 +1,14 @@ +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "postgresql" + url = env("DB_URL") +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + name String? 
+} diff --git a/drizzle-orm/src/prisma/sqlite/driver.ts b/drizzle-orm/src/prisma/sqlite/driver.ts new file mode 100644 index 000000000..2a8f1e4c8 --- /dev/null +++ b/drizzle-orm/src/prisma/sqlite/driver.ts @@ -0,0 +1,32 @@ +import { Prisma } from '@prisma/client'; + +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { BaseSQLiteDatabase, SQLiteAsyncDialect } from '~/sqlite-core/index.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import { PrismaSQLiteSession } from './session.ts'; + +export type PrismaSQLiteDatabase = BaseSQLiteDatabase<'async', []>; + +export type PrismaSQLiteConfig = Omit; + +export function drizzle(config: PrismaSQLiteConfig = {}) { + const dialect = new SQLiteAsyncDialect(); + let logger: Logger | undefined; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + return Prisma.defineExtension((client) => { + const session = new PrismaSQLiteSession(client, dialect, { logger }); + + return client.$extends({ + name: 'drizzle', + client: { + $drizzle: new BaseSQLiteDatabase('async', dialect, session, undefined) as PrismaSQLiteDatabase, + }, + }); + }); +} diff --git a/drizzle-orm/src/prisma/sqlite/index.ts b/drizzle-orm/src/prisma/sqlite/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/prisma/sqlite/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/prisma/sqlite/session.ts b/drizzle-orm/src/prisma/sqlite/session.ts new file mode 100644 index 000000000..3dbdc6f1a --- /dev/null +++ b/drizzle-orm/src/prisma/sqlite/session.ts @@ -0,0 +1,90 @@ +import type { PrismaClient } from '@prisma/client/extension'; + +import { entityKind } from '~/entity.ts'; +import { type Logger, NoopLogger } from '~/logger.ts'; +import type { Query } from '~/sql/sql.ts'; +import { fillPlaceholders } from '~/sql/sql.ts'; +import type { 
+ PreparedQueryConfig as PreparedQueryConfigBase, + SelectedFieldsOrdered, + SQLiteAsyncDialect, + SQLiteExecuteMethod, + SQLiteTransaction, + SQLiteTransactionConfig, +} from '~/sqlite-core/index.ts'; +import { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/index.ts'; + +type PreparedQueryConfig = Omit; + +export class PrismaSQLitePreparedQuery extends SQLitePreparedQuery< + { type: 'async'; run: []; all: T['all']; get: T['get']; values: never; execute: T['execute'] } +> { + static readonly [entityKind]: string = 'PrismaSQLitePreparedQuery'; + + constructor( + private readonly prisma: PrismaClient, + query: Query, + private readonly logger: Logger, + executeMethod: SQLiteExecuteMethod, + ) { + super('async', executeMethod, query); + } + + override all(placeholderValues?: Record): Promise { + const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); + this.logger.logQuery(this.query.sql, params); + return this.prisma.$queryRawUnsafe(this.query.sql, ...params); + } + + override async run(placeholderValues?: Record | undefined): Promise<[]> { + await this.all(placeholderValues); + return []; + } + + override async get(placeholderValues?: Record | undefined): Promise { + const all = await this.all(placeholderValues) as unknown[]; + return all[0]; + } + + override values(_placeholderValues?: Record | undefined): Promise { + throw new Error('Method not implemented.'); + } + + override isResponseInArrayMode(): boolean { + return false; + } +} + +export interface PrismaSQLiteSessionOptions { + logger?: Logger; +} + +export class PrismaSQLiteSession extends SQLiteSession<'async', unknown, Record, Record> { + static readonly [entityKind]: string = 'PrismaSQLiteSession'; + + private readonly logger: Logger; + + constructor( + private readonly prisma: PrismaClient, + dialect: SQLiteAsyncDialect, + options: PrismaSQLiteSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? 
new NoopLogger(); + } + + override prepareQuery>( + query: Query, + fields: SelectedFieldsOrdered | undefined, + executeMethod: SQLiteExecuteMethod, + ): PrismaSQLitePreparedQuery { + return new PrismaSQLitePreparedQuery(this.prisma, query, this.logger, executeMethod); + } + + override transaction( + _transaction: (tx: SQLiteTransaction<'async', unknown, Record, Record>) => Promise, + _config?: SQLiteTransactionConfig, + ): Promise { + throw new Error('Method not implemented.'); + } +} diff --git a/drizzle-orm/src/query-builders/select.types.ts b/drizzle-orm/src/query-builders/select.types.ts index 78deb2f71..07579662f 100644 --- a/drizzle-orm/src/query-builders/select.types.ts +++ b/drizzle-orm/src/query-builders/select.types.ts @@ -106,7 +106,7 @@ export type AppendToResult< TSelectedFields extends SelectedFields, TOldSelectMode extends SelectMode, > = TOldSelectMode extends 'partial' ? TResult - : TOldSelectMode extends 'single' ? + : TOldSelectMode extends 'single' ? & (TTableName extends string ? Record : TResult) & (TJoinedName extends string ? Record : TSelectedFields) : TResult & (TJoinedName extends string ? Record : TSelectedFields); @@ -115,7 +115,7 @@ export type BuildSubquerySelection< TSelection extends ColumnsSelection, TNullability extends Record, > = TSelection extends never ? any - : + : & { [Key in keyof TSelection]: TSelection[Key] extends SQL ? 
DrizzleTypeError<'You cannot reference this field without assigning it an alias first - use `.as()`'> diff --git a/drizzle-orm/src/relations.ts b/drizzle-orm/src/relations.ts index e316df9c2..99780897e 100644 --- a/drizzle-orm/src/relations.ts +++ b/drizzle-orm/src/relations.ts @@ -1,4 +1,4 @@ -import { type AnyTable, type InferModelFromColumns, isTable, Table } from '~/table.ts'; +import { type AnyTable, getTableUniqueName, type InferModelFromColumns, Table } from '~/table.ts'; import { type AnyColumn, Column } from './column.ts'; import { entityKind, is } from './entity.ts'; import { PrimaryKeyBuilder } from './pg-core/primary-keys.ts'; @@ -240,7 +240,7 @@ export type DBQueryConfig< operators: { sql: Operators['sql'] }, ) => Record); } - & (TRelationType extends 'many' ? + & (TRelationType extends 'many' ? & { where?: | SQL @@ -323,7 +323,7 @@ export type BuildRelationResult< TSchema, FindTableByDBName, Assume> - > extends infer TResult ? TRel extends One ? + > extends infer TResult ? TRel extends One ? | TResult | (Equal extends true ? 
null : never) : TResult[] @@ -361,7 +361,7 @@ export type BuildQueryResult< keyof TTableConfig['columns'], NonUndefinedKeysOnly > - : + : & { [K in keyof TFullSelection['columns']]: Equal< TFullSelection['columns'][K], @@ -429,8 +429,8 @@ export function extractTablesRelationalConfig< > = {}; const tablesConfig: TablesRelationalConfig = {}; for (const [key, value] of Object.entries(schema)) { - if (isTable(value)) { - const dbName = value[Table.Symbol.Name]; + if (is(value, Table)) { + const dbName = getTableUniqueName(value); const bufferedRelations = relationsBuffer[dbName]; tableNamesMap[dbName] = key; tablesConfig[key] = { @@ -462,7 +462,7 @@ export function extractTablesRelationalConfig< } } } else if (is(value, Relations)) { - const dbName: string = value.table[Table.Symbol.Name]; + const dbName = getTableUniqueName(value.table); const tableName = tableNamesMap[dbName]; const relations: Record = value.config( configHelpers(value.table), @@ -561,7 +561,7 @@ export function normalizeRelation( }; } - const referencedTableTsName = tableNamesMap[relation.referencedTable[Table.Symbol.Name]]; + const referencedTableTsName = tableNamesMap[getTableUniqueName(relation.referencedTable)]; if (!referencedTableTsName) { throw new Error( `Table "${relation.referencedTable[Table.Symbol.Name]}" not found in schema`, @@ -574,7 +574,7 @@ export function normalizeRelation( } const sourceTable = relation.sourceTable; - const sourceTableTsName = tableNamesMap[sourceTable[Table.Symbol.Name]]; + const sourceTableTsName = tableNamesMap[getTableUniqueName(sourceTable)]; if (!sourceTableTsName) { throw new Error( `Table "${sourceTable[Table.Symbol.Name]}" not found in schema`, diff --git a/drizzle-orm/src/sql/sql.ts b/drizzle-orm/src/sql/sql.ts index 7e852794b..f5b3f30b4 100644 --- a/drizzle-orm/src/sql/sql.ts +++ b/drizzle-orm/src/sql/sql.ts @@ -61,6 +61,7 @@ export interface QueryWithTypings extends Query { */ export interface SQLWrapper { getSQL(): SQL; + shouldOmitSQLParens?(): 
boolean; } export function isSQLWrapper(value: unknown): value is SQLWrapper { @@ -213,7 +214,7 @@ export class SQL implements SQLWrapper { } let typings: QueryTypingsValue[] | undefined; - if (prepareTyping !== undefined) { + if (prepareTyping) { typings = [prepareTyping(chunk.encoder)]; } @@ -221,7 +222,7 @@ export class SQL implements SQLWrapper { } if (is(chunk, Placeholder)) { - return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk] }; + return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk], typings: ['none'] }; } if (is(chunk, SQL.Aliased) && chunk.fieldAlias !== undefined) { @@ -248,6 +249,9 @@ export class SQL implements SQLWrapper { } if (isSQLWrapper(chunk)) { + if (chunk.shouldOmitSQLParens?.()) { + return this.buildQueryFromSourceParams([chunk.getSQL()], config); + } return this.buildQueryFromSourceParams([ new StringChunk('('), chunk.getSQL(), @@ -441,11 +445,10 @@ export type SQLChunk = export function sql(strings: TemplateStringsArray, ...params: any[]): SQL; /* - The type of `params` is specified as `SQLSourceParam[]`, but that's slightly incorrect - + The type of `params` is specified as `SQLChunk[]`, but that's slightly incorrect - in runtime, users won't pass `FakePrimitiveParam` instances as `params` - they will pass primitive values - which will be wrapped in `Param` using `buildChunksFromParam(...)`. That's why the overload - specify `params` as `any[]` and not as `SQLSourceParam[]`. This type is used to make our lives easier and - the type checker happy. + which will be wrapped in `Param`. That's why the overload specifies `params` as `any[]` and not as `SQLSourceParam[]`. + This type is used to make our lives easier and the type checker happy. 
*/ export function sql(strings: TemplateStringsArray, ...params: SQLChunk[]): SQL { const queryChunks: SQLChunk[] = []; diff --git a/drizzle-orm/src/sqlite-core/dialect.ts b/drizzle-orm/src/sqlite-core/dialect.ts index 0f8e4d879..7b7cf0ab1 100644 --- a/drizzle-orm/src/sqlite-core/dialect.ts +++ b/drizzle-orm/src/sqlite-core/dialect.ts @@ -23,7 +23,7 @@ import { SQLiteColumn } from '~/sqlite-core/columns/index.ts'; import type { SQLiteDeleteConfig, SQLiteInsertConfig, SQLiteUpdateConfig } from '~/sqlite-core/query-builders/index.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import { Subquery } from '~/subquery.ts'; -import { getTableName, Table } from '~/table.ts'; +import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { @@ -585,7 +585,7 @@ export abstract class SQLiteDialect { } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); - const relationTableName = relation.referencedTable[Table.Symbol.Name]; + const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const relationTable = schema[relationTableTsName]!; @@ -779,7 +779,7 @@ export class SQLiteAsyncDialect extends SQLiteDialect { async migrate( migrations: MigrationMeta[], - session: SQLiteSession<'async', unknown, any, TablesRelationalConfig>, + session: SQLiteSession<'async', any, any, any>, config?: string | MigrationConfig, ): Promise { const migrationsTable = config === undefined diff --git a/drizzle-orm/src/table.ts b/drizzle-orm/src/table.ts index f1bf1c7d1..3db9d5559 100644 --- a/drizzle-orm/src/table.ts +++ b/drizzle-orm/src/table.ts @@ -110,8 +110,6 @@ export class Table implements SQLWrapper { /** @internal */ [ExtraConfigBuilder]: ((self: any) 
=> Record) | undefined = undefined; - [IsDrizzleTable] = true; - constructor(name: string, schema: string | undefined, baseName: string) { this[TableName] = this[OriginalName] = name; this[Schema] = schema; @@ -146,6 +144,10 @@ export function getTableName(table: T): T['_']['name'] { return table[TableName]; } +export function getTableUniqueName(table: T): `${T['_']['schema']}.${T['_']['name']}` { + return `${table[Schema] ?? 'public'}.${table[TableName]}`; +} + export type MapColumnName = TDBColumNames extends true ? TColumn['_']['name'] : TName; @@ -155,7 +157,7 @@ export type InferModelFromColumns< TInferMode extends 'select' | 'insert' = 'select', TConfig extends { dbColumnNames: boolean } = { dbColumnNames: false }, > = Simplify< - TInferMode extends 'insert' ? + TInferMode extends 'insert' ? & { [ Key in keyof TColumns & string as RequiredKeyOnly< diff --git a/drizzle-orm/src/tidb-serverless/session.ts b/drizzle-orm/src/tidb-serverless/session.ts index 2dbdbbf52..e87c7a7e2 100644 --- a/drizzle-orm/src/tidb-serverless/session.ts +++ b/drizzle-orm/src/tidb-serverless/session.ts @@ -6,12 +6,12 @@ import { NoopLogger } from '~/logger.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import { + MySqlPreparedQuery, + type MySqlPreparedQueryConfig, + type MySqlPreparedQueryHKT, + type MySqlQueryResultHKT, MySqlSession, MySqlTransaction, - PreparedQuery, - type PreparedQueryConfig, - type PreparedQueryHKT, - type QueryResultHKT, } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; @@ -20,7 +20,7 @@ import { type Assume, mapResultRow } from '~/utils.ts'; const executeRawConfig = { fullResult: true } satisfies ExecuteOptions; const queryConfig = { arrayMode: true } satisfies ExecuteOptions; -export class 
TiDBServerlessPreparedQuery extends PreparedQuery { +export class TiDBServerlessPreparedQuery extends MySqlPreparedQuery { static readonly [entityKind]: string = 'TiDBPreparedQuery'; constructor( @@ -83,11 +83,11 @@ export class TiDBServerlessSession< this.logger = options.logger ?? new NoopLogger(); } - prepareQuery( + prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], - ): PreparedQuery { + ): MySqlPreparedQuery { return new TiDBServerlessPreparedQuery( this.client, query.sql, @@ -162,10 +162,10 @@ export class TiDBServerlessTransaction< } } -export interface TiDBServerlessQueryResultHKT extends QueryResultHKT { +export interface TiDBServerlessQueryResultHKT extends MySqlQueryResultHKT { type: FullResult; } -export interface TiDBServerlessPreparedQueryHKT extends PreparedQueryHKT { - type: TiDBServerlessPreparedQuery>; +export interface TiDBServerlessPreparedQueryHKT extends MySqlPreparedQueryHKT { + type: TiDBServerlessPreparedQuery>; } diff --git a/drizzle-orm/src/utils.ts b/drizzle-orm/src/utils.ts index 07b8290fd..54bd44325 100644 --- a/drizzle-orm/src/utils.ts +++ b/drizzle-orm/src/utils.ts @@ -132,12 +132,11 @@ export type UpdateSet = Record; export type OneOrMany = T | T[]; -export type Update = Simplify< +export type Update = & { [K in Exclude]: T[K]; } - & TUpdate ->; + & TUpdate; export type Simplify = & { diff --git a/drizzle-orm/src/vercel-postgres/session.ts b/drizzle-orm/src/vercel-postgres/session.ts index 1a1ec2dae..51a987905 100644 --- a/drizzle-orm/src/vercel-postgres/session.ts +++ b/drizzle-orm/src/vercel-postgres/session.ts @@ -11,7 +11,7 @@ import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import { type PgDialect, PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from 
'~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; @@ -194,6 +194,6 @@ export class VercelPgTransaction< } } -export interface VercelPgQueryResultHKT extends QueryResultHKT { +export interface VercelPgQueryResultHKT extends PgQueryResultHKT { type: QueryResult>; } diff --git a/drizzle-orm/src/xata-http/session.ts b/drizzle-orm/src/xata-http/session.ts index 7b8b917af..c666ba09d 100644 --- a/drizzle-orm/src/xata-http/session.ts +++ b/drizzle-orm/src/xata-http/session.ts @@ -5,7 +5,7 @@ import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; -import type { PgTransactionConfig, PreparedQueryConfig, QueryResultHKT } from '~/pg-core/session.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query } from '~/sql/sql.ts'; @@ -159,6 +159,6 @@ export class XataTransaction, TSchem } } -export interface XataHttpQueryResultHKT extends QueryResultHKT { +export interface XataHttpQueryResultHKT extends PgQueryResultHKT { type: SQLQueryResult; } diff --git a/drizzle-orm/tests/relation.test.ts b/drizzle-orm/tests/relation.test.ts new file mode 100644 index 000000000..0fe054812 --- /dev/null +++ b/drizzle-orm/tests/relation.test.ts @@ -0,0 +1,38 @@ +import { expect, test } from 'vitest'; + +import { pgSchema, pgTable } from '~/pg-core/index.ts'; +import { 
createTableRelationsHelpers, extractTablesRelationalConfig } from '~/relations.ts'; + +test('tables with same name in different schemas', () => { + const folder = pgSchema('folder'); + const schema = { + folder: { + usersInFolder: folder.table('users', {}), + }, + public: { + users: pgTable('users', {}), + }, + }; + + const relationalSchema = { + ...Object.fromEntries( + Object.entries(schema) + .flatMap(([key, val]) => { + // have unique keys across schemas + + const mappedTableEntries = Object.entries(val).map((tableEntry) => { + return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; + }); + + return mappedTableEntries; + }), + ), + }; + + const relationsConfig = extractTablesRelationalConfig( + relationalSchema, + createTableRelationsHelpers, + ); + + expect(Object.keys(relationsConfig)).toHaveLength(2); +}); diff --git a/drizzle-orm/tests/type-hints.test.ts b/drizzle-orm/tests/type-hints.test.ts new file mode 100644 index 000000000..40b40b958 --- /dev/null +++ b/drizzle-orm/tests/type-hints.test.ts @@ -0,0 +1,84 @@ +import { RDSDataClient } from '@aws-sdk/client-rds-data'; +import crypto from 'crypto'; +import { expect, test } from 'vitest'; + +import { drizzle } from '~/aws-data-api/pg'; +import { customType, json, PgDialect, pgTable, text, timestamp, uuid, varchar } from '~/pg-core'; +import { sql } from '~/sql/sql'; + +const db = drizzle(new RDSDataClient(), { + database: '', + resourceArn: '', + secretArn: '', +}); + +test('type hints - case #1', () => { + const t = pgTable('t', { + id: varchar('id', { length: 255 }).primaryKey(), + workspaceID: varchar('workspace_id', { length: 255 }).notNull(), + description: text('description').notNull(), + enrichment: json('enrichment').notNull(), + category: text('category'), + tags: text('tags').array().notNull(), + counterpartyName: text('counterparty_name'), + timePlaced: timestamp('time_placed').notNull(), + timeSynced: timestamp('time_synced').notNull(), + }); + + const q = db.insert(t).values({ + id: 'id', + 
tags: [], + workspaceID: 'workspaceID', + enrichment: {}, + category: 'category', + description: 'description', + timePlaced: new Date(), + timeSynced: sql`CURRENT_TIMESTAMP(6)`, + counterpartyName: 'counterpartyName', + }); + + const query = new PgDialect().sqlToQuery(q.getSQL()); + + expect(query.typings).toEqual(['none', 'none', 'none', 'json', 'none', 'none', 'none', 'timestamp']); +}); + +test('type hints - case #2', () => { + const prefixedUlid = ( + name: string, + opts: { prefix: Prefix }, + ) => + customType<{ data: PrefixedUlid; driverData: string }>({ + dataType: () => 'uuid', + toDriver: (value) => { + return value as string; + }, + fromDriver: (value) => { + return `${opts.prefix}_${value}` as PrefixedUlid; + }, + })(name); + + const calendars = pgTable('calendars', { + id: uuid('id').primaryKey().default(sql`gen_random_uuid()`), + orgMembershipId: prefixedUlid('om_id', { prefix: 'om' }).notNull(), + platform: text('platform').notNull(), + externalId: text('external_id').notNull(), + externalData: json('external_data').notNull(), + updatedAt: timestamp('updated_at').notNull().default(sql`now()`), + createdAt: timestamp('created_at').notNull().default(sql`now()`), + }); + + const q = db + .insert(calendars) + .values({ + id: crypto.randomUUID(), + orgMembershipId: 'om_id', + platform: 'platform', + externalId: 'externalId', + externalData: {}, + }) + .returning(); + + const query = new PgDialect().sqlToQuery(q.getSQL()); + + expect(query.typings).toEqual(['uuid', 'none', 'none', 'none', 'json']); +}); diff --git a/drizzle-orm/type-tests/mysql/set-operators.ts b/drizzle-orm/type-tests/mysql/set-operators.ts index 9afac2346..520f96b96 100644 --- a/drizzle-orm/type-tests/mysql/set-operators.ts +++ b/drizzle-orm/type-tests/mysql/set-operators.ts @@ -159,7 +159,7 @@ const exceptAll2Test = await exceptAll( db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, - }).from(newYorkers).leftJoin(newYorkers, sql``), + }).from(newYorkers).leftJoin(users, 
sql``), ); Expect>; diff --git a/drizzle-orm/type-tests/pg/set-operators.ts b/drizzle-orm/type-tests/pg/set-operators.ts index 3d53c4043..aa8be119e 100644 --- a/drizzle-orm/type-tests/pg/set-operators.ts +++ b/drizzle-orm/type-tests/pg/set-operators.ts @@ -151,7 +151,7 @@ const exceptAll2Test = await exceptAll( db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, - }).from(newYorkers).leftJoin(newYorkers, sql``), + }).from(newYorkers).leftJoin(users, sql``), ); Expect>; diff --git a/drizzle-orm/type-tests/sqlite/set-operators.ts b/drizzle-orm/type-tests/sqlite/set-operators.ts index e0239ba24..c7109d271 100644 --- a/drizzle-orm/type-tests/sqlite/set-operators.ts +++ b/drizzle-orm/type-tests/sqlite/set-operators.ts @@ -151,7 +151,7 @@ const exceptAll2Test = await except( db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, - }).from(newYorkers).leftJoin(newYorkers, sql``), + }).from(newYorkers).leftJoin(users, sql``), ); Expect>; diff --git a/drizzle-typebox/package.json b/drizzle-typebox/package.json index c268df13c..5e812f4fe 100644 --- a/drizzle-typebox/package.json +++ b/drizzle-typebox/package.json @@ -9,7 +9,7 @@ "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) 
&& rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", - "test": "NODE_OPTIONS='--loader=tsx --no-warnings' ava" + "test": "vitest run" }, "exports": { ".": { @@ -35,18 +35,6 @@ "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, - "ava": { - "files": [ - "tests/**/*.test.ts", - "!tests/bun/**/*" - ], - "extensions": { - "ts": "module" - }, - "nodeArguments": [ - "--loader=tsx" - ] - }, "keywords": [ "typebox", "validate", @@ -75,12 +63,12 @@ "@rollup/plugin-typescript": "^11.1.0", "@sinclair/typebox": "^0.29.6", "@types/node": "^18.15.10", - "ava": "^5.1.0", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "rimraf": "^5.0.0", "rollup": "^3.20.7", - "tsx": "^3.12.2", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "^1.6.0", "zx": "^7.2.2" } } diff --git a/drizzle-typebox/tests/mysql.test.ts b/drizzle-typebox/tests/mysql.test.ts index f16b39361..d6942a529 100644 --- a/drizzle-typebox/tests/mysql.test.ts +++ b/drizzle-typebox/tests/mysql.test.ts @@ -1,6 +1,5 @@ import { Type } from '@sinclair/typebox'; import { Value } from '@sinclair/typebox/value'; -import test from 'ava'; import { bigint, binary, @@ -31,8 +30,9 @@ import { varchar, year, } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; import { createInsertSchema, createSelectSchema, jsonSchema } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; const customInt = customType<{ data: number }>({ dataType() { @@ -127,40 +127,34 @@ const testTableRow = { autoIncrement: 1, }; -test('insert valid row', (t) => { +test('insert valid row', () => { const schema = createInsertSchema(testTable); - t.is( - Value.Check( - schema, - testTableRow, - ), - true, - ); + expect(Value.Check( + schema, + testTableRow, + )).toBeTruthy(); }); -test('insert invalid varchar length', (t) => { +test('insert invalid varchar length', () => { const schema = createInsertSchema(testTable); - t.is( 
- Value.Check(schema, { - ...testTableRow, - varchar: 'A'.repeat(201), - }), /* schema.safeParse({ ...testTableRow, varchar: 'A'.repeat(201) }).success */ - false, - ); + expect(Value.Check(schema, { + ...testTableRow, + varchar: 'A'.repeat(201), + })).toBeFalsy(); }); -test('insert smaller char length should work', (t) => { +test('insert smaller char length should work', () => { const schema = createInsertSchema(testTable); - t.is(Value.Check(schema, { ...testTableRow, char: 'abc' }), true); + expect(Value.Check(schema, { ...testTableRow, char: 'abc' })).toBeTruthy(); }); -test('insert larger char length should fail', (t) => { +test('insert larger char length should fail', () => { const schema = createInsertSchema(testTable); - t.is(Value.Check(schema, { ...testTableRow, char: 'abcde' }), false); + expect(Value.Check(schema, { ...testTableRow, char: 'abcde' })).toBeFalsy(); }); test('insert schema', (t) => { diff --git a/drizzle-typebox/tests/pg.test.ts b/drizzle-typebox/tests/pg.test.ts index a9631614b..355dee531 100644 --- a/drizzle-typebox/tests/pg.test.ts +++ b/drizzle-typebox/tests/pg.test.ts @@ -1,9 +1,9 @@ import { Type } from '@sinclair/typebox'; import { Value } from '@sinclair/typebox/value'; -import test from 'ava'; import { char, date, integer, pgEnum, pgTable, serial, text, timestamp, varchar } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; import { createInsertSchema, createSelectSchema, Nullable } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; export const roleEnum = pgEnum('role', ['admin', 'user']); @@ -39,28 +39,25 @@ const testUser = { initials: 'JD', }; -test('users insert valid user', (t) => { +test('users insert valid user', () => { const schema = createInsertSchema(users); - t.is(Value.Check(schema, testUser), true); + expect(Value.Check(schema, testUser)).toBeTruthy(); }); -test('users insert invalid varchar', (t) => { +test('users insert invalid varchar', 
() => { const schema = createInsertSchema(users); - t.is( - Value.Check(schema, { - ...testUser, - profession: 'Chief Executive Officer', - }), - false, - ); + expect(Value.Check(schema, { + ...testUser, + profession: 'Chief Executive Officer', + })).toBeFalsy(); }); -test('users insert invalid char', (t) => { +test('users insert invalid char', () => { const schema = createInsertSchema(users); - t.is(Value.Check(schema, { ...testUser, initials: 'JoDo' }), false); + expect(Value.Check(schema, { ...testUser, initials: 'JoDo' })).toBeFalsy(); }); test('users insert schema', (t) => { diff --git a/drizzle-typebox/tests/sqlite.test.ts b/drizzle-typebox/tests/sqlite.test.ts index 9b37b8447..a8506a269 100644 --- a/drizzle-typebox/tests/sqlite.test.ts +++ b/drizzle-typebox/tests/sqlite.test.ts @@ -1,9 +1,9 @@ import { type Static, Type } from '@sinclair/typebox'; import { Value } from '@sinclair/typebox/value'; -import test from 'ava'; import { blob, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; import { createInsertSchema, createSelectSchema, jsonSchema, Nullable } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; const blobJsonSchema = Type.Object({ foo: Type.String(), @@ -39,16 +39,16 @@ const testUser = { role: 'admin', }; -test('users insert valid user', (t) => { +test('users insert valid user', () => { const schema = createInsertSchema(users); // - t.is(Value.Check(schema, testUser), true); + expect(Value.Check(schema, testUser)).toBeTruthy(); }); -test('users insert invalid text length', (t) => { +test('users insert invalid text length', () => { const schema = createInsertSchema(users); - t.is(Value.Check(schema, { ...testUser, text: 'a'.repeat(256) }), false); + expect(Value.Check(schema, { ...testUser, text: 'a'.repeat(256) })).toBeFalsy(); }); test('users insert schema', (t) => { diff --git a/drizzle-typebox/tests/utils.ts 
b/drizzle-typebox/tests/utils.ts index 0454dd48d..e17e5f26d 100644 --- a/drizzle-typebox/tests/utils.ts +++ b/drizzle-typebox/tests/utils.ts @@ -1,15 +1,15 @@ import type { TSchema } from '@sinclair/typebox'; -import type { ExecutionContext } from 'ava'; +import { expect, type TaskContext } from 'vitest'; -export function expectSchemaShape(t: ExecutionContext, expected: T) { +export function expectSchemaShape(t: TaskContext, expected: T) { return { from(actual: T) { - t.deepEqual(Object.keys(actual), Object.keys(expected)); + expect(Object.keys(actual)).toStrictEqual(Object.keys(expected)); for (const key of Object.keys(actual)) { - t.deepEqual(actual[key].type, expected[key]?.type, `key: ${key}`); + expect(actual[key].type).toStrictEqual(expected[key]?.type); if (actual[key].optional) { - t.deepEqual(actual[key].optional, expected[key]?.optional, `key (optional): ${key}`); + expect(actual[key].optional).toStrictEqual(expected[key]?.optional); } } }, diff --git a/drizzle-typebox/vitest.config.ts b/drizzle-typebox/vitest.config.ts new file mode 100644 index 000000000..1f0eb7ad9 --- /dev/null +++ b/drizzle-typebox/vitest.config.ts @@ -0,0 +1,25 @@ +import tsconfigPaths from 'vite-tsconfig-paths'; +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + include: [ + 'tests/**/*.test.ts', + ], + exclude: [ + 'tests/bun/**/*', + ], + typecheck: { + tsconfig: 'tsconfig.json', + }, + testTimeout: 100000, + hookTimeout: 100000, + isolate: false, + poolOptions: { + threads: { + singleThread: true, + }, + }, + }, + plugins: [tsconfigPaths()], +}); diff --git a/drizzle-valibot/package.json b/drizzle-valibot/package.json index 5e85b4b4c..1d88fd26a 100644 --- a/drizzle-valibot/package.json +++ b/drizzle-valibot/package.json @@ -9,7 +9,7 @@ "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) 
&& rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", - "test": "NODE_OPTIONS='--loader=tsx --no-warnings' ava" + "test": "vitest run" }, "exports": { ".": { @@ -35,18 +35,6 @@ "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, - "ava": { - "files": [ - "tests/**/*.test.ts", - "!tests/bun/**/*" - ], - "extensions": { - "ts": "module" - }, - "nodeArguments": [ - "--loader=tsx" - ] - }, "keywords": [ "valibot", "validate", @@ -74,13 +62,13 @@ "@rollup/plugin-terser": "^0.4.1", "@rollup/plugin-typescript": "^11.1.0", "@types/node": "^18.15.10", - "ava": "^5.1.0", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "rimraf": "^5.0.0", "rollup": "^3.20.7", - "tsx": "^3.12.2", "valibot": "^0.30.0", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "^1.6.0", "zx": "^7.2.2" } } diff --git a/drizzle-valibot/tests/mysql.test.ts b/drizzle-valibot/tests/mysql.test.ts index c5f25f0a8..9635ef8fa 100644 --- a/drizzle-valibot/tests/mysql.test.ts +++ b/drizzle-valibot/tests/mysql.test.ts @@ -1,4 +1,3 @@ -import test from 'ava'; import { bigint, binary, @@ -44,8 +43,9 @@ import { picklist, string, } from 'valibot'; +import { expect, test } from 'vitest'; import { createInsertSchema, createSelectSchema, jsonSchema } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; const customInt = customType<{ data: number }>({ dataType() { @@ -140,39 +140,35 @@ const testTableRow = { autoIncrement: 1, }; -test('insert valid row', (t) => { +test('insert valid row', () => { const schema = createInsertSchema(testTable); - t.deepEqual(parse(schema, testTableRow), testTableRow); + expect(parse(schema, testTableRow)).toStrictEqual(testTableRow); }); -test('insert invalid varchar length', (t) => { +test('insert invalid varchar length', () => { const schema = createInsertSchema(testTable); - t.throws( - () => - parse(schema, { - ...testTableRow, - varchar: 'A'.repeat(201), - }), - 
undefined, /* schema.safeParse({ ...testTableRow, varchar: 'A'.repeat(201) }).success */ - ); + + expect(() => + parse(schema, { + ...testTableRow, + varchar: 'A'.repeat(201), + }) + ).toThrow(undefined); }); -test('insert smaller char length should work', (t) => { +test('insert smaller char length should work', () => { const schema = createInsertSchema(testTable); const input = { ...testTableRow, char: 'abc' }; - t.deepEqual(parse(schema, input), input); + expect(parse(schema, input)).toStrictEqual(input); }); -test('insert larger char length should fail', (t) => { +test('insert larger char length should fail', () => { const schema = createInsertSchema(testTable); - t.throws( - () => parse(schema, { ...testTableRow, char: 'abcde' }), - undefined, - ); + expect(() => parse(schema, { ...testTableRow, char: 'abcde' })).toThrow(undefined); }); test('insert schema', (t) => { diff --git a/drizzle-valibot/tests/pg.test.ts b/drizzle-valibot/tests/pg.test.ts index e23af699a..659845fa1 100644 --- a/drizzle-valibot/tests/pg.test.ts +++ b/drizzle-valibot/tests/pg.test.ts @@ -1,4 +1,3 @@ -import test from 'ava'; import { char, date, integer, pgEnum, pgTable, serial, text, timestamp, varchar } from 'drizzle-orm/pg-core'; import { array, @@ -15,8 +14,9 @@ import { picklist, string, } from 'valibot'; +import { expect, test } from 'vitest'; import { createInsertSchema, createSelectSchema } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; export const roleEnum = pgEnum('role', ['admin', 'user']); @@ -52,29 +52,27 @@ const testUser = { initials: 'JD', }; -test('users insert valid user', (t) => { +test('users insert valid user', () => { const schema = createInsertSchema(users); - t.deepEqual(parse(schema, testUser), testUser); + expect(parse(schema, testUser)).toStrictEqual(testUser); }); -test('users insert invalid varchar', (t) => { +test('users insert invalid varchar', () => { const schema = createInsertSchema(users); - 
t.throws( - () => - parse(schema, { - ...testUser, - profession: 'Chief Executive Officer', - }), - undefined, - ); + expect(() => + parse(schema, { + ...testUser, + profession: 'Chief Executive Officer', + }) + ).toThrow(undefined); }); -test('users insert invalid char', (t) => { +test('users insert invalid char', () => { const schema = createInsertSchema(users); - t.throws(() => parse(schema, { ...testUser, initials: 'JoDo' }), undefined); + expect(() => parse(schema, { ...testUser, initials: 'JoDo' })).toThrow(undefined); }); test('users insert schema', (t) => { diff --git a/drizzle-valibot/tests/sqlite.test.ts b/drizzle-valibot/tests/sqlite.test.ts index bcec0d7f8..a520108f0 100644 --- a/drizzle-valibot/tests/sqlite.test.ts +++ b/drizzle-valibot/tests/sqlite.test.ts @@ -1,4 +1,3 @@ -import test from 'ava'; import { blob, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { bigint as valibigint, @@ -14,8 +13,9 @@ import { picklist, string, } from 'valibot'; +import { expect, test } from 'vitest'; import { createInsertSchema, createSelectSchema, jsonSchema } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; const blobJsonSchema = object({ foo: string(), @@ -51,18 +51,15 @@ const testUser = { role: 'admin' as const, }; -test('users insert valid user', (t) => { +test('users insert valid user', () => { const schema = createInsertSchema(users); // - t.deepEqual(parse(schema, testUser), testUser); + expect(parse(schema, testUser)).toStrictEqual(testUser); }); -test('users insert invalid text length', (t) => { +test('users insert invalid text length', () => { const schema = createInsertSchema(users); - t.throws( - () => parse(schema, { ...testUser, text: 'a'.repeat(256) }), - undefined, - ); + expect(() => parse(schema, { ...testUser, text: 'a'.repeat(256) })).toThrow(undefined); }); test('users insert schema', (t) => { diff --git a/drizzle-valibot/tests/utils.ts 
b/drizzle-valibot/tests/utils.ts index 19a129d4d..189731956 100644 --- a/drizzle-valibot/tests/utils.ts +++ b/drizzle-valibot/tests/utils.ts @@ -1,10 +1,10 @@ -import type { ExecutionContext } from 'ava'; import type { BaseSchema } from 'valibot'; +import { expect, type TaskContext } from 'vitest'; -export function expectSchemaShape>(t: ExecutionContext, expected: T) { +export function expectSchemaShape>(t: TaskContext, expected: T) { return { from(actual: T) { - t.deepEqual(Object.keys(actual), Object.keys(expected)); + expect(Object.keys(actual)).toStrictEqual(Object.keys(expected)); }, }; } diff --git a/drizzle-valibot/vitest.config.ts b/drizzle-valibot/vitest.config.ts new file mode 100644 index 000000000..1f0eb7ad9 --- /dev/null +++ b/drizzle-valibot/vitest.config.ts @@ -0,0 +1,25 @@ +import tsconfigPaths from 'vite-tsconfig-paths'; +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + include: [ + 'tests/**/*.test.ts', + ], + exclude: [ + 'tests/bun/**/*', + ], + typecheck: { + tsconfig: 'tsconfig.json', + }, + testTimeout: 100000, + hookTimeout: 100000, + isolate: false, + poolOptions: { + threads: { + singleThread: true, + }, + }, + }, + plugins: [tsconfigPaths()], +}); diff --git a/drizzle-zod/package.json b/drizzle-zod/package.json index e4c3a21c8..4d3acef81 100644 --- a/drizzle-zod/package.json +++ b/drizzle-zod/package.json @@ -9,7 +9,7 @@ "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) 
&& rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", - "test": "NODE_OPTIONS='--loader=tsx --no-warnings' ava" + "test": "vitest run" }, "exports": { ".": { @@ -42,10 +42,7 @@ ], "extensions": { "ts": "module" - }, - "nodeArguments": [ - "--loader=tsx" - ] + } }, "keywords": [ "zod", @@ -74,12 +71,12 @@ "@rollup/plugin-terser": "^0.4.1", "@rollup/plugin-typescript": "^11.1.0", "@types/node": "^18.15.10", - "ava": "^5.1.0", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "rimraf": "^5.0.0", "rollup": "^3.20.7", - "tsx": "^3.12.2", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "^1.6.0", "zod": "^3.20.2", "zx": "^7.2.2" } diff --git a/drizzle-zod/tests/mysql.test.ts b/drizzle-zod/tests/mysql.test.ts index 3a7bc9392..f28d6a768 100644 --- a/drizzle-zod/tests/mysql.test.ts +++ b/drizzle-zod/tests/mysql.test.ts @@ -1,4 +1,3 @@ -import test from 'ava'; import { bigint, binary, @@ -29,9 +28,10 @@ import { varchar, year, } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; import { z } from 'zod'; import { createInsertSchema, createSelectSchema, jsonSchema } from '~/index'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; const customInt = customType<{ data: number }>({ dataType() { @@ -121,28 +121,28 @@ const testTableRow = { autoIncrement: 1, }; -test('insert valid row', (t) => { +test('insert valid row', () => { const schema = createInsertSchema(testTable); - t.is(schema.safeParse(testTableRow).success, true); + expect(schema.safeParse(testTableRow).success).toBeTruthy(); }); -test('insert invalid varchar length', (t) => { +test('insert invalid varchar length', () => { const schema = createInsertSchema(testTable); - t.is(schema.safeParse({ ...testTableRow, varchar: 'A'.repeat(201) }).success, false); + expect(schema.safeParse({ ...testTableRow, varchar: 'A'.repeat(201) }).success).toBeFalsy(); }); -test('insert smaller char length should work', (t) => { 
+test('insert smaller char length should work', () => { const schema = createInsertSchema(testTable); - t.is(schema.safeParse({ ...testTableRow, char: 'abc' }).success, true); + expect(schema.safeParse({ ...testTableRow, char: 'abc' }).success).toBeTruthy(); }); -test('insert larger char length should fail', (t) => { +test('insert larger char length should fail', () => { const schema = createInsertSchema(testTable); - t.is(schema.safeParse({ ...testTableRow, char: 'abcde' }).success, false); + expect(schema.safeParse({ ...testTableRow, char: 'abcde' }).success).toBeFalsy(); }); test('insert schema', (t) => { diff --git a/drizzle-zod/tests/pg.test.ts b/drizzle-zod/tests/pg.test.ts index 8bf1d69ca..b1f6e0c20 100644 --- a/drizzle-zod/tests/pg.test.ts +++ b/drizzle-zod/tests/pg.test.ts @@ -1,8 +1,8 @@ -import test from 'ava'; import { char, date, integer, pgEnum, pgTable, serial, text, timestamp, varchar } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; import { z } from 'zod'; import { createInsertSchema, createSelectSchema } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; export const roleEnum = pgEnum('role', ['admin', 'user']); @@ -36,22 +36,22 @@ const testUser = { initials: 'JD', }; -test('users insert valid user', (t) => { +test('users insert valid user', () => { const schema = createInsertSchema(users); - t.is(schema.safeParse(testUser).success, true); + expect(schema.safeParse(testUser).success).toBeTruthy(); }); -test('users insert invalid varchar', (t) => { +test('users insert invalid varchar', () => { const schema = createInsertSchema(users); - t.is(schema.safeParse({ ...testUser, profession: 'Chief Executive Officer' }).success, false); + expect(schema.safeParse({ ...testUser, profession: 'Chief Executive Officer' }).success).toBeFalsy(); }); -test('users insert invalid char', (t) => { +test('users insert invalid char', () => { const schema = createInsertSchema(users); - 
t.is(schema.safeParse({ ...testUser, initials: 'JoDo' }).success, false); + expect(schema.safeParse({ ...testUser, initials: 'JoDo' }).success).toBeFalsy(); }); test('users insert schema', (t) => { diff --git a/drizzle-zod/tests/sqlite.test.ts b/drizzle-zod/tests/sqlite.test.ts index dca127956..5a2c3a04e 100644 --- a/drizzle-zod/tests/sqlite.test.ts +++ b/drizzle-zod/tests/sqlite.test.ts @@ -1,8 +1,8 @@ -import test from 'ava'; import { blob, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; import { z } from 'zod'; import { createInsertSchema, createSelectSchema, jsonSchema } from '../src'; -import { expectSchemaShape } from './utils'; +import { expectSchemaShape } from './utils.ts'; const blobJsonSchema = z.object({ foo: z.string(), @@ -34,16 +34,16 @@ const testUser = { role: 'admin', }; -test('users insert valid user', (t) => { +test('users insert valid user', () => { const schema = createInsertSchema(users); - t.is(schema.safeParse(testUser).success, true); + expect(schema.safeParse(testUser).success).toBeTruthy(); }); -test('users insert invalid text length', (t) => { +test('users insert invalid text length', () => { const schema = createInsertSchema(users); - t.is(schema.safeParse({ ...testUser, text: 'a'.repeat(256) }).success, false); + expect(schema.safeParse({ ...testUser, text: 'a'.repeat(256) }).success).toBeFalsy(); }); test('users insert schema', (t) => { diff --git a/drizzle-zod/tests/utils.ts b/drizzle-zod/tests/utils.ts index b8daf972e..1c28be260 100644 --- a/drizzle-zod/tests/utils.ts +++ b/drizzle-zod/tests/utils.ts @@ -1,18 +1,16 @@ -import type { ExecutionContext } from 'ava'; +import { expect, type TaskContext } from 'vitest'; import type { z } from 'zod'; -export function expectSchemaShape(t: ExecutionContext, expected: z.ZodObject) { +export function expectSchemaShape(t: TaskContext, expected: z.ZodObject) { return { from(actual: z.ZodObject) { - 
t.deepEqual(Object.keys(actual.shape), Object.keys(expected.shape)); + expect(Object.keys(actual.shape)).toStrictEqual(Object.keys(expected.shape)); for (const key of Object.keys(actual.shape)) { - t.deepEqual(actual.shape[key]!._def.typeName, expected.shape[key]?._def.typeName, `key: ${key}`); + expect(actual.shape[key]!._def.typeName).toStrictEqual(expected.shape[key]?._def.typeName); if (actual.shape[key]?._def.typeName === 'ZodOptional') { - t.deepEqual( + expect(actual.shape[key]!._def.innerType._def.typeName).toStrictEqual( actual.shape[key]!._def.innerType._def.typeName, - expected.shape[key]!._def.innerType._def.typeName, - `key (optional): ${key}`, ); } } diff --git a/drizzle-zod/vitest.config.ts b/drizzle-zod/vitest.config.ts new file mode 100644 index 000000000..1f0eb7ad9 --- /dev/null +++ b/drizzle-zod/vitest.config.ts @@ -0,0 +1,25 @@ +import tsconfigPaths from 'vite-tsconfig-paths'; +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + include: [ + 'tests/**/*.test.ts', + ], + exclude: [ + 'tests/bun/**/*', + ], + typecheck: { + tsconfig: 'tsconfig.json', + }, + testTimeout: 100000, + hookTimeout: 100000, + isolate: false, + poolOptions: { + threads: { + singleThread: true, + }, + }, + }, + plugins: [tsconfigPaths()], +}); diff --git a/eslint-plugin-drizzle/package.json b/eslint-plugin-drizzle/package.json index 8153ff028..22ba2a390 100644 --- a/eslint-plugin-drizzle/package.json +++ b/eslint-plugin-drizzle/package.json @@ -29,7 +29,7 @@ "cpy-cli": "^5.0.0", "eslint": "^8.53.0", "typescript": "^5.2.2", - "vitest": "^0.34.6" + "vitest": "^1.6.0" }, "peerDependencies": { "eslint": ">=8.0.0" diff --git a/eslint/eslint-plugin-drizzle/index.js b/eslint/eslint-plugin-drizzle-internal/index.js similarity index 100% rename from eslint/eslint-plugin-drizzle/index.js rename to eslint/eslint-plugin-drizzle-internal/index.js diff --git a/integration-tests/package.json b/integration-tests/package.json index 
c5fb6a598..f50e5b08b 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -5,48 +5,18 @@ "type": "module", "scripts": { "test:types": "tsc", - "test": "pnpm test:ava && pnpm test:esm && pnpm test:rqb", - "test:ava": "NODE_OPTIONS='--loader=tsx --no-warnings' ava tests --timeout=60s --serial", - "test:rqb": "vitest run --no-threads", + "test": "pnpm test:vitest", + "test:vitest": "vitest run", "test:esm": "node tests/imports.test.mjs && node tests/imports.test.cjs", "test:data-api": "sst shell vitest run tests/awsdatapi.test.ts" }, - "ava": { - "files": [ - "tests/**/*.test.{ts,cts,mts,js,cjs,mjs}", - "!tests/imports.test.mjs", - "!tests/imports.test.cjs", - "!tests/awsdatapi.alltypes.test.ts", - "!tests/awsdatapi.test.ts", - "!tests/planetscale-serverless/**/*.ts", - "!tests/bun/**/*", - "!tests/vercel-pg.test.ts", - "!tests/relational/**/*", - "!tests/libsql-batch.test.ts", - "!tests/xata-http.test.ts", - "!tests/d1-batch.test.ts", - "!tests/sqlite-proxy-batch.test.ts", - "!tests/neon-http-batch.test.ts", - "!tests/neon-http.test.ts", - "!tests/tidb-serverless.test.ts", - "!tests/replicas/**/*", - "!tests/imports/**/*", - "!tests/extensions/**/*" - ], - "extensions": { - "ts": "module" - }, - "nodeArguments": [ - "--loader=tsx" - ] - }, "keywords": [], "author": "Drizzle Team", "license": "Apache-2.0", "private": true, "devDependencies": { "@neondatabase/serverless": "0.9.0", - "@originjs/vite-plugin-commonjs": "^1.0.3", + "@types/async-retry": "^1.4.8", "@types/axios": "^0.14.0", "@types/better-sqlite3": "^7.6.4", "@types/dockerode": "^3.3.18", @@ -55,12 +25,14 @@ "@types/pg": "^8.10.1", "@types/sql.js": "^1.4.4", "@types/uuid": "^9.0.1", - "@vitest/ui": "^0.31.4", + "@vitest/ui": "^1.6.0", "ava": "^5.3.0", "axios": "^1.4.0", - "tsx": "^3.12.7", - "vite": "^4.3.9", - "vite-tsconfig-paths": "^4.2.0", + "cross-env": "^7.0.3", + "ts-node": "^10.9.2", + "tsx": "^4.14.0", + "vite": "^5.2.13", + "vite-tsconfig-paths": "^4.3.2", "zx": 
"^7.2.2" }, "dependencies": { @@ -71,13 +43,16 @@ "@miniflare/d1": "^2.14.2", "@miniflare/shared": "^2.14.2", "@planetscale/database": "^1.16.0", + "@prisma/client": "5.14.0", "@tidbcloud/serverless": "^0.1.1", "@typescript/analyze-trace": "^0.10.0", - "@vercel/postgres": "^0.3.0", + "@vercel/postgres": "^0.8.0", "@xata.io/client": "^0.29.3", + "async-retry": "^1.3.3", "better-sqlite3": "^8.4.0", "dockerode": "^3.3.4", "dotenv": "^16.1.4", + "drizzle-prisma-generator": "^0.1.2", "drizzle-typebox": "workspace:../drizzle-typebox/dist", "drizzle-valibot": "workspace:../drizzle-valibot/dist", "drizzle-zod": "workspace:../drizzle-zod/dist", @@ -86,13 +61,14 @@ "mysql2": "^3.3.3", "pg": "^8.11.0", "postgres": "^3.3.5", + "prisma": "5.14.0", "source-map-support": "^0.5.21", "sql.js": "^1.8.0", "sqlite3": "^5.1.4", "sst": "^3.0.4", "uuid": "^9.0.0", "uvu": "^0.5.6", - "vitest": "^0.31.4", + "vitest": "^1.6.0", "zod": "^3.20.2" } } diff --git a/integration-tests/tests/awsdatapi.alltypes.test.ts b/integration-tests/tests/awsdatapi.alltypes.test.ts index 585692cdd..16c49c05f 100644 --- a/integration-tests/tests/awsdatapi.alltypes.test.ts +++ b/integration-tests/tests/awsdatapi.alltypes.test.ts @@ -1,544 +1,545 @@ -import 'dotenv/config'; - -import { RDSDataClient } from '@aws-sdk/client-rds-data'; -import { fromIni } from '@aws-sdk/credential-providers'; -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import * as dotenv from 'dotenv'; -import { name, sql } from 'drizzle-orm'; -import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; -import { drizzle } from 'drizzle-orm/aws-data-api/pg'; -import { - bigint, - bigserial, - boolean, - date, - decimal, - doublePrecision, - integer, - json, - jsonb, - numeric, - pgEnum, - pgTable, - real, - serial, - smallint, - text, - time, - timestamp, - varchar, -} from 'drizzle-orm/pg-core'; - -dotenv.config(); - -export const allColumns = pgTable('all_columns', { - sm: smallint('smallint'), - smdef: 
smallint('smallint_def').default(10), - int: integer('integer'), - intdef: integer('integer_def').default(10), - numeric: numeric('numeric'), - numeric2: numeric('numeric2', { precision: 5 }), - numeric3: numeric('numeric3', { scale: 2 }), - numeric4: numeric('numeric4', { precision: 5, scale: 2 }), - numericdef: numeric('numeridef').default('100'), - bigint: bigint('bigint', { mode: 'number' }), - bigintdef: bigint('bigintdef', { mode: 'number' }).default(100), - bool: boolean('boolean'), - booldef: boolean('boolean_def').default(true), - text: text('text'), - textdef: text('textdef').default('text'), - varchar: varchar('varchar'), - varchardef: varchar('varchardef').default('text'), - serial: serial('serial'), - bigserial: bigserial('bigserial', { mode: 'number' }), - decimal: decimal('decimal', { precision: 100, scale: 2 }), - decimaldef: decimal('decimaldef', { precision: 100, scale: 2 }).default('100.0'), - doublePrecision: doublePrecision('doublePrecision'), - doublePrecisiondef: doublePrecision('doublePrecisiondef').default(100), - real: real('real'), - realdef: real('realdef').default(100), - json: json('json').$type<{ attr: string }>(), - jsondef: json('jsondef').$type<{ attr: string }>().default({ attr: 'value' }), - jsonb: jsonb('jsonb').$type<{ attr: string }>(), - jsonbdef: jsonb('jsonbdef').$type<{ attr: string }>().default({ attr: 'value' }), - time: time('time'), - time2: time('time2', { precision: 6, withTimezone: true }), - timedefnow: time('timedefnow').defaultNow(), - timestamp: timestamp('timestamp'), - timestamp2: timestamp('timestamp2', { precision: 6, withTimezone: true }), - timestamp3: timestamp('timestamp3', { withTimezone: true }), - timestamp4: timestamp('timestamp4', { precision: 4 }), - timestampdef: timestamp('timestampdef').defaultNow(), - date: date('date', { mode: 'date' }), - datedef: date('datedef').defaultNow(), -}); - -interface Context { - db: AwsDataApiPgDatabase; - row: typeof allColumns.$inferSelect; -} - -const test = 
anyTest as TestFn; - -test.before(async (t) => { - const ctx = t.context; - const database = process.env['AWS_DATA_API_DB']!; - const secretArn = process.env['AWS_DATA_API_SECRET_ARN']!; - const resourceArn = process.env['AWS_DATA_API_RESOURCE_ARN']!; - - const rdsClient = new RDSDataClient({ - credentials: fromIni({ profile: process.env['AWS_TEST_PROFILE'] }), - region: 'us-east-1', - }); - - ctx.db = drizzle(rdsClient, { - database, - secretArn, - resourceArn, - // logger: new DefaultLogger(), - }); - - await ctx.db.execute(sql` - CREATE TABLE IF NOT EXISTS "all_columns" ( - "smallint" smallint, - "smallint_def" smallint DEFAULT 10, - "integer" integer, - "integer_def" integer DEFAULT 10, - "numeric" numeric, - "numeric2" numeric(5), - "numeric3" numeric, - "numeric4" numeric(5, 2), - "numeridef" numeric DEFAULT '100', - "bigint" bigint, - "bigintdef" bigint DEFAULT 100, - "boolean" boolean, - "boolean_def" boolean DEFAULT true, - "text" text, - "textdef" text DEFAULT 'text', - "varchar" varchar, - "varchardef" varchar DEFAULT 'text', - "serial" serial, - "bigserial" bigserial, - "decimal" numeric(100, 2), - "decimaldef" numeric(100, 2) DEFAULT '100.0', - "doublePrecision" double precision, - "doublePrecisiondef" double precision DEFAULT 100, - "real" real, - "realdef" real DEFAULT 100, - "json" json, - "jsondef" json DEFAULT '{"attr":"value"}'::json, - "jsonb" jsonb, - "jsonbdef" jsonb DEFAULT '{"attr":"value"}'::jsonb, - "time" time, - "time2" time, - "timedefnow" time DEFAULT now(), - "timestamp" timestamp, - "timestamp2" timestamp (6) with time zone, - "timestamp3" timestamp with time zone, - "timestamp4" timestamp (4), - "timestampdef" timestamp DEFAULT now(), - "date" date, - "datedef" date DEFAULT now() - ) - `); - - const now = new Date(); - - await ctx.db.insert(allColumns).values({ - sm: 12, - int: 22, - numeric: '1.1', - numeric2: '123.45', - numeric3: '123.45', - numeric4: '123.45', - bigint: 1578, - bool: true, - text: 'inserted_text', - varchar: 
'inserted_varchar', - serial: 44, - bigserial: 63473487, - decimal: '100.1', - doublePrecision: 7384.34, - real: 73849.11, - json: { attr: 'hellohello' }, - jsonb: { attr: 'hellohello' }, - time: '11:12:00', - time2: '11:12:00', - timestamp: now, - timestamp2: now, - timestamp3: now, - timestamp4: now, - date: now, - // interval: '10 days' - }); - - const resultRows = await ctx.db.select().from(allColumns); - t.is(resultRows.length, 1); - - const row = resultRows[0]!; - ctx.row = row; -}); - -test.serial('[small] serial type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.sm === 'number'); - t.is(row.sm, 12); -}); - -test.serial('[small serial] type with default', async (t) => { - const { row } = t.context; - - t.assert(typeof row.sm === 'number'); - t.is(row.smdef, 10); -}); - -test.serial('[int] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.int === 'number'); - t.is(row.int, 22); -}); - -test.serial('[int] type with default', async (t) => { - const { row } = t.context; - - t.assert(typeof row.intdef === 'number'); - t.is(row.intdef, 10); -}); - -test.serial('[numeric] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.int === 'number'); - t.is(row.int, 22); -}); - -test.serial('[numeric(precision)] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.int === 'number'); - t.is(row.int, 22); -}); - -test.serial('[numeric(scale)] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.int === 'number'); - t.is(row.int, 22); -}); - -test.serial('[numeric(precision, scale)] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.int === 'number'); - t.is(row.int, 22); -}); - -test.serial('[numeric] type with default', async (t) => { - const { row } = t.context; +/* eslint-disable unicorn/no-empty-file */ +// import 'dotenv/config'; + +// import { RDSDataClient } from '@aws-sdk/client-rds-data'; +// import { fromIni } from 
'@aws-sdk/credential-providers'; +// import type { TestFn } from 'ava'; +// import anyTest from 'ava'; +// import * as dotenv from 'dotenv'; +// import { name, sql } from 'drizzle-orm'; +// import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; +// import { drizzle } from 'drizzle-orm/aws-data-api/pg'; +// import { +// bigint, +// bigserial, +// boolean, +// date, +// decimal, +// doublePrecision, +// integer, +// json, +// jsonb, +// numeric, +// pgEnum, +// pgTable, +// real, +// serial, +// smallint, +// text, +// time, +// timestamp, +// varchar, +// } from 'drizzle-orm/pg-core'; + +// dotenv.config(); + +// export const allColumns = pgTable('all_columns', { +// sm: smallint('smallint'), +// smdef: smallint('smallint_def').default(10), +// int: integer('integer'), +// intdef: integer('integer_def').default(10), +// numeric: numeric('numeric'), +// numeric2: numeric('numeric2', { precision: 5 }), +// numeric3: numeric('numeric3', { scale: 2 }), +// numeric4: numeric('numeric4', { precision: 5, scale: 2 }), +// numericdef: numeric('numeridef').default('100'), +// bigint: bigint('bigint', { mode: 'number' }), +// bigintdef: bigint('bigintdef', { mode: 'number' }).default(100), +// bool: boolean('boolean'), +// booldef: boolean('boolean_def').default(true), +// text: text('text'), +// textdef: text('textdef').default('text'), +// varchar: varchar('varchar'), +// varchardef: varchar('varchardef').default('text'), +// serial: serial('serial'), +// bigserial: bigserial('bigserial', { mode: 'number' }), +// decimal: decimal('decimal', { precision: 100, scale: 2 }), +// decimaldef: decimal('decimaldef', { precision: 100, scale: 2 }).default('100.0'), +// doublePrecision: doublePrecision('doublePrecision'), +// doublePrecisiondef: doublePrecision('doublePrecisiondef').default(100), +// real: real('real'), +// realdef: real('realdef').default(100), +// json: json('json').$type<{ attr: string }>(), +// jsondef: json('jsondef').$type<{ attr: string 
}>().default({ attr: 'value' }), +// jsonb: jsonb('jsonb').$type<{ attr: string }>(), +// jsonbdef: jsonb('jsonbdef').$type<{ attr: string }>().default({ attr: 'value' }), +// time: time('time'), +// time2: time('time2', { precision: 6, withTimezone: true }), +// timedefnow: time('timedefnow').defaultNow(), +// timestamp: timestamp('timestamp'), +// timestamp2: timestamp('timestamp2', { precision: 6, withTimezone: true }), +// timestamp3: timestamp('timestamp3', { withTimezone: true }), +// timestamp4: timestamp('timestamp4', { precision: 4 }), +// timestampdef: timestamp('timestampdef').defaultNow(), +// date: date('date', { mode: 'date' }), +// datedef: date('datedef').defaultNow(), +// }); + +// interface Context { +// db: AwsDataApiPgDatabase; +// row: typeof allColumns.$inferSelect; +// } + +// const test = anyTest as TestFn; + +// test.before(async (t) => { +// const ctx = t.context; +// const database = process.env['AWS_DATA_API_DB']!; +// const secretArn = process.env['AWS_DATA_API_SECRET_ARN']!; +// const resourceArn = process.env['AWS_DATA_API_RESOURCE_ARN']!; + +// const rdsClient = new RDSDataClient({ +// credentials: fromIni({ profile: process.env['AWS_TEST_PROFILE'] }), +// region: 'us-east-1', +// }); + +// ctx.db = drizzle(rdsClient, { +// database, +// secretArn, +// resourceArn, +// // logger: new DefaultLogger(), +// }); + +// await ctx.db.execute(sql` +// CREATE TABLE IF NOT EXISTS "all_columns" ( +// "smallint" smallint, +// "smallint_def" smallint DEFAULT 10, +// "integer" integer, +// "integer_def" integer DEFAULT 10, +// "numeric" numeric, +// "numeric2" numeric(5), +// "numeric3" numeric, +// "numeric4" numeric(5, 2), +// "numeridef" numeric DEFAULT '100', +// "bigint" bigint, +// "bigintdef" bigint DEFAULT 100, +// "boolean" boolean, +// "boolean_def" boolean DEFAULT true, +// "text" text, +// "textdef" text DEFAULT 'text', +// "varchar" varchar, +// "varchardef" varchar DEFAULT 'text', +// "serial" serial, +// "bigserial" bigserial, +// 
"decimal" numeric(100, 2), +// "decimaldef" numeric(100, 2) DEFAULT '100.0', +// "doublePrecision" double precision, +// "doublePrecisiondef" double precision DEFAULT 100, +// "real" real, +// "realdef" real DEFAULT 100, +// "json" json, +// "jsondef" json DEFAULT '{"attr":"value"}'::json, +// "jsonb" jsonb, +// "jsonbdef" jsonb DEFAULT '{"attr":"value"}'::jsonb, +// "time" time, +// "time2" time, +// "timedefnow" time DEFAULT now(), +// "timestamp" timestamp, +// "timestamp2" timestamp (6) with time zone, +// "timestamp3" timestamp with time zone, +// "timestamp4" timestamp (4), +// "timestampdef" timestamp DEFAULT now(), +// "date" date, +// "datedef" date DEFAULT now() +// ) +// `); + +// const now = new Date(); + +// await ctx.db.insert(allColumns).values({ +// sm: 12, +// int: 22, +// numeric: '1.1', +// numeric2: '123.45', +// numeric3: '123.45', +// numeric4: '123.45', +// bigint: 1578, +// bool: true, +// text: 'inserted_text', +// varchar: 'inserted_varchar', +// serial: 44, +// bigserial: 63473487, +// decimal: '100.1', +// doublePrecision: 7384.34, +// real: 73849.11, +// json: { attr: 'hellohello' }, +// jsonb: { attr: 'hellohello' }, +// time: '11:12:00', +// time2: '11:12:00', +// timestamp: now, +// timestamp2: now, +// timestamp3: now, +// timestamp4: now, +// date: now, +// // interval: '10 days' +// }); + +// const resultRows = await ctx.db.select().from(allColumns); +// t.is(resultRows.length, 1); + +// const row = resultRows[0]!; +// ctx.row = row; +// }); + +// test.serial('[small] serial type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.sm === 'number'); +// t.is(row.sm, 12); +// }); + +// test.serial('[small serial] type with default', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.sm === 'number'); +// t.is(row.smdef, 10); +// }); + +// test.serial('[int] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.int === 'number'); +// t.is(row.int, 22); +// }); + +// 
test.serial('[int] type with default', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.intdef === 'number'); +// t.is(row.intdef, 10); +// }); + +// test.serial('[numeric] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.int === 'number'); +// t.is(row.int, 22); +// }); + +// test.serial('[numeric(precision)] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.int === 'number'); +// t.is(row.int, 22); +// }); + +// test.serial('[numeric(scale)] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.int === 'number'); +// t.is(row.int, 22); +// }); + +// test.serial('[numeric(precision, scale)] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.int === 'number'); +// t.is(row.int, 22); +// }); + +// test.serial('[numeric] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.int === 'number'); - t.is(row.int, 22); -}); +// t.assert(typeof row.int === 'number'); +// t.is(row.int, 22); +// }); -test.serial('[bigint] type', async (t) => { - const { row } = t.context; +// test.serial('[bigint] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.bigint === 'number'); - t.is(row.bigint, 1578); -}); +// t.assert(typeof row.bigint === 'number'); +// t.is(row.bigint, 1578); +// }); -test.serial('[bigint] type with default', async (t) => { - const { row } = t.context; +// test.serial('[bigint] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.bigintdef === 'number'); - t.is(row.bigintdef, 100); -}); +// t.assert(typeof row.bigintdef === 'number'); +// t.is(row.bigintdef, 100); +// }); -test.serial('[boolean] type', async (t) => { - const { row } = t.context; +// test.serial('[boolean] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.bool === 'boolean'); - t.is(row.bool, true); -}); +// t.assert(typeof row.bool === 'boolean'); 
+// t.is(row.bool, true); +// }); -test.serial('[boolean] type with default', async (t) => { - const { row } = t.context; +// test.serial('[boolean] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.booldef === 'boolean'); - t.is(row.booldef, true); -}); +// t.assert(typeof row.booldef === 'boolean'); +// t.is(row.booldef, true); +// }); -test.serial('[text] type', async (t) => { - const { row } = t.context; +// test.serial('[text] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.text === 'string'); - t.is(row.text, 'inserted_text'); -}); +// t.assert(typeof row.text === 'string'); +// t.is(row.text, 'inserted_text'); +// }); -test.serial('[text] type with default', async (t) => { - const { row } = t.context; +// test.serial('[text] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.textdef === 'string'); - t.is(row.textdef, 'text'); -}); +// t.assert(typeof row.textdef === 'string'); +// t.is(row.textdef, 'text'); +// }); -test.serial('[varchar] type', async (t) => { - const { row } = t.context; +// test.serial('[varchar] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.varchar === 'string'); - t.is(row.varchar, 'inserted_varchar'); -}); +// t.assert(typeof row.varchar === 'string'); +// t.is(row.varchar, 'inserted_varchar'); +// }); -test.serial('[varchar] type with default', async (t) => { - const { row } = t.context; +// test.serial('[varchar] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.varchardef === 'string'); - t.is(row.varchardef, 'text'); -}); +// t.assert(typeof row.varchardef === 'string'); +// t.is(row.varchardef, 'text'); +// }); -test.serial('[serial] type', async (t) => { - const { row } = t.context; +// test.serial('[serial] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.serial === 'number'); - t.is(row.serial, 44); -}); +// t.assert(typeof row.serial 
=== 'number'); +// t.is(row.serial, 44); +// }); -test.serial('[bigserial] type', async (t) => { - const { row } = t.context; +// test.serial('[bigserial] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.bigserial === 'number'); - t.is(row.bigserial, 63473487); -}); +// t.assert(typeof row.bigserial === 'number'); +// t.is(row.bigserial, 63473487); +// }); -test.serial('[decimal] type', async (t) => { - const { row } = t.context; +// test.serial('[decimal] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.decimal === 'string'); - t.is(row.decimal, '100.10'); -}); +// t.assert(typeof row.decimal === 'string'); +// t.is(row.decimal, '100.10'); +// }); -test.serial('[decimal] type with default', async (t) => { - const { row } = t.context; +// test.serial('[decimal] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.decimaldef === 'string'); - t.is(row.decimaldef, '100.00'); -}); +// t.assert(typeof row.decimaldef === 'string'); +// t.is(row.decimaldef, '100.00'); +// }); -test.serial('[double precision] type', async (t) => { - const { row } = t.context; +// test.serial('[double precision] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.doublePrecision === 'number'); - t.is(row.doublePrecision, 7384.34); -}); +// t.assert(typeof row.doublePrecision === 'number'); +// t.is(row.doublePrecision, 7384.34); +// }); -test.serial('[double precision] type with default', async (t) => { - const { row } = t.context; +// test.serial('[double precision] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.doublePrecisiondef === 'number'); - t.is(row.doublePrecisiondef, 100); -}); +// t.assert(typeof row.doublePrecisiondef === 'number'); +// t.is(row.doublePrecisiondef, 100); +// }); -test.serial('[real] type', async (t) => { - const { row } = t.context; +// test.serial('[real] type', async (t) => { +// const { row } = t.context; - 
t.assert(typeof row.real === 'number'); - t.is(row.real, 73849.11); -}); +// t.assert(typeof row.real === 'number'); +// t.is(row.real, 73849.11); +// }); -test.serial('[real] type with default', async (t) => { - const { row } = t.context; +// test.serial('[real] type with default', async (t) => { +// const { row } = t.context; - t.assert(typeof row.realdef === 'number'); - t.is(row.realdef, 100); -}); +// t.assert(typeof row.realdef === 'number'); +// t.is(row.realdef, 100); +// }); -test.serial('[json] type', async (t) => { - const { row } = t.context; +// test.serial('[json] type', async (t) => { +// const { row } = t.context; - t.assert(typeof row.json?.attr === 'string'); - t.deepEqual(row.json, { attr: 'hellohello' }); -}); - -test.serial('[json] type with default', async (t) => { - const { row } = t.context; - - t.assert(typeof row.jsondef?.attr === 'string'); - t.deepEqual(row.jsondef, { attr: 'value' }); -}); - -test.serial('[jsonb] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.jsonb?.attr === 'string'); - t.deepEqual(row.jsonb, { attr: 'hellohello' }); -}); - -test.serial('[jsonb] type with default', async (t) => { - const { row } = t.context; - - t.assert(typeof row.jsonbdef?.attr === 'string'); - t.deepEqual(row.jsonbdef, { attr: 'value' }); -}); - -test.serial('[time] type', async (t) => { - const { row } = t.context; - - t.assert(typeof row.time === 'string'); - t.assert(typeof row.time2 === 'string'); - t.assert(typeof row.timedefnow === 'string'); -}); - -test.serial('[timestamp] type with default', async (t) => { - const { row } = t.context; - - t.assert(row.timestamp instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(row.timestamp2 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(row.timestamp3 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(row.timestamp4 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - 
t.assert(row.timestampdef instanceof Date); // eslint-disable-line no-instanceof/no-instanceof -}); - -test.serial('[date] type with default', async (t) => { - const { row } = t.context; - - t.assert(row.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof row.datedef === 'string'); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - 
await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.after.always(async (t) => { - const ctx = t.context; - - await ctx.db.execute(sql`drop table "all_columns"`); -}); +// t.assert(typeof row.json?.attr === 'string'); +// t.deepEqual(row.json, { attr: 'hellohello' }); +// }); + +// test.serial('[json] type with default', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.jsondef?.attr === 'string'); +// t.deepEqual(row.jsondef, { attr: 'value' }); +// }); + +// test.serial('[jsonb] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.jsonb?.attr === 'string'); +// t.deepEqual(row.jsonb, { attr: 'hellohello' }); +// }); + +// test.serial('[jsonb] type with default', async (t) => { +// const { row 
} = t.context; + +// t.assert(typeof row.jsonbdef?.attr === 'string'); +// t.deepEqual(row.jsonbdef, { attr: 'value' }); +// }); + +// test.serial('[time] type', async (t) => { +// const { row } = t.context; + +// t.assert(typeof row.time === 'string'); +// t.assert(typeof row.time2 === 'string'); +// t.assert(typeof row.timedefnow === 'string'); +// }); + +// test.serial('[timestamp] type with default', async (t) => { +// const { row } = t.context; + +// t.assert(row.timestamp instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(row.timestamp2 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(row.timestamp3 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(row.timestamp4 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(row.timestampdef instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// }); + +// test.serial('[date] type with default', async (t) => { +// const { row } = t.context; + +// t.assert(row.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(typeof row.datedef === 'string'); +// }); + +// test.serial('select from enum', async (t) => { +// const { db } = t.context; + +// const muscleEnum = pgEnum('muscle', [ +// 'abdominals', +// 'hamstrings', +// 'adductors', +// 'quadriceps', +// 'biceps', +// 'shoulders', +// 'chest', +// 'middle_back', +// 'calves', +// 'glutes', +// 'lower_back', +// 'lats', +// 'triceps', +// 'traps', +// 'forearms', +// 'neck', +// 'abductors', +// ]); + +// const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); + +// const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); + +// const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); + +// const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); + +// const categoryEnum = pgEnum('category', 
['upper_body', 'lower_body', 'full_body']); + +// const exercises = pgTable('exercises', { +// id: serial('id').primaryKey(), +// name: varchar('name').notNull(), +// force: forceEnum('force'), +// level: levelEnum('level'), +// mechanic: mechanicEnum('mechanic'), +// equipment: equipmentEnum('equipment'), +// instructions: text('instructions'), +// category: categoryEnum('category'), +// primaryMuscles: muscleEnum('primary_muscles').array(), +// secondaryMuscles: muscleEnum('secondary_muscles').array(), +// createdAt: timestamp('created_at').notNull().default(sql`now()`), +// updatedAt: timestamp('updated_at').notNull().default(sql`now()`), +// }); + +// await db.execute(sql`drop table if exists ${exercises}`); +// await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); +// await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); +// await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); +// await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); +// await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); +// await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); + +// await db.execute( +// sql`create type ${ +// name(muscleEnum.enumName) +// } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, +// ); +// await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); +// await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); +// await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); +// await db.execute( +// sql`create type ${ +// name(equipmentEnum.enumName) +// } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, +// ); 
+// await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); +// await db.execute(sql` +// create table ${exercises} ( +// id serial primary key, +// name varchar not null, +// force force, +// level level, +// mechanic mechanic, +// equipment equipment, +// instructions text, +// category category, +// primary_muscles muscle[], +// secondary_muscles muscle[], +// created_at timestamp not null default now(), +// updated_at timestamp not null default now() +// ) +// `); + +// await db.insert(exercises).values({ +// name: 'Bench Press', +// force: 'isotonic', +// level: 'beginner', +// mechanic: 'compound', +// equipment: 'barbell', +// instructions: +// 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', +// category: 'upper_body', +// primaryMuscles: ['chest', 'triceps'], +// secondaryMuscles: ['shoulders', 'traps'], +// }); + +// const result = await db.select().from(exercises); + +// t.deepEqual(result, [ +// { +// id: 1, +// name: 'Bench Press', +// force: 'isotonic', +// level: 'beginner', +// mechanic: 'compound', +// equipment: 'barbell', +// instructions: +// 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', +// category: 'upper_body', +// primaryMuscles: ['chest', 'triceps'], +// secondaryMuscles: ['shoulders', 'traps'], +// createdAt: result[0]!.createdAt, +// updatedAt: result[0]!.updatedAt, +// }, +// ]); + +// await db.execute(sql`drop table ${exercises}`); +// await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); +// await db.execute(sql`drop type ${name(forceEnum.enumName)}`); +// await db.execute(sql`drop type ${name(levelEnum.enumName)}`); +// await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); +// await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); +// await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); +// }); + +// test.after.always(async (t) => { +// const ctx = t.context; + +// await ctx.db.execute(sql`drop table "all_columns"`); +// }); diff --git a/integration-tests/tests/better-sqlite.test.ts b/integration-tests/tests/better-sqlite.test.ts deleted file mode 100644 index 0f86166f0..000000000 --- a/integration-tests/tests/better-sqlite.test.ts +++ /dev/null @@ -1,2110 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Database from 'better-sqlite3'; -import { - and, - asc, - eq, - type Equal, - exists, - gt, - inArray, - name, - placeholder, - sql, - TransactionRollbackError, -} from 'drizzle-orm'; -import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; -import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; -import { - alias, - blob, - getTableConfig, - getViewConfig, - int, - integer, - primaryKey, - sqliteTable, - sqliteTableCreator, - sqliteView, - text, - unique, - uniqueKeyName, -} from 'drizzle-orm/sqlite-core'; -import { Expect, randomString } from './utils.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified', { mode: 'boolean' 
}).notNull().default(false), - json: blob('json', { mode: 'json' }).$type(), - createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), -}); - -const users2Table = sqliteTable('users2', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const citiesTable = sqliteTable('cities', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const coursesTable = sqliteTable('courses', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = sqliteTable('course_categories', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = sqliteTable('orders', { - id: integer('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const usersMigratorTable = sqliteTable('users12', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const anotherUsersMigratorTable = sqliteTable('another_users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const pkExampleTable = sqliteTable('pk_example', { - id: integer('id').notNull(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey(table.id, table.name), -})); - -const bigIntExample = sqliteTable('big_int_example', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - bigInt: blob('big_int', { mode: 'bigint' }).notNull(), -}); - -interface Context { - db: BetterSQLite3Database; - client: Database.Database; -} - -const test = anyTest as TestFn; - -test.before((t) => { - const ctx = t.context; - const dbPath = 
process.env['SQLITE_DB_PATH'] ?? ':memory:'; - - ctx.client = new Database(dbPath); - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always((t) => { - const ctx = t.context; - ctx.client?.close(); -}); - -test.after.always((t) => { - const ctx = t.context; - ctx.client?.close(); -}); - -test.beforeEach((t) => { - const ctx = t.context; - - ctx.db.run(sql`drop table if exists ${usersTable}`); - ctx.db.run(sql`drop table if exists ${users2Table}`); - ctx.db.run(sql`drop table if exists ${citiesTable}`); - ctx.db.run(sql`drop table if exists ${coursesTable}`); - ctx.db.run(sql`drop table if exists ${courseCategoriesTable}`); - ctx.db.run(sql`drop table if exists ${orders}`); - ctx.db.run(sql`drop table if exists ${bigIntExample}`); - ctx.db.run(sql`drop table if exists ${pkExampleTable}`); - - ctx.db.run(sql` - create table ${usersTable} ( - id integer primary key, - name text not null, - verified integer not null default 0, - json blob, - created_at integer not null default (strftime('%s', 'now')) - ) - `); - ctx.db.run(sql` - create table ${users2Table} ( - id integer primary key, - name text not null, - city_id integer references ${citiesTable}(${name(citiesTable.id.name)}) - ) - `); - ctx.db.run(sql` - create table ${citiesTable} ( - id integer primary key, - name text not null - ) - `); - ctx.db.run(sql` - create table ${courseCategoriesTable} ( - id integer primary key, - name text not null - ) - `); - ctx.db.run(sql` - create table ${coursesTable} ( - id integer primary key, - name text not null, - category_id integer references ${courseCategoriesTable}(${name(courseCategoriesTable.id.name)}) - ) - `); - ctx.db.run(sql` - create table ${orders} ( - id integer primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `); - ctx.db.run(sql` - create table ${pkExampleTable} ( - id integer not null, - name text not null, - email text not null, - primary key (id, name) - ) 
- `); - ctx.db.run(sql` - create table ${bigIntExample} ( - id integer primary key, - name text not null, - big_int blob not null - ) - `); -}); - -test.serial('table configs: unique third param', (t) => { - const cities1Table = sqliteTable('cities1', { - id: int('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: unique().on(t.name, t.state), - f1: unique('custom').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - t.assert(tableConfig.uniqueConstraints.length === 2); - - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); - t.assert( - tableConfig.uniqueConstraints[0]?.name - === uniqueKeyName(cities1Table, tableConfig.uniqueConstraints[0]?.columns?.map((column) => column.name) ?? []), - ); - - t.deepEqual(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name), ['name', 'state']); - t.assert(tableConfig.uniqueConstraints[1]?.name === 'custom'); -}); - -test.serial('table configs: unique in column', (t) => { - const cities1Table = sqliteTable('cities1', { - id: int('id').primaryKey(), - name: text('name').notNull().unique(), - state: text('state').unique('custom'), - field: text('field').unique(), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - t.assert(columnName?.isUnique); - t.assert(columnName?.uniqueName === uniqueKeyName(cities1Table, [columnName!.name])); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - t.assert(columnState?.isUnique); - t.assert(columnState?.uniqueName === 'custom'); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - t.assert(columnField?.isUnique); - t.assert(columnField?.uniqueName === uniqueKeyName(cities1Table, [columnField!.name])); -}); - -test.serial('insert bigint values', (t) => { - const { db } = t.context; - - db.insert(bigIntExample).values({ name: 
'one', bigInt: BigInt('0') }).run(); - db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); - db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); - db.insert(bigIntExample).values({ name: 'four', bigInt: BigInt('1234567890') }).run(); - db.insert(bigIntExample).values({ name: 'five', bigInt: BigInt('12345678900987654321') }).run(); - - const result = db.select().from(bigIntExample).all(); - t.deepEqual(result, [ - { id: 1, name: 'one', bigInt: BigInt('0') }, - { id: 2, name: 'two', bigInt: BigInt('127') }, - { id: 3, name: 'three', bigInt: BigInt('32767') }, - { id: 4, name: 'four', bigInt: BigInt('1234567890') }, - { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, - ]); -}); - -test.serial('select all fields', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select().from(usersTable).all(); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select partial', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select({ name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.select({ - name: sql`upper(${usersTable.name})`, - 
}).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', (t) => { - const { db } = t.context; - - const usersDistinctTable = sqliteTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${usersDistinctTable}`); - db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); - - db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]).run(); - const users = db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ).all(); - - db.run(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', (t) => { - const { db } = t.context; - - const users = db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('insert returning sql + get()', (t) => { - const { db } = t.context; - - const users = db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).get(); - - t.deepEqual(users, { name: 'JOHN' }); -}); - -test.serial('delete returning sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - 
t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update returning sql + get()', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).get(); - - t.deepEqual(users, { name: 'JANE' }); -}); - -test.serial('insert with auto increment', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'George' }, - { name: 'Austin' }, - ]).run(); - const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'George' }, - { id: 4, name: 'Austin' }, - ]); -}); - -test.serial('insert with default values', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert with overridden default values', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John', verified: true }).run(); - const result = db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('update with returning all fields', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 
'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning all fields + get()', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().get(); - - t.assert(users.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users.createdAt.getTime() - now) < 5000); - t.deepEqual(users, { id: 1, name: 'Jane', verified: false, json: null, createdAt: users.createdAt }); -}); - -test.serial('update with returning partial', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning all fields + get()', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().get(); - - t.assert(users!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users!.createdAt.getTime() - now) < 5000); - 
t.deepEqual(users, { id: 1, name: 'John', verified: false, json: null, createdAt: users!.createdAt }); -}); - -test.serial('delete with returning partial', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('delete with returning partial + get()', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).get(); - - t.deepEqual(users, { id: 1, name: 'John' }); -}); - -test.serial('insert + select', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - db.insert(usersTable).values({ name: 'Jane' }).run(); - const result2 = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); -}); - -test.serial('json insert', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); - const result = db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); -}); - -test.serial('insert many', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]).run(); - const result = db.select({ - id: usersTable.id, - name: usersTable.name, - json: 
usersTable.json, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', (t) => { - const { db } = t.context; - - const result = db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }) - .all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('partial join with alias', (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)) - .all(); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`create table ${users} (id integer 
primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)) - .all(); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('select from alias', (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)) - .all(); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); - const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); - const result = statement.all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', (t) => { - const 
{ db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - stmt.run({ name: `John ${i}` }); - } - - const result = db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = stmt.all({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('select with group by as field', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with exists', async (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const user = alias(usersTable, 'user'); - const result = db.select({ name: usersTable.name }).from(usersTable).where( - exists(db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, 
usersTable.id)))), - ).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select with group by as sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - 
params: [], - }); -}); - -test.serial('migrator', (t) => { - const { db } = t.context; - - db.run(sql`drop table if exists another_users`); - db.run(sql`drop table if exists users12`); - db.run(sql`drop table if exists __drizzle_migrations`); - - migrate(db, { migrationsFolder: './drizzle2/sqlite' }); - - db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result = db.select().from(usersMigratorTable).all(); - - db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result2 = db.select().from(usersMigratorTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - t.deepEqual(result2, [{ id: 1, name: 'John', email: 'email' }]); - - db.run(sql`drop table another_users`); - db.run(sql`drop table users12`); - db.run(sql`drop table __drizzle_migrations`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - db.run(sql`drop table if exists another_users`); - db.run(sql`drop table if exists users12`); - db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - - migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); - - // test if the custom migrations table was created - const res = db.all(sql`select * from ${sql.identifier(customTable)};`); - t.true(res.length > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - db.run(sql`drop table another_users`); - db.run(sql`drop table users12`); - db.run(sql`drop table ${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.run + select via db.all', (t) => { - const { db } = t.context; - - db.run(sql`insert into ${usersTable} (${name(usersTable.name.name)}) values 
(${'John'})`); - - const result = db.all<{ id: number; name: string }>(sql`select id, name from "users"`); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.get', (t) => { - const { db } = t.context; - - const inserted = db.get<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name(usersTable.name.name) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.run + select via db.get', (t) => { - const { db } = t.context; - - db.run(sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`); - - const result = db.get<{ id: number; name: string }>( - sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, - ); - t.deepEqual(result, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.get w/ query builder', (t) => { - const { db } = t.context; - - const inserted = db.get( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('left join (flat object fields)', (t) => { - const { db } = t.context; - - const { id: cityId } = db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all()[0]!; - - db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', (t) => { - const { db } = t.context; - - const { id: cityId } = 
db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all()[0]!; - - db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', (t) => { - const { db } = t.context; - - const { id: cityId } = db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all()[0]!; - - db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)).all(); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', (t) => { - const { db } = t.context; - - db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]).run(); - - db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]).run(); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - 
total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name) - .all(); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... select', (t) => { - const { db } = t.context; - - db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]).run(); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as int)`, - productSales: sql`cast(sum(${orders.amount}) as int)`, - }) - .from(orders) - .where(inArray(orders.region, 
db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product) - .all(); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('query check: insert single empty row', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (null, ?, null)', - params: ['Dan'], - }); -}); - -test.serial('query check: insert multiple empty rows', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (null, ?, null), (null, ?, null)', - params: ['Dan', 'Dan'], - }); -}); - -test.serial('Insert all defaults in 1 row', (t) => { - const { db } = t.context; - - const users = sqliteTable('empty_insert_single', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, - ); - - db.insert(users).values({}).run(); - - const res = db.select().from(users).all(); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in 
multiple rows', (t) => { - const { db } = t.context; - - const users = sqliteTable('empty_insert_multiple', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, - ); - - db.insert(users).values([{}, {}]).run(); - - const res = db.select().from(users).all(); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('select from subquery sql', (t) => { - const { db } = t.context; - - db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = db.select({ name: sq.name }).from(sq).all(); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare()); -}); - -test.serial('select count()', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const res = db.select({ count: sql`count(*)` }).from(usersTable).all(); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('having', (t) => { - const { db } = t.context; - - db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); - - db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 
}, - ]).run(); - - const result = db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name) - .all(); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', (t) => { - const { db } = t.context; - - const newYorkers1 = sqliteView('new_yorkers1') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = sqliteView('new_yorkers2', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = sqliteView('new_yorkers1', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - db.run(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - db.run(sql`create view ${newYorkers2} as ${getViewConfig(newYorkers2).query}`); - - db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]).run(); - - db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]).run(); - - { - const result = db.select().from(newYorkers1).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = db.select().from(newYorkers2).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = db.select().from(newYorkers3).all(); - 
t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = db.select({ name: newYorkers1.name }).from(newYorkers1).all(); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - db.run(sql`drop view ${newYorkers1}`); - db.run(sql`drop view ${newYorkers2}`); -}); - -test.serial('insert null timestamp', (t) => { - const { db } = t.context; - - const test = sqliteTable('test', { - t: integer('t', { mode: 'timestamp' }), - }); - - db.run(sql`create table ${test} (t timestamp)`); - - db.insert(test).values({ t: null }).run(); - const res = db.select().from(test).all(); - t.deepEqual(res, [{ t: null }]); - - db.run(sql`drop table ${test}`); -}); - -test.serial('select from raw sql', (t) => { - const { db } = t.context; - - const result = db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`).all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', (t) => { - const { db } = t.context; - - const result = db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) - .all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', (t) => { - const { db } = t.context; - - const result = db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, 
cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', (t) => { - const { db } = t.context; - - const table = sqliteTableCreator((name) => `myprefix_${name}`); - - const users = table('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - db.insert(users).values({ id: 1, name: 'John' }).run(); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as 
"test" from "users2" order by "test"'); -}); - -test.serial('transaction', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = sqliteTable('products_transactions', { - id: integer('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`drop table if exists ${products}`); - - db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); - db.run( - sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, - ); - - const user = db.insert(users).values({ balance: 100 }).returning().get(); - const product = db.insert(products).values({ price: 10, stock: 10 }).returning().get(); - - db.transaction((tx) => { - tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)).run(); - tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)).run(); - }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - db.run(sql`drop table ${users}`); - db.run(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - t.throws(() => - db.transaction((tx) => { - tx.insert(users).values({ balance: 100 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, []); - - 
db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, - ); - - db.transaction((tx) => { - tx.insert(users).values({ balance: 100 }).run(); - - tx.transaction((tx) => { - tx.update(users).set({ balance: 200 }).run(); - }); - }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - db.transaction((tx) => { - tx.insert(users).values({ balance: 100 }).run(); - - t.throws(() => - tx.transaction((tx) => { - tx.update(users).set({ balance: 200 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', (t) => { - const { db } = t.context; - - const internalStaff = sqliteTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = sqliteTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = sqliteTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - db.run(sql`drop table if exists ${internalStaff}`); - db.run(sql`drop table if exists 
${customUser}`); - db.run(sql`drop table if exists ${ticket}`); - - db.run(sql`create table internal_staff (user_id integer not null)`); - db.run(sql`create table custom_user (id integer not null)`); - db.run(sql`create table ticket (staff_id integer not null)`); - - db.insert(internalStaff).values({ userId: 1 }).run(); - db.insert(customUser).values({ id: 1 }).run(); - db.insert(ticket).values({ staffId: 1 }).run(); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) - .all(); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - db.run(sql`drop table ${internalStaff}`); - db.run(sql`drop table ${customUser}`); - db.run(sql`drop table ${ticket}`); -}); - -test.serial('join view as subquery', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_join_view', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`drop view if exists ${newYorkers}`); - - db.run( - sql`create table ${users} (id integer not null primary key, name text not null, city_id integer not null)`, - ); - db.run(sql`create view ${newYorkers} as ${getViewConfig(newYorkers).query}`); - - db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]).run(); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); - - t.deepEqual(result, [ - { - users_join_view: { id: 
1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - db.run(sql`drop view ${newYorkers}`); - db.run(sql`drop table ${users}`); -}); - -test.serial('insert with onConflict do nothing', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing() - .run(); - - const res = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk', (t) => { - const { db } = t.context; - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing() - .run(); - - const res = db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do nothing using target', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }) - .run(); - - const res = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); 
-}); - -test.serial('insert with onConflict do nothing using composite pk as target', (t) => { - const { db } = t.context; - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) - .run(); - - const res = db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do update', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .run(); - - const res = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do update using composite pk', (t) => { - const { db } = t.context; - - db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) - .run(); - - const res = db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john1@example.com' }]); -}); - -test.serial('insert with onConflict do update where', (t) => { - const { db } = t.context; - - db - .insert(usersTable) - .values([{ id: 1, name: 
'John', verified: false }]) - .run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John1', verified: true }) - .onConflictDoUpdate({ - target: usersTable.id, - set: { name: 'John1', verified: true }, - where: eq(usersTable.verified, false), - }) - .run(); - - const res = db - .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1', verified: true }]); -}); - -test.serial('insert undefined', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - t.notThrows(() => db.insert(users).values({ name: undefined }).run()); - - db.run(sql`drop table ${users}`); -}); - -test.serial('update undefined', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - t.throws(() => db.update(users).set({ name: undefined }).run()); - t.notThrows(() => db.update(users).set({ id: 1, name: undefined }).run()); - - db.run(sql`drop table ${users}`); -}); - -test.serial('async api - CRUD', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); - - const res1 = await 
db.select().from(users); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - await db.delete(users).where(eq(users.id, 1)); - - const res2 = await db.select().from(users); - - t.deepEqual(res2, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + async execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + sync execute', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - insertStmt.execute().sync(); - - const selectStmt = db.select().from(users).prepare(); - const res = selectStmt.execute().sync(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 
1)).prepare(); - updateStmt.execute().sync(); - - const res1 = selectStmt.execute().sync(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - deleteStmt.execute().sync(); - - const res2 = selectStmt.execute().sync(); - - t.deepEqual(res2, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('select + .get() for empty result', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const res = db.select().from(users).where(eq(users.id, 1)).get(); - - t.is(res, undefined); - - db.run(sql`drop table ${users}`); -}); - -test.serial('text w/ json mode', (t) => { - const { db } = t.context; - - const test = sqliteTable('test', { - data: text('data', { mode: 'json' }).notNull(), - dataTyped: text('data_typed', { mode: 'json' }).$type<{ a: 1 }>().notNull(), - }); - - db.run(sql`drop table if exists ${test}`); - db.run(sql`create table ${test} (data text not null, data_typed text not null)`); - - db.insert(test).values({ data: { foo: 'bar' }, dataTyped: { a: 1 } }).run(); - - const res = db.select().from(test).get(); - - t.deepEqual(res, { data: { foo: 'bar' }, dataTyped: { a: 1 } }); - - db.run(sql`drop table ${test}`); -}); diff --git a/integration-tests/tests/common.ts b/integration-tests/tests/common.ts new file mode 100644 index 000000000..55daa43ce --- /dev/null +++ b/integration-tests/tests/common.ts @@ -0,0 +1,9 @@ +import { beforeEach } from 'vitest'; + +export function skipTests(names: string[]) { + beforeEach((ctx) => { + if (ctx.task.suite.name === 'common' && names.includes(ctx.task.name)) { + ctx.skip(); + } + }); +} diff --git a/integration-tests/tests/d1.test.ts b/integration-tests/tests/d1.test.ts deleted file mode 100644 index a8877af46..000000000 --- 
a/integration-tests/tests/d1.test.ts +++ /dev/null @@ -1,1840 +0,0 @@ -import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; -import { createSQLiteDB } from '@miniflare/shared'; -import anyTest from 'ava'; -import type { TestFn } from 'ava'; -import { asc, eq, type Equal, gt, inArray, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; -import type { DrizzleD1Database } from 'drizzle-orm/d1'; -import { drizzle } from 'drizzle-orm/d1'; -import { migrate } from 'drizzle-orm/d1/migrator'; -import { - alias, - blob, - getViewConfig, - integer, - sqliteTable, - sqliteTableCreator, - sqliteView, - text, -} from 'drizzle-orm/sqlite-core'; -import { Expect } from './utils.ts'; - -const usersTable = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified').notNull().default(0), - json: blob('json', { mode: 'json' }).$type(), - createdAt: integer('created_at', { mode: 'timestamp' }) - .notNull() - .default(sql`strftime('%s', 'now')`), -}); - -const users2Table = sqliteTable('users2', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const citiesTable = sqliteTable('cities', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const coursesTable = sqliteTable('courses', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = sqliteTable('course_categories', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = sqliteTable('orders', { - id: integer('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const usersMigratorTable = sqliteTable('users12', { - id: integer('id').primaryKey(), - name: 
text('name').notNull(), - email: text('email').notNull(), -}); - -const anotherUsersMigratorTable = sqliteTable('another_users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -interface Context { - d1: D1Database; - db: DrizzleD1Database; -} - -const test = anyTest as TestFn; - -test.before(async (t) => { - const ctx = t.context; - const sqliteDb = await createSQLiteDB(':memory:'); - const db = new D1Database(new D1DatabaseAPI(sqliteDb)); - ctx.d1 = db; - /** - * Casting the type to any due to the following type error - * - * Argument of type 'import("drizzle-orm/node_modules/.pnpm/@miniflare+d1@2.14.0/node_modules/@miniflare/d1/dist/src/index").D1Database' is not assignable to parameter of type 'D1Database'. - * The types returned by 'prepare(...).first(...)' are incompatible between these types. - * Type 'Promise' is not assignable to type 'Promise'. - * Type 'T | null' is not assignable to type 'T'. - * 'T' could be instantiated with an arbitrary type which could be unrelated to 'T | null' - */ - ctx.db = drizzle(db as any); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - - await ctx.db.run(sql`drop table if exists ${usersTable}`); - await ctx.db.run(sql`drop table if exists ${users2Table}`); - await ctx.db.run(sql`drop table if exists ${citiesTable}`); - await ctx.db.run(sql`drop table if exists ${coursesTable}`); - await ctx.db.run(sql`drop table if exists ${courseCategoriesTable}`); - await ctx.db.run(sql`drop table if exists ${orders}`); - - await ctx.db.run(sql` - create table ${usersTable} ( - id integer primary key, - name text not null, - verified integer not null default 0, - json blob, - created_at integer not null default (strftime('%s', 'now')) - ) - `); - await ctx.db.run(sql` - create table ${users2Table} ( - id integer primary key, - name text not null, - city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) - ) - `); - await 
ctx.db.run(sql` - create table ${citiesTable} ( - id integer primary key, - name text not null - ) - `); - await ctx.db.run(sql` - create table ${courseCategoriesTable} ( - id integer primary key, - name text not null - ) - `); - await ctx.db.run(sql` - create table ${coursesTable} ( - id integer primary key, - name text not null, - category_id integer references ${courseCategoriesTable}(${ - sql.identifier( - courseCategoriesTable.id.name, - ) - }) - ) - `); - await ctx.db.run(sql` - create table ${orders} ( - id integer primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select().from(usersTable).all(); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(result, [ - { - id: 1, - name: 'John', - verified: 0, - json: null, - createdAt: result[0]!.createdAt, - }, - ]); -}); - -test.serial('select partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select({ name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - 
}).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('insert returning sql + get()', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).get(); - - t.deepEqual(users, { name: 'JOHN' }); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update returning sql + get()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).get(); - - t.deepEqual(users, { name: 'JANE' }); -}); - -test.serial('insert with auto increment', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'George' }, - { name: 'Austin' }, - ]).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name 
}).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'George' }, - { id: 4, name: 'Austin' }, - ]); -}); - -test.serial('insert with default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: 0, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: 1 }).run(); - const result = await db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: 1, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: 0, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning all fields + get()', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().get(); - - t.assert(users.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users.createdAt.getTime() - now) < 5000); - t.deepEqual(users, { id: 1, name: 'Jane', verified: 0, json: null, createdAt: 
users.createdAt }); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'John', verified: 0, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning all fields + get()', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().get(); - - t.assert(users!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, { id: 1, name: 'John', verified: 0, json: null, createdAt: users!.createdAt }); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('delete with returning partial + get()', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).get(); - - t.deepEqual(users, { id: 1, name: 'John' }); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.insert(usersTable).values({ name: 'Jane' }).run(); - const result2 = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); - /** - * TODO: Fix bug! - * The select below fails with - * SyntaxError { - * message: 'Unexpected non-whitespace character after JSON at position 2', - * } - */ - await t.throwsAsync( - db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - }).from(usersTable).all(), - ); - - // Uncomment when the above bug is fixed - // t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: 1 }, - ]).run(); - - /** - * TODO: Fix bug! 
- * The select below fails with - * SyntaxError { - * message: 'Unexpected non-whitespace character after JSON at position 2', - * } - */ - await t.throwsAsync( - db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }).from(usersTable).all(), - ); - - // Uncomment when the above bug is fixed - // t.deepEqual(result, [ - // { id: 1, name: 'John', json: null, verified: 0 }, - // { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: 0 }, - // { id: 3, name: 'Jane', json: null, verified: 0 }, - // { id: 4, name: 'Austin', json: null, verified: 1 }, - // ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - /** - * TODO: Fix bug! - * The select below fails with - * SyntaxError { - * message: 'Unexpected non-whitespace character after JSON at position 2', - * } - */ - await t.throwsAsync( - db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: 1 }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }) - .all(), - ); - - // Uncomment when the above bug is fixed - // t.deepEqual(result, [ - // { id: 1, name: 'John', json: null, verified: 0 }, - // { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: 0 }, - // { id: 3, name: 'Jane', json: null, verified: 0 }, - // { id: 4, name: 'Austin', json: null, verified: 1 }, - // ]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, 
eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)) - .all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. - * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)) - .all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. 
- * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)) - .all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. 
- * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); - const result = await statement.all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: 1, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.run({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: 1 }, - { id: 2, name: 'John 1', verified: 1 }, - { id: 3, name: 'John 2', verified: 1 }, - { id: 4, name: 'John 3', verified: 1 }, - { id: 5, name: 'John 4', verified: 1 }, - { id: 6, name: 'John 5', verified: 1 }, - { id: 7, name: 'John 6', verified: 1 }, - { id: 8, name: 'John 7', verified: 1 }, - { id: 9, name: 'John 8', verified: 1 }, - { id: 10, name: 'John 9', verified: 1 }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const stmt = db.select({ - id: usersTable.id, - name: 
usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.all({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 
'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('migrator', async (t) => { - const { db } = t.context; - - await db.run(sql`drop table if exists another_users`); - await db.run(sql`drop table if exists users12`); - await db.run(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result = await db.select().from(usersMigratorTable).all(); - - await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result2 = await db.select().from(usersMigratorTable).all(); - - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - t.deepEqual(result2, [{ id: 1, name: 'John', email: 'email' }]); - - await db.run(sql`drop table another_users`); - await db.run(sql`drop table users12`); - await db.run(sql`drop table __drizzle_migrations`); -}); - -test.serial('insert via db.run + select via db.all', async (t) => { - const { db } = t.context; - - await db.run(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); - - const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.get', async (t) => { - 
const { db } = t.context; - - const inserted = await db.get<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - sql.identifier(usersTable.name.name) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.run + select via db.get', async (t) => { - const { db } = t.context; - - await db.run(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); - - const result = await db.get<{ id: number; name: string }>( - sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, - ); - t.deepEqual(result, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.get w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.get( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const allCities = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all(); - const { id: cityId } = allCities[0]!; - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. 
- * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const allCities = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all(); - const { id: cityId } = allCities[0]!; - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = await db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. - * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const allCities = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all(); - const { id: cityId } = allCities[0]!; - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)).all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. 
- * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]).run(); - - await db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]).run(); - - const sq2 = await db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name) - .all(); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... 
select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]).run(); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as int)`, - productSales: sql`cast(sum(${orders.amount}) as int)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product) - .all(); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await 
db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq).all(); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', async (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); -}); - -test.serial('select all fields from subquery without alias', async (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare()); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable).all(); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]).run(); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name) - .all(); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, 
- ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = sqliteView('new_yorkers1') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = sqliteView('new_yorkers2', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = sqliteView('new_yorkers1', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.run(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - await db.run(sql`create view ${newYorkers2} as ${getViewConfig(newYorkers2).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]).run(); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]).run(); - - { - const result = await db.select().from(newYorkers1).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).all(); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.run(sql`drop view ${newYorkers1}`); - await db.run(sql`drop view ${newYorkers2}`); -}); - -test.serial('insert null timestamp', async (t) => { - const { db } = t.context; - - const test = sqliteTable('test', { - t: integer('t', { mode: 'timestamp' }), - 
}); - - await db.run(sql`create table ${test} (t timestamp)`); - - await db.insert(test).values({ t: null }).run(); - const res = await db.select().from(test).all(); - t.deepEqual(res, [{ t: null }]); - - await db.run(sql`drop table ${test}`); -}); - -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`).all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) - .all(); - - Expect>; - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. - * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. 
- * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const table = sqliteTableCreator((name) => `myprefix_${name}`); - - const users = table('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }).run(); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('orderBy with aliased column', async (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - 
}).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = sqliteTable('products_transactions', { - id: integer('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`drop table if exists ${products}`); - - await db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); - await db.run( - sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().get(); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().get(); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)).run(); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)).run(); - }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.run(sql`drop table ${users}`); - await db.run(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - 
await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }).run(); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }).run(); - }); - }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }).run(); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async 
(t) => { - const { db } = t.context; - - const internalStaff = sqliteTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = sqliteTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = sqliteTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.run(sql`drop table if exists ${internalStaff}`); - await db.run(sql`drop table if exists ${customUser}`); - await db.run(sql`drop table if exists ${ticket}`); - - await db.run(sql`create table internal_staff (user_id integer not null)`); - await db.run(sql`create table custom_user (id integer not null)`); - await db.run(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }).run(); - await db.insert(customUser).values({ id: 1 }).run(); - await db.insert(ticket).values({ staffId: 1 }).run(); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) - .all(); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.run(sql`drop table ${internalStaff}`); - await db.run(sql`drop table ${customUser}`); - await db.run(sql`drop table ${ticket}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_join_view', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`drop view if exists ${newYorkers}`); - - await db.run( - sql`create table ${users} (id integer not null 
primary key, name text not null, city_id integer not null)`, - ); - await db.run(sql`create view ${newYorkers} as ${getViewConfig(newYorkers).query}`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]).run(); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); - - /** - * TODO: Fix Bug! The objects should be equal - * - * See #528 for more details. - * Tldr the D1 driver does not execute joins successfully - */ - t.notDeepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.run(sql`drop view ${newYorkers}`); - await db.run(sql`drop table ${users}`); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing() - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }) - .run(); - - const res = await db - .select({ id: 
usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do update where', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([{ id: 1, name: 'John', verified: 0 }]) - .run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John1', verified: 0 }) - .onConflictDoUpdate({ - target: usersTable.id, - set: { name: 'John1', verified: 1 }, - where: eq(usersTable.verified, 0), - }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1', verified: 1 }]); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined }).run()); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - await 
db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await t.throwsAsync(async () => db.update(users).set({ name: undefined }).run()); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined }).run()); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - CRUD', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); - - const res1 = await db.select().from(users); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - await db.delete(users).where(eq(users.id, 1)); - - const res2 = await db.select().from(users); - - t.deepEqual(res2, []); -}); - -test.serial('async api - insert + select w/ prepare + async execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = 
db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); -}); - -test.serial('async api - insert + select w/ prepare + sync execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); -}); diff --git a/integration-tests/tests/imports.test.cjs b/integration-tests/tests/imports.test.cjs deleted file mode 100644 index a7b11ff80..000000000 --- a/integration-tests/tests/imports.test.cjs +++ /dev/null @@ -1,55 +0,0 @@ -require('drizzle-orm'); -require('drizzle-orm/aws-data-api/pg'); -require('drizzle-orm/aws-data-api/pg/migrator'); -require('drizzle-orm/better-sqlite3'); -require('drizzle-orm/better-sqlite3/migrator'); -require('drizzle-orm/bun-sqlite'); -require('drizzle-orm/bun-sqlite/migrator'); -require('drizzle-orm/d1'); -require('drizzle-orm/d1/migrator'); -require('drizzle-orm/knex'); -require('drizzle-orm/kysely'); -require('drizzle-orm/libsql'); -require('drizzle-orm/libsql/migrator'); -require('drizzle-orm/mysql-core'); -require('drizzle-orm/mysql2'); -require('drizzle-orm/mysql2/migrator'); 
-require('drizzle-orm/neon-serverless'); -require('drizzle-orm/neon-serverless/migrator'); -require('drizzle-orm/node-postgres'); -require('drizzle-orm/node-postgres/migrator'); -const { pgTable, serial } = require('drizzle-orm/pg-core'); -require('drizzle-orm/planetscale-serverless'); -require('drizzle-orm/planetscale-serverless/migrator'); -require('drizzle-orm/postgres-js'); -require('drizzle-orm/postgres-js/migrator'); -require('drizzle-orm/sql-js'); -require('drizzle-orm/sql-js/migrator'); -require('drizzle-orm/sqlite-core'); -require('drizzle-orm/sqlite-proxy'); -require('drizzle-orm/sqlite-proxy/migrator'); -require('drizzle-orm/pg-proxy'); -require('drizzle-orm/pg-proxy/migrator'); -require('drizzle-orm/mysql-proxy'); -require('drizzle-orm/mysql-proxy/migrator'); -require('drizzle-orm/migrator'); -const { createInsertSchema: createZodInsertSchema } = require('drizzle-zod'); -const { - createInsertSchema: createTypeboxInsertSchema, -} = require('drizzle-typebox'); -const { - createInsertSchema: createValibotInsertSchema, -} = require('drizzle-valibot'); -const { compatibilityVersion, npmVersion } = require('drizzle-orm/version'); -const { strict: assert } = require('node:assert'); - -assert.equal(typeof compatibilityVersion, 'number'); -assert.equal(typeof npmVersion, 'string'); - -const test = pgTable('test', { - id: serial('id').primaryKey(), -}); - -const zodInsertSchema = createZodInsertSchema(test); -const typeboxInsertSchema = createTypeboxInsertSchema(test); -const valibotInsertSchema = createValibotInsertSchema(test); diff --git a/integration-tests/tests/imports.test.mjs b/integration-tests/tests/imports.test.mjs deleted file mode 100644 index 07e06b331..000000000 --- a/integration-tests/tests/imports.test.mjs +++ /dev/null @@ -1,47 +0,0 @@ -import 'drizzle-orm'; -import 'drizzle-orm/aws-data-api/pg'; -import 'drizzle-orm/aws-data-api/pg/migrator'; -import 'drizzle-orm/better-sqlite3'; -import 'drizzle-orm/better-sqlite3/migrator'; -import 
'drizzle-orm/bun-sqlite'; -import 'drizzle-orm/bun-sqlite/migrator'; -import 'drizzle-orm/d1'; -import 'drizzle-orm/d1/migrator'; -import 'drizzle-orm/knex'; -import 'drizzle-orm/kysely'; -import 'drizzle-orm/libsql'; -import 'drizzle-orm/libsql/migrator'; -import 'drizzle-orm/mysql-core'; -import 'drizzle-orm/mysql2'; -import 'drizzle-orm/mysql2/migrator'; -import 'drizzle-orm/neon-serverless'; -import 'drizzle-orm/neon-serverless/migrator'; -import 'drizzle-orm/node-postgres'; -import 'drizzle-orm/node-postgres/migrator'; -import { pgTable, serial } from 'drizzle-orm/pg-core'; -import 'drizzle-orm/planetscale-serverless'; -import 'drizzle-orm/planetscale-serverless/migrator'; -import 'drizzle-orm/postgres-js'; -import 'drizzle-orm/postgres-js/migrator'; -import 'drizzle-orm/sql-js'; -import 'drizzle-orm/sql-js/migrator'; -import 'drizzle-orm/sqlite-core'; -import 'drizzle-orm/sqlite-proxy'; -import 'drizzle-orm/sqlite-proxy/migrator'; -import 'drizzle-orm/pg-proxy'; -import 'drizzle-orm/pg-proxy/migrator'; -import 'drizzle-orm/mysql-proxy'; -import 'drizzle-orm/mysql-proxy/migrator'; -import 'drizzle-orm/migrator'; -import { compatibilityVersion, npmVersion } from 'drizzle-orm/version'; -import { createInsertSchema } from 'drizzle-zod'; -import { strict as assert } from 'node:assert'; - -assert.equal(typeof compatibilityVersion, 'number'); -assert.equal(typeof npmVersion, 'string'); - -const test = pgTable('test', { - id: serial('id').primaryKey(), -}); - -const insertSchema = createInsertSchema(test); diff --git a/integration-tests/tests/libsql.test.ts b/integration-tests/tests/libsql.test.ts deleted file mode 100644 index d57904626..000000000 --- a/integration-tests/tests/libsql.test.ts +++ /dev/null @@ -1,2800 +0,0 @@ -import 'dotenv/config'; - -import { type Client, createClient } from '@libsql/client'; -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import { - and, - asc, - avg, - avgDistinct, - count, - countDistinct, - eq, - exists, - 
getTableColumns, - gt, - gte, - inArray, - type InferModel, - lt, - max, - min, - Name, - name, - placeholder, - sql, - sum, - sumDistinct, - TransactionRollbackError, -} from 'drizzle-orm'; -import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; -import { migrate } from 'drizzle-orm/libsql/migrator'; -import { - alias, - blob, - except, - foreignKey, - getTableConfig, - getViewConfig, - int, - integer, - intersect, - numeric, - primaryKey, - sqliteTable, - sqliteTableCreator, - sqliteView, - text, - union, - unionAll, -} from 'drizzle-orm/sqlite-core'; -import { type Equal, Expect, randomString } from './utils.ts'; - -const ENABLE_LOGGING = false; - -interface Context { - client: Client; - db: LibSQLDatabase; -} - -const test = anyTest as TestFn; - -const usersTable = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified', { mode: 'boolean' }).notNull().default(false), - json: blob('json', { mode: 'json' }).$type(), - createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), -}); - -const usersOnUpdate = sqliteTable('users_on_update', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), - updatedAt: integer('updated_at', { mode: 'timestamp_ms' }).$onUpdate(() => new Date()), - alwaysNull: text('always_null').$type().$onUpdate(() => null), - // uppercaseName: text('uppercase_name').$onUpdateFn(() => - // sql`upper(s.name)` - // ), This doesn't seem to be supported in sqlite -}); - -const users2Table = sqliteTable('users2', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const citiesTable = sqliteTable('cities', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const coursesTable = 
sqliteTable('courses', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = sqliteTable('course_categories', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = sqliteTable('orders', { - id: integer('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const usersMigratorTable = sqliteTable('users12', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const anotherUsersMigratorTable = sqliteTable('another_users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const pkExampleTable = sqliteTable('pk_example', { - id: integer('id').notNull(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey(table.id, table.name), -})); - -const bigIntExample = sqliteTable('big_int_example', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - bigInt: blob('big_int', { mode: 'bigint' }).notNull(), -}); - -// To test aggregate functions -const aggregateTable = sqliteTable('aggregate_table', { - id: integer('id').primaryKey({ autoIncrement: true }).notNull(), - name: text('name').notNull(), - a: integer('a'), - b: integer('b'), - c: integer('c'), - nullOnly: integer('null_only'), -}); - -test.before(async (t) => { - const ctx = t.context; - const url = process.env['LIBSQL_URL']; - const authToken = process.env['LIBSQL_AUTH_TOKEN']; - if (!url) { - throw new Error('LIBSQL_URL is not set'); - } - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = createClient({ url, authToken }); - 
connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to libsql'); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always(async (t) => { - t.context.client.close(); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - - await ctx.db.run(sql`drop table if exists ${usersTable}`); - await ctx.db.run(sql`drop table if exists ${users2Table}`); - await ctx.db.run(sql`drop table if exists ${citiesTable}`); - await ctx.db.run(sql`drop table if exists ${coursesTable}`); - await ctx.db.run(sql`drop table if exists ${courseCategoriesTable}`); - await ctx.db.run(sql`drop table if exists ${orders}`); - await ctx.db.run(sql`drop table if exists ${bigIntExample}`); - await ctx.db.run(sql`drop table if exists ${pkExampleTable}`); - - await ctx.db.run(sql` - create table ${usersTable} ( - id integer primary key, - name text not null, - verified integer not null default 0, - json blob, - created_at integer not null default (strftime('%s', 'now')) - ) - `); - - await ctx.db.run(sql` - create table ${citiesTable} ( - id integer primary key, - name text not null - ) - `); - await ctx.db.run(sql` - create table ${courseCategoriesTable} ( - id integer primary key, - name text not null - ) - `); - - await ctx.db.run(sql` - create table ${users2Table} ( - id integer primary key, - name text not null, - city_id integer references ${citiesTable}(${name(citiesTable.id.name)}) - ) - `); - await ctx.db.run(sql` - create table ${coursesTable} ( - id integer primary key, - name text not null, - category_id integer references ${courseCategoriesTable}(${name(courseCategoriesTable.id.name)}) - ) - `); - await ctx.db.run(sql` - create table ${orders} ( - id integer primary key, - region text not null, - product text not null, - amount integer not null, - quantity 
integer not null - ) - `); - await ctx.db.run(sql` - create table ${pkExampleTable} ( - id integer not null, - name text not null, - email text not null, - primary key (id, name) - ) - `); - await ctx.db.run(sql` - create table ${bigIntExample} ( - id integer primary key, - name text not null, - big_int blob not null - ) - `); -}); - -async function setupSetOperationTest(db: LibSQLDatabase>) { - await db.run(sql`drop table if exists users2`); - await db.run(sql`drop table if exists cities`); - await db.run(sql` - create table \`cities\` ( - id integer primary key, - name text not null - ) - `); - - await db.run(sql` - create table \`users2\` ( - id integer primary key, - name text not null, - city_id integer references ${citiesTable}(${name(citiesTable.id.name)}) - ) - `); - - await db.insert(citiesTable).values([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 3 }, - { id: 4, name: 'Peter', cityId: 3 }, - { id: 5, name: 'Ben', cityId: 2 }, - { id: 6, name: 'Jill', cityId: 1 }, - { id: 7, name: 'Mary', cityId: 2 }, - { id: 8, name: 'Sally', cityId: 1 }, - ]); -} - -async function setupAggregateFunctionsTest(db: LibSQLDatabase>) { - await db.run(sql`drop table if exists "aggregate_table"`); - await db.run( - sql` - create table "aggregate_table" ( - "id" integer primary key autoincrement not null, - "name" text not null, - "a" integer, - "b" integer, - "c" integer, - "null_only" integer - ); - `, - ); - await db.insert(aggregateTable).values([ - { name: 'value 1', a: 5, b: 10, c: 20 }, - { name: 'value 1', a: 5, b: 20, c: 30 }, - { name: 'value 2', a: 10, b: 50, c: 60 }, - { name: 'value 3', a: 20, b: 20, c: null }, - { name: 'value 4', a: null, b: 90, c: 120 }, - { name: 'value 5', a: 80, b: 10, c: null }, - { name: 'value 6', a: null, b: null, c: 150 }, - ]); -} - 
-test.serial('table config: foreign keys name', async (t) => { - const table = sqliteTable('cities', { - id: int('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - f1: foreignKey(() => ({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk_deprecated' })), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.foreignKeys.length, 2); - t.is(tableConfig.foreignKeys[0]!.getName(), 'custom_fk'); - t.is(tableConfig.foreignKeys[1]!.getName(), 'custom_fk_deprecated'); -}); - -test.serial('table config: primary keys name', async (t) => { - const table = sqliteTable('cities', { - id: int('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.primaryKeys.length, 1); - t.is(tableConfig.primaryKeys[0]!.getName(), 'custom_pk'); -}); - -test.serial('insert bigint values', async (t) => { - const { db } = t.context; - - await db.insert(bigIntExample).values({ name: 'one', bigInt: BigInt('0') }).run(); - await db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); - await db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); - await db.insert(bigIntExample).values({ name: 'four', bigInt: BigInt('1234567890') }).run(); - await db.insert(bigIntExample).values({ name: 'five', bigInt: BigInt('12345678900987654321') }).run(); - - const result = await db.select().from(bigIntExample).all(); - t.deepEqual(result, [ - { id: 1, name: 'one', bigInt: BigInt('0') }, - { id: 2, name: 'two', bigInt: BigInt('127') }, - { id: 3, name: 'three', bigInt: BigInt('32767') }, - { id: 4, name: 'four', bigInt: BigInt('1234567890') }, - { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, - ]); -}); - -test.serial('select 
all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select().from(usersTable).all(); - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select({ name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = sqliteTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${usersDistinctTable}`); - await db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]).run(); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - 
usersDistinctTable.name, - ).all(); - - await db.run(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; - - await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('query check: insert single empty row', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (null, ?, null)', - params: ['Dan'], - }); -}); - -test.serial('query check: insert multiple empty rows', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (null, ?, null), 
(null, ?, null)', - params: ['Dan', 'Dan'], - }); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = sqliteTable('empty_insert_single', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values({}).run(); - - const res = await db.select().from(users).all(); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = sqliteTable('empty_insert_multiple', { - id: integer('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values([{}, {}]).run(); - - const res = await db.select().from(users).all(); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('insert with auto increment', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'George' }, - { name: 'Austin' }, - ]).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 
'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'George' }, - { id: 4, name: 'Austin' }, - ]); -}); - -test.serial('insert with default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }).run(); - const result = await db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await 
db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.insert(usersTable).values({ name: 'Jane' }).run(); - const result2 = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]).run(); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - 
verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }) - .all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} 
(id integer primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)) - .all(); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)) - .all(); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); - const result = await statement.all(); - - 
t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.run({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.all({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with exists', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const user = alias(usersTable, 'user'); - 
const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists(db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id)))), - ).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = 
t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('migrator', async (t) => { - const { db } = t.context; - - await db.run(sql`drop table if exists another_users`); - await db.run(sql`drop table if exists users12`); - await db.run(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result = await db.select().from(usersMigratorTable).all(); - - await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result2 = await db.select().from(anotherUsersMigratorTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - t.deepEqual(result2, [{ id: 1, name: 'John', email: 'email' }]); - - await db.run(sql`drop table another_users`); - await db.run(sql`drop table users12`); - await db.run(sql`drop table __drizzle_migrations`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - await db.run(sql`drop table if exists another_users`); - await db.run(sql`drop table if exists users12`); - await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); - - // test if the custom migrations table was created - const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); - t.true(res.length > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - 
t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.run(sql`drop table another_users`); - await db.run(sql`drop table users12`); - await db.run(sql`drop table ${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.run + select via db.all', async (t) => { - const { db } = t.context; - - await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.get', async (t) => { - const { db } = t.context; - - const inserted = await db.get<{ id: number; name: string }>( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.run + select via db.get', async (t) => { - const { db } = t.context; - - await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.get<{ id: number; name: string }>( - sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, - ); - t.deepEqual(result, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.get w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.get, 'id' | 'name'>>( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all().then((res) => res[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - 
const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all().then((res) => res[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = await db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all().then((res) => res[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)).all(); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: 
null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]).run(); - - await db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]).run(); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name) - .all(); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... 
select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]).run(); - - const regionalSales = await db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = await db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as int)`, - productSales: sql`cast(sum(${orders.amount}) as int)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product) - .all(); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('with ... 
update', async (t) => { - const { db } = t.context; - - const products = sqliteTable('products', { - id: integer('id').primaryKey(), - price: numeric('price').notNull(), - cheap: integer('cheap', { mode: 'boolean' }).notNull().default(false), - }); - - await db.run(sql`drop table if exists ${products}`); - await db.run(sql` - create table ${products} ( - id integer primary key, - price numeric not null, - cheap integer not null default 0 - ) - `); - - await db.insert(products).values([ - { price: '10.99' }, - { price: '25.85' }, - { price: '32.99' }, - { price: '2.50' }, - { price: '4.59' }, - ]); - - const averagePrice = db - .$with('average_price') - .as( - db - .select({ - value: sql`avg(${products.price})`.as('value'), - }) - .from(products), - ); - - const result = await db - .with(averagePrice) - .update(products) - .set({ - cheap: true, - }) - .where(lt(products.price, sql`(select * from ${averagePrice})`)) - .returning({ - id: products.id, - }); - - t.deepEqual(result, [ - { id: 1 }, - { id: 4 }, - { id: 5 }, - ]); -}); - -test.serial('with ... insert', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - username: text('username').notNull(), - admin: integer('admin', { mode: 'boolean' }).notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} (username text not null, admin integer not null default 0)`); - - const userCount = db - .$with('user_count') - .as( - db - .select({ - value: sql`count(*)`.as('value'), - }) - .from(users), - ); - - const result = await db - .with(userCount) - .insert(users) - .values([ - { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, - ]) - .returning({ - admin: users.admin, - }); - - t.deepEqual(result, [{ admin: true }]); -}); - -test.serial('with ... 
delete', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const averageAmount = db - .$with('average_amount') - .as( - db - .select({ - value: sql`avg(${orders.amount})`.as('value'), - }) - .from(orders), - ); - - const result = await db - .with(averageAmount) - .delete(orders) - .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) - .returning({ - id: orders.id, - }); - - t.deepEqual(result, [ - { id: 6 }, - { id: 7 }, - { id: 8 }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq).all(); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare()); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable).all(); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]).run(); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name) - .all(); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = sqliteView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = sqliteView('new_yorkers', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = sqliteView('new_yorkers', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.run(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]).run(); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, 
- { name: 'Jack', cityId: 2 }, - ]).run(); - - { - const result = await db.select().from(newYorkers1).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).all(); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.run(sql`drop view ${newYorkers1}`); -}); - -test.serial('insert null timestamp', async (t) => { - const { db } = t.context; - - const test = sqliteTable('test', { - t: integer('t', { mode: 'timestamp' }), - }); - - await db.run(sql`create table ${test} (t timestamp)`); - - await db.insert(test).values({ t: null }).run(); - const res = await db.select().from(test).all(); - t.deepEqual(res, [{ t: null }]); - - await db.run(sql`drop table ${test}`); -}); - -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`).all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`.as('userName'), - userCity: sql`users.city`, - cityName: sql`cities.name`.as('cityName'), - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) - .all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, 
name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`.as('userName'), - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`.as('cityName'), - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); - - const users = sqliteTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await 
db.run( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }).run(); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = sqliteTable('products_transactions', { - id: integer('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`drop table if exists ${products}`); - - await db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); - await db.run( - sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().get(); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().get(); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)).run(); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)).run(); - }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.run(sql`drop table 
${users}`); - await db.run(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }).run(); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }).run(); - }); - }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not 
null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }).run(); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = sqliteTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = sqliteTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = sqliteTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.run(sql`drop table if exists ${internalStaff}`); - await db.run(sql`drop table if exists ${customUser}`); - await db.run(sql`drop table if exists ${ticket}`); - - await db.run(sql`create table internal_staff (user_id integer not null)`); - await db.run(sql`create table custom_user (id integer not null)`); - await db.run(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }).run(); - await db.insert(customUser).values({ id: 1 }).run(); - await db.insert(ticket).values({ staffId: 1 }).run(); - - const subq = await db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) - .all(); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.run(sql`drop table ${internalStaff}`); - await db.run(sql`drop table ${customUser}`); - await db.run(sql`drop table ${ticket}`); -}); - 
-test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users_join_view', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`drop view if exists ${newYorkers}`); - - await db.run( - sql`create table ${users} (id integer not null primary key, name text not null, city_id integer not null)`, - ); - await db.run(sql`create view ${newYorkers} as ${getViewConfig(newYorkers).query}`); - - db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]).run(); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.run(sql`drop view ${newYorkers}`); - await db.run(sql`drop table ${users}`); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing() - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 
'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk', async (t) => { - const { db } = t.context; - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing() - .run(); - - const res = await db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do nothing using target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk as target', async (t) => { - const { db } = t.context; - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) - .run(); - - const res = await db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' 
}).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do update where', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([{ id: 1, name: 'John', verified: false }]) - .run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John1', verified: true }) - .onConflictDoUpdate({ - target: usersTable.id, - set: { name: 'John1', verified: true }, - where: eq(usersTable.verified, false), - }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1', verified: true }]); -}); - -test.serial('insert with onConflict do update using composite pk', async (t) => { - const { db } = t.context; - - await db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) - .run(); - - const res = await db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john1@example.com' }]); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - 
sql`create table ${users} (id integer primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined }).run()); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined }).run()); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined }).run()); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - CRUD', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); - - const res1 = await db.select().from(users); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - await db.delete(users).where(eq(users.id, 1)); - - const res2 = await db.select().from(users); - - t.deepEqual(res2, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + async execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 
1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + sync execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('select + .get() for empty result', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table 
${users} (id integer primary key, name text)`, - ); - - const res = await db.select().from(users).where(eq(users.id, 1)).get(); - - t.is(res, undefined); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('set operations (union) from query builder with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const sq = db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table), - ).orderBy(asc(sql`name`)).as('sq'); - - const result = await db.select().from(sq).limit(5).offset(5); - - t.assert(result.length === 5); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 7, name: 'Mary' }, - { id: 1, name: 'New York' }, - { id: 4, name: 'Peter' }, - { id: 8, name: 'Sally' }, - ]); - - t.throws(() => { - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (union) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`name`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - 
.from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (union all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable), - ).orderBy(asc(citiesTable.id)).limit(5).offset(1); - - t.assert(result.length === 5); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable), - ).orderBy(asc(citiesTable.id)).limit(5).offset(1); - }); -}); - -test.serial('set operations (union all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 3); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - 
.from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (intersect) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`name`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (intersect) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 0); - - t.deepEqual(result, []); - - t.throws(() => { - intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (except) from 
query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - db - .select() - .from(citiesTable).except( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - }); -}); - -test.serial('set operations (except) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await except( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - except( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (mixed) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ), - ); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, 
name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).where(eq(citiesTable.id, 2)), - ), - ); - }); -}); - -test.serial('set operations (mixed all) as function with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const sq = union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)).as('sq'); - - const result = await db.select().from(sq).limit(4).offset(1); - - t.assert(result.length === 4); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - ]); - - t.throws(() => { - union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('aggregate function: count', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: count() }).from(table); - 
const result2 = await db.select({ value: count(table.a) }).from(table); - const result3 = await db.select({ value: countDistinct(table.name) }).from(table); - - t.deepEqual(result1[0]?.value, 7); - t.deepEqual(result2[0]?.value, 5); - t.deepEqual(result3[0]?.value, 6); -}); - -test.serial('aggregate function: avg', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: avg(table.a) }).from(table); - const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); - const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '24'); - t.deepEqual(result2[0]?.value, null); - t.deepEqual(result3[0]?.value, '42.5'); -}); - -test.serial('aggregate function: sum', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: sum(table.b) }).from(table); - const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); - const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '200'); - t.deepEqual(result2[0]?.value, null); - t.deepEqual(result3[0]?.value, '170'); -}); - -test.serial('aggregate function: max', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: max(table.b) }).from(table); - const result2 = await db.select({ value: max(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 90); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('aggregate function: min', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: min(table.b) }).from(table); - const result2 = await db.select({ value: 
min(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 10); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('test $onUpdateFn and $onUpdate works as $default', async (t) => { - const { db } = t.context; - - await db.run(sql`drop table if exists ${usersOnUpdate}`); - - await db.run( - sql` - create table ${usersOnUpdate} ( - id integer primary key autoincrement, - name text not null, - update_counter integer default 1 not null, - updated_at integer, - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - t.deepEqual(response, [ - { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 250; - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); - -test.serial('test $onUpdateFn and $onUpdate works updating', async (t) => { - const { db } = t.context; - - await db.run(sql`drop table if exists ${usersOnUpdate}`); - - await db.run( - sql` - create table ${usersOnUpdate} ( - id integer primary key autoincrement, - name text not null, - update_counter integer default 1, - updated_at integer, - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John', alwaysNull: 'this will be null after updating' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - await 
db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); - await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - t.deepEqual(response, [ - { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 250; - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); diff --git a/integration-tests/tests/mysql-proxy.test.ts b/integration-tests/tests/mysql-proxy.test.ts deleted file mode 100644 index fdf6f17f0..000000000 --- a/integration-tests/tests/mysql-proxy.test.ts +++ /dev/null @@ -1,2122 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { asc, eq, gt, inArray, Name, placeholder, sql } from 'drizzle-orm'; -import { - alias, - boolean, - date, - datetime, - getTableConfig, - getViewConfig, - int, - json, - mysqlEnum, - mysqlTable, - mysqlTableCreator, - mysqlView, - serial, - text, - time, - timestamp, - unique, - uniqueIndex, - uniqueKeyName, - year, -} from 'drizzle-orm/mysql-core'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/mysql-proxy'; -import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; -import { migrate } from 'drizzle-orm/mysql-proxy/migrator'; -import getPort from 'get-port'; -import * as mysql from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, toLocalDate } from './utils.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = mysqlTable('userstest', { - id: 
serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const users2Table = mysqlTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').references(() => citiesTable.id), -}); - -const citiesTable = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const datesTable = mysqlTable('datestable', { - date: date('date'), - dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time', { fsp: 1 }), - datetime: datetime('datetime', { fsp: 2 }), - datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), - timestamp: timestamp('timestamp', { fsp: 3 }), - timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), - year: year('year'), -}); - -const coursesTable = mysqlTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: int('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = mysqlTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = mysqlTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: int('amount').notNull(), - quantity: int('quantity').notNull(), -}); - -const usersMigratorTable = mysqlTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => { - return { - name: uniqueIndex('').on(table.name).using('btree'), - }; -}); - -// eslint-disable-next-line drizzle/require-entity-kind -class ServerSimulator { - constructor(private db: mysql.Connection) {} - - async query(sql: string, params: any[], method: 'all' | 
'execute') { - if (method === 'all') { - try { - const result = await this.db.query({ - sql, - values: params, - rowsAsArray: true, - typeCast: function(field: any, next: any) { - if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { - return field.string(); - } - return next(); - }, - }); - - return { data: result[0] as any }; - } catch (e: any) { - return { error: e }; - } - } else if (method === 'execute') { - try { - const result = await this.db.query({ - sql, - values: params, - typeCast: function(field: any, next: any) { - if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { - return field.string(); - } - return next(); - }, - }); - - return { data: result as any }; - } catch (e: any) { - return { error: e }; - } - } else { - return { error: 'Unknown method value' }; - } - } - - async migrations(queries: string[]) { - await this.db.query('START TRANSACTION'); - try { - for (const query of queries) { - await this.db.query(query); - } - await this.db.query('COMMIT'); - } catch (e) { - await this.db.query('ROLLBACK'); - throw e; - } - - return {}; - } -} - -interface Context { - docker: Docker; - mysqlContainer: Docker.Container; - db: MySqlRemoteDatabase; - client: mysql.Connection; - serverSimulator: ServerSimulator; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) - ); - - ctx.mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(ctx); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = await mysql.createConnection(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); - throw lastError; - } - - ctx.serverSimulator = new ServerSimulator(ctx.client); - - ctx.db = proxyDrizzle(async (sql, params, method) => { - try { - const response = await ctx.serverSimulator.query(sql, params, method); - - if (response.error !== undefined) { - throw response.error; - } - - return { rows: response.data }; - } catch (e: any) { - console.error('Error from mysql proxy server:', e.message); - throw e; - } - }, { logger: ENABLE_LOGGING }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - try { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists \`userstest\``); - await ctx.db.execute(sql`drop table if exists \`users2\``); - await 
ctx.db.execute(sql`drop table if exists \`cities\``); - - await ctx.db.execute( - sql` - create table \`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int references \`cities\`(\`id\`) - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - } catch (error) { - console.log('error', error); - throw error; - } -}); - -test.serial('table configs: unique third param', async (t) => { - const cities1Table = mysqlTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state), - f1: unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - t.assert(tableConfig.uniqueConstraints.length === 2); - - t.assert(tableConfig.uniqueConstraints[0]?.name === 'custom_name'); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); - - t.assert(tableConfig.uniqueConstraints[1]?.name, 'custom_name1'); - t.deepEqual(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name), ['name', 'state']); -}); - -test.serial('table configs: unique in column', async (t) => { - const cities1Table = mysqlTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: text('state').unique('custom'), - field: text('field').unique('custom_field'), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - t.assert(columnName?.uniqueName === uniqueKeyName(cities1Table, [columnName!.name])); - t.assert(columnName?.isUnique); - - 
const columnState = tableConfig.columns.find((it) => it.name === 'state'); - t.assert(columnState?.uniqueName === 'custom'); - t.assert(columnState?.isUnique); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - t.assert(columnField?.uniqueName === 'custom_field'); - t.assert(columnField?.isUnique); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = mysqlTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await 
db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const [result, _] = await db.insert(usersTable).values({ name: 'John' }); - - t.deepEqual(result.insertId, 1); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(users[0].affectedRows, 1); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - t.is(users[0].changedRows, 1); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - - t.is(updatedUsers[0].changedRows, 1); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' 
}).where(eq(usersTable.name, 'John')); - - const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(updatedUsers[0].changedRows, 1); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser[0].affectedRows, 1); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser[0].affectedRows, 1); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', 
verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - - t.is(result[0].affectedRows, 4); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; 
- - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('$default with empty array', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`s_orders\``); - await db.execute( - sql` - create table \`s_orders\` ( - \`id\` serial primary key, - \`region\` text default ('Ukraine'), - \`product\` text not null - ) - `, - ); - - const users = mysqlTable('s_orders', { - id: serial('id').primaryKey(), - region: text('region').default('Ukraine'), - product: text('product').$defaultFn(() => 'random_string'), - }); - - await db.insert(users).values({}); - const selectedOrder = await db.select().from(users); - - t.deepEqual(selectedOrder, [{ - id: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - 
t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, - params: [], - }); -}); - -test.serial('Query check: Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', - params: [], - }); -}); - -test.serial('Query check: Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', - params: [], - }); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = mysqlTable('empty_insert_single', { - id: 
serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = mysqlTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('build query insert with onDuplicate', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('insert with onDuplicate', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); 
- -test.serial('insert conflict', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await t.throwsAsync( - () => db.insert(usersTable).values({ id: 1, name: 'John1' }), - { - code: 'ER_DUP_ENTRY', - message: "Duplicate entry '1' for key 'userstest.PRIMARY'", - }, - ); -}); - -test.serial('insert conflict with ignore', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .ignore() - .values({ id: 1, name: 'John1' }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await 
db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare(); - const result = await statement.execute(); - 
- t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('migrator', async (t) => { - const { db, serverSimulator } = t.context; - - await db.execute(sql`drop table if exists userstest`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, async (queries) => { - try { - await serverSimulator.migrations(queries); - } catch (e) { - console.error(e); - throw new Error('Proxy server cannot run migrations'); - } - }, { migrationsFolder: './drizzle2/mysql-proxy/first' }); - - await t.notThrowsAsync(async () => { - await db.insert(usersTable).values({ name: 
'John' }); - }); - - await t.throwsAsync(async () => { - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - }, { - message: "Table 'drizzle.users12' doesn't exist", - }); - - await migrate(db, async (queries) => { - try { - await serverSimulator.migrations(queries); - } catch (e) { - console.error(e); - throw new Error('Proxy server cannot run migrations'); - } - }, { migrationsFolder: './drizzle2/mysql-proxy/second' }); - - await t.notThrowsAsync(async () => { - await db.insert(usersTable).values({ name: 'John' }); - }); - - await t.notThrowsAsync(async () => { - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - }); - - await db.execute(sql`drop table userstest`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table __drizzle_migrations`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - t.deepEqual(result[0], [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute( - db.insert(usersTable).values({ name: 'John' }), - ); - t.is(inserted[0].affectedRows, 1); -}); - -test.serial('insert + select all possible dates', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`timestamp\` timestamp(3), - \`timestamp_as_string\` timestamp(3), - \`year\` year - ) - `, - ); - - const date = new Date('2022-11-11'); - const dateWithMilliseconds = new Date('2022-11-11 
12:12:12.123'); - - await db.insert(datesTable).values({ - date: date, - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: date, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: dateWithMilliseconds, - timestampAsString: '2022-11-11 12:12:12.123', - }); - - const res = await db.select().from(datesTable); - - t.assert(res[0]?.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.dateAsString === 'string'); - t.assert(typeof res[0]?.datetimeAsString === 'string'); - - t.deepEqual(res, [{ - date: toLocalDate(new Date('2022-11-11')), - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: new Date('2022-11-11'), - year: 2022, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: new Date('2022-11-11 12:12:12.123'), - timestampAsString: '2022-11-11 12:12:12.123', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); -}); - -const tableWithEnums = mysqlTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), - enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), -}); - -test.serial('Mysql enum test case #1', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`enums_test_case\``); - - await db.execute(sql` - create table \`enums_test_case\` ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithEnums).values([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum3: 'c' }, - { id: 3, enum1: 'a' }, - ]); - - const res = await db.select().from(tableWithEnums); - - await db.execute(sql`drop table \`enums_test_case\``); - - t.deepEqual(res, 
[ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, 
eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId: 1, - }, - cities: { - id: 1, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); - - await db.execute( - sql` - create table \`course_categories\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.execute( - sql` - create table \`courses\` ( - \`id\` serial primary key, - \`name\` text not null, - \`category_id\` int references \`course_categories\`(\`id\`) - ) - `, - ); - - await db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists 
\`course_categories\``); -}); - -test.serial('with ... select', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, - productSales: sql`cast(sum(${orders.amount}) as unsigned)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - 
product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare()); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db.select().from(users2Table).for('update').toSQL(); - t.regex(query.sql, / for update$/); - } - { - const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); - t.regex(query.sql, / for share skip locked$/); - } - { - const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); - t.regex(query.sql, / for update no wait$/); - } -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' 
}, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = mysqlView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 
}, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: 
sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); - - const users = mysqlTable('test_prefixed_table_with_unique_name', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as `test` from `users2` order by `test`'); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); - - await db.insert(usersTable).values({ name: 'With default times' }); - await 
db.insert(usersTable).values({ - name: 'Without default times', - createdAt: date, - }); - const users = await db.select().from(usersTable); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -// TODO: implement transactions -// test.serial('transaction', async (t) => { -// const { db } = t.context; - -// const users = mysqlTable('users_transactions', { -// id: serial('id').primaryKey(), -// balance: int('balance').notNull(), -// }); -// const products = mysqlTable('products_transactions', { -// id: serial('id').primaryKey(), -// price: int('price').notNull(), -// stock: int('stock').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); -// await db.execute(sql`drop table if exists ${products}`); - -// await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); -// await db.execute( -// sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, -// ); - -// const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); -// const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); -// const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); -// const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); - -// await db.transaction(async (tx) => { -// await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); -// await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); -// }); - -// const result = await db.select().from(users); - -// t.deepEqual(result, [{ id: 1, balance: 90 
}]); - -// await db.execute(sql`drop table ${users}`); -// await db.execute(sql`drop table ${products}`); -// }); - -// TODO: implement transactions -// test.serial('transaction rollback', async (t) => { -// const { db } = t.context; - -// const users = mysqlTable('users_transactions_rollback', { -// id: serial('id').primaryKey(), -// balance: int('balance').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); - -// await db.execute( -// sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, -// ); - -// await t.throwsAsync(async () => -// await db.transaction(async (tx) => { -// await tx.insert(users).values({ balance: 100 }); -// tx.rollback(); -// }), new TransactionRollbackError()); - -// const result = await db.select().from(users); - -// t.deepEqual(result, []); - -// await db.execute(sql`drop table ${users}`); -// }); - -// TODO: implement transactions -// test.serial('nested transaction', async (t) => { -// const { db } = t.context; - -// const users = mysqlTable('users_nested_transactions', { -// id: serial('id').primaryKey(), -// balance: int('balance').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); - -// await db.execute( -// sql`create table users_nested_transactions (id serial not null primary key, balance int not null)`, -// ); - -// await db.transaction(async (tx) => { -// await tx.insert(users).values({ balance: 100 }); - -// await tx.transaction(async (tx) => { -// await tx.update(users).set({ balance: 200 }); -// }); -// }); - -// const result = await db.select().from(users); - -// t.deepEqual(result, [{ id: 1, balance: 200 }]); - -// await db.execute(sql`drop table ${users}`); -// }); - -// TODO: implement transactions -// test.serial('nested transaction rollback', async (t) => { -// const { db } = t.context; - -// const users = mysqlTable('users_nested_transactions_rollback', { -// id: serial('id').primaryKey(), -// balance: 
int('balance').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); - -// await db.execute( -// sql`create table users_nested_transactions_rollback (id serial not null primary key, balance int not null)`, -// ); - -// await db.transaction(async (tx) => { -// await tx.insert(users).values({ balance: 100 }); - -// await t.throwsAsync(async () => -// await tx.transaction(async (tx) => { -// await tx.update(users).set({ balance: 200 }); -// tx.rollback(); -// }), new TransactionRollbackError()); -// }); - -// const result = await db.select().from(users); - -// t.deepEqual(result, [{ id: 1, balance: 100 }]); - -// await db.execute(sql`drop table ${users}`); -// }); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = mysqlTable('internal_staff', { - userId: int('user_id').notNull(), - }); - - const customUser = mysqlTable('custom_user', { - id: int('id').notNull(), - }); - - const ticket = mysqlTable('ticket', { - staffId: int('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, 
- custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - 
); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -// TODO: implement iterator -// test.serial('select iterator', async (t) => { -// const { db } = t.context; - -// const users = mysqlTable('users_iterator', { -// id: serial('id').primaryKey(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); -// await db.execute(sql`create table ${users} (id serial not null primary key)`); - -// await db.insert(users).values([{}, {}, {}]); - -// const iter = db.select().from(users).iterator(); -// const result: InferModel[] = []; - -// for await (const row of iter) { -// result.push(row); -// } - -// t.deepEqual(result, [{ id: 1 }, { id: 2 }, { id: 3 }]); -// }); - -// TODO: implement iterator -// test.serial('select iterator w/ prepared statement', async (t) => { -// const { db } = t.context; - -// const users = mysqlTable('users_iterator', { -// id: serial('id').primaryKey(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); -// await db.execute(sql`create table ${users} (id serial not null primary key)`); - -// await db.insert(users).values([{}, 
{}, {}]); - -// const prepared = db.select().from(users).prepare(); -// const iter = prepared.iterator(); -// const result: InferModel[] = []; - -// for await (const row of iter) { -// result.push(row); -// } - -// t.deepEqual(result, [{ id: 1 }, { id: 2 }, { id: 3 }]); -// }); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('utc config for datetime', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`datetime_utc\` datetime(3), - \`datetime\` datetime(3), - \`datetime_as_string\` datetime - ) - `, - ); - const datesTable = mysqlTable('datestable', { - datetimeUTC: datetime('datetime_utc', { fsp: 3, mode: 'date' }), - datetime: datetime('datetime', { fsp: 3 }), - datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), - }); - - const dateObj = new Date('2022-11-11'); - const dateUtc = new Date('2022-11-11T12:12:12.122Z'); - - await 
db.insert(datesTable).values({ - datetimeUTC: dateUtc, - datetime: dateObj, - datetimeAsString: '2022-11-11 12:12:12', - }); - - const res = await db.select().from(datesTable); - - const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); - const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; - - t.is(selectedRow.datetime_utc, '2022-11-11 12:12:12.122'); - t.deepEqual(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z'), dateUtc); - - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetimeUTC instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.datetimeAsString === 'string'); - - t.deepEqual(res, [{ - datetimeUTC: dateUtc, - datetime: new Date('2022-11-11'), - datetimeAsString: '2022-11-11 12:12:12', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); -}); diff --git a/integration-tests/tests/mysql-schema.test.ts b/integration-tests/tests/mysql-schema.test.ts deleted file mode 100644 index f82d47533..000000000 --- a/integration-tests/tests/mysql-schema.test.ts +++ /dev/null @@ -1,899 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; -import { - alias, - boolean, - date, - datetime, - getViewConfig, - int, - json, - mysqlEnum, - mysqlSchema, - mysqlTable, - mysqlTableCreator, - serial, - text, - time, - timestamp, - year, -} from 'drizzle-orm/mysql-core'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import * as mysql from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { toLocalDate } from './utils'; - -const mySchema = mysqlSchema('mySchema'); - -const usersTable = mySchema.table('userstest', { - id: serial('id').primaryKey(), - 
name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const users2Table = mySchema.table('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').references(() => citiesTable.id), -}); - -const citiesTable = mySchema.table('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const publicUsersTable = mysqlTable('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const datesTable = mysqlTable('datestable', { - date: date('date'), - dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time', { fsp: 1 }), - datetime: datetime('datetime', { fsp: 2 }), - datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), - year: year('year'), -}); - -interface Context { - docker: Docker; - mysqlContainer: Docker.Container; - db: MySql2Database; - client: mysql.Connection; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) - ); - - ctx.mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=mysqltests'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/mysql`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(ctx); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = await mysql.createConnection(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySql'); - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client /* , { logger: new DefaultLogger() } */); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists \`datestable\``); - await ctx.db.execute(sql`drop schema if exists \`mySchema\``); - await ctx.db.execute(sql`create schema if not exists \`mySchema\``); - await ctx.db.execute( - sql` - create table \`mySchema\`.\`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`mySchema\`.\`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`mySchema\`.\`users2\` ( - \`id\` serial 
primary key, - \`name\` text not null, - \`city_id\` int references \`mySchema\`.\`cities\`(\`id\`) - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`datestable\` ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`year\` year - ) - `, - ); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = mysqlTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await 
db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const [result, _] = await db.insert(usersTable).values({ name: 'John' }); - - t.deepEqual(result.insertId, 1); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(users[0].affectedRows, 1); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - t.is(users[0].changedRows, 1); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - - t.is(updatedUsers[0].changedRows, 1); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' 
}).where(eq(usersTable.name, 'John')); - - const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(updatedUsers[0].changedRows, 1); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser[0].affectedRows, 1); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser[0].affectedRows, 1); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', 
verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - - t.is(result[0].affectedRows, 4); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { 
db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: - `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, - params: [], - }); -}); - -test.serial('build query insert with onDuplicate', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into `mySchema`.`userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = 
?', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('insert with onDuplicate', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert conflict', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await t.throwsAsync( - () => db.insert(usersTable).values({ id: 1, name: 'John1' }), - { - code: 'ER_DUP_ENTRY', - message: "Duplicate entry '1' for key 'userstest.PRIMARY'", - }, - ); -}); - -test.serial('insert conflict with ignore', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .ignore() - .values({ id: 1, name: 'John1' }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - 
.leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = 
t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare(); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - 
const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - t.deepEqual(result[0], [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute( - db.insert(usersTable).values({ name: 'John' }), - ); - t.is(inserted[0].affectedRows, 1); -}); - -test.serial('insert + select all possible dates', async (t) => { - const { db } = t.context; - - const date = new Date('2022-11-11'); - - await db.insert(datesTable).values({ - date: date, - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: date, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', - }); - - const res = await db.select().from(datesTable); - - t.assert(res[0]?.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.dateAsString === 'string'); - t.assert(typeof res[0]?.datetimeAsString === 'string'); - - t.deepEqual(res, [{ - date: toLocalDate(new Date('2022-11-11')), - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: new Date('2022-11-11'), - year: 2022, - datetimeAsString: '2022-11-11 12:12:12', - }]); -}); -test.serial('select from tables with same name from different schema using alias', async (t) => { - const { db } = t.context; - await db.execute(sql`drop table if exists \`userstest\``); - await db.execute( - sql` - create table \`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await db.insert(usersTable).values({ id: 10, name: 'Ivan' }); - await db.insert(publicUsersTable).values({ id: 11, name: 'Hans' 
}); - - const customerAlias = alias(publicUsersTable, 'customer'); - - const result = await db - .select().from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - userstest: { - id: 10, - name: 'Ivan', - verified: false, - jsonb: null, - createdAt: result[0]!.userstest.createdAt, - }, - customer: { - id: 11, - name: 'Hans', - verified: false, - jsonb: null, - createdAt: result[0]!.customer!.createdAt, - }, - }]); -}); - -const tableWithEnums = mySchema.table('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), - enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), -}); - -test.serial('Mysql enum test case #1', async (t) => { - const { db } = t.context; - - await db.execute(sql` - create table ${tableWithEnums} ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithEnums).values([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum3: 'c' }, - { id: 3, enum1: 'a' }, - ]); - - const res = await db.select().from(tableWithEnums); - - await db.execute(sql`drop table ${tableWithEnums}`); - - t.deepEqual(res, [ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = mySchema.view('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = mySchema.view('new_yorkers', { - id: 
serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); diff --git a/integration-tests/tests/mysql.test.ts b/integration-tests/tests/mysql.test.ts deleted file mode 100644 index ed05ed9ca..000000000 --- a/integration-tests/tests/mysql.test.ts +++ /dev/null @@ -1,3001 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { - and, - asc, - avg, - avgDistinct, - count, - countDistinct, - DefaultLogger, - eq, - exists, - getTableColumns, - gt, - gte, - inArray, - type InferModel, - lt, - max, - min, - Name, - placeholder, - sql, - sum, - sumDistinct, - 
TransactionRollbackError, -} from 'drizzle-orm'; -import { - alias, - bigint, - boolean, - date, - datetime, - decimal, - except, - exceptAll, - foreignKey, - getTableConfig, - getViewConfig, - int, - intersect, - intersectAll, - json, - mediumint, - mysqlEnum, - mysqlTable, - mysqlTableCreator, - mysqlView, - primaryKey, - serial, - smallint, - text, - time, - timestamp, - tinyint, - union, - unionAll, - unique, - uniqueIndex, - uniqueKeyName, - year, -} from 'drizzle-orm/mysql-core'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import { migrate } from 'drizzle-orm/mysql2/migrator'; -import getPort from 'get-port'; -import * as mysql from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, toLocalDate } from './utils.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = mysqlTable('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const users2Table = mysqlTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').references(() => citiesTable.id), -}); - -const citiesTable = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const usersOnUpdate = mysqlTable('users_on_update', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), - updatedAt: datetime('updated_at', { mode: 'date', fsp: 3 }).$onUpdate(() => new Date()), - uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), - alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value -}); - -const datesTable = mysqlTable('datestable', 
{ - date: date('date'), - dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time', { fsp: 1 }), - datetime: datetime('datetime', { fsp: 2 }), - datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), - timestamp: timestamp('timestamp', { fsp: 3 }), - timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), - year: year('year'), -}); - -const coursesTable = mysqlTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: int('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = mysqlTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = mysqlTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: int('amount').notNull(), - quantity: int('quantity').notNull(), -}); - -const usersMigratorTable = mysqlTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => { - return { - name: uniqueIndex('').on(table.name).using('btree'), - }; -}); - -// To test aggregate functions -const aggregateTable = mysqlTable('aggregate_table', { - id: serial('id').notNull(), - name: text('name').notNull(), - a: int('a'), - b: int('b'), - c: int('c'), - nullOnly: int('null_only'), -}); - -interface Context { - docker: Docker; - mysqlContainer: Docker.Container; - db: MySql2Database; - client: mysql.Connection; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) - ); - - ctx.mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(ctx); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = await mysql.createConnection(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING ? 
new DefaultLogger() : undefined }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists \`userstest\``); - await ctx.db.execute(sql`drop table if exists \`users2\``); - await ctx.db.execute(sql`drop table if exists \`cities\``); - - await ctx.db.execute( - sql` - create table \`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int references \`cities\`(\`id\`) - ) - `, - ); - - await ctx.db.execute( - sql` - create table \`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); -}); - -async function setupSetOperationTest(db: MySql2Database) { - await db.execute(sql`drop table if exists \`users2\``); - await db.execute(sql`drop table if exists \`cities\``); - await db.execute( - sql` - create table \`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int references \`cities\`(\`id\`) - ) - `, - ); - - await db.execute( - sql` - create table \`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.insert(citiesTable).values([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 3 }, - { id: 4, name: 'Peter', cityId: 3 }, - { id: 5, name: 'Ben', cityId: 2 }, - { id: 6, name: 'Jill', cityId: 1 }, - { id: 7, name: 'Mary', cityId: 2 }, - { id: 8, name: 'Sally', cityId: 1 }, - ]); -} - -async 
function setupAggregateFunctionsTest(db: MySql2Database) { - await db.execute(sql`drop table if exists \`aggregate_table\``); - await db.execute( - sql` - create table \`aggregate_table\` ( - \`id\` integer primary key auto_increment not null, - \`name\` text not null, - \`a\` integer, - \`b\` integer, - \`c\` integer, - \`null_only\` integer - ); - `, - ); - await db.insert(aggregateTable).values([ - { name: 'value 1', a: 5, b: 10, c: 20 }, - { name: 'value 1', a: 5, b: 20, c: 30 }, - { name: 'value 2', a: 10, b: 50, c: 60 }, - { name: 'value 3', a: 20, b: 20, c: null }, - { name: 'value 4', a: null, b: 90, c: 120 }, - { name: 'value 5', a: 80, b: 10, c: null }, - { name: 'value 6', a: null, b: null, c: 150 }, - ]); -} - -test.serial('table config: unsigned ints', async (t) => { - const unsignedInts = mysqlTable('cities1', { - bigint: bigint('bigint', { mode: 'number', unsigned: true }), - int: int('int', { unsigned: true }), - smallint: smallint('smallint', { unsigned: true }), - mediumint: mediumint('mediumint', { unsigned: true }), - tinyint: tinyint('tinyint', { unsigned: true }), - }); - - const tableConfig = getTableConfig(unsignedInts); - - const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; - const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; - const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; - const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; - const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; - - t.is(bigintColumn.getSQLType(), 'bigint unsigned'); - t.is(intColumn.getSQLType(), 'int unsigned'); - t.is(smallintColumn.getSQLType(), 'smallint unsigned'); - t.is(mediumintColumn.getSQLType(), 'mediumint unsigned'); - t.is(tinyintColumn.getSQLType(), 'tinyint unsigned'); -}); - -test.serial('table config: signed ints', async (t) => { - const unsignedInts = mysqlTable('cities1', { - bigint: bigint('bigint', { mode: 'number' }), - 
int: int('int'), - smallint: smallint('smallint'), - mediumint: mediumint('mediumint'), - tinyint: tinyint('tinyint'), - }); - - const tableConfig = getTableConfig(unsignedInts); - - const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; - const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; - const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; - const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; - const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; - - t.is(bigintColumn.getSQLType(), 'bigint'); - t.is(intColumn.getSQLType(), 'int'); - t.is(smallintColumn.getSQLType(), 'smallint'); - t.is(mediumintColumn.getSQLType(), 'mediumint'); - t.is(tinyintColumn.getSQLType(), 'tinyint'); -}); - -test.serial('table config: foreign keys name', async (t) => { - const table = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.foreignKeys.length, 1); - t.is(tableConfig.foreignKeys[0]!.getName(), 'custom_fk'); -}); - -test.serial('table config: primary keys name', async (t) => { - const table = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.primaryKeys.length, 1); - t.is(tableConfig.primaryKeys[0]!.getName(), 'custom_pk'); -}); - -test.serial('table configs: unique third param', async (t) => { - const cities1Table = mysqlTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state), - f1: 
unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - t.assert(tableConfig.uniqueConstraints.length === 2); - - t.assert(tableConfig.uniqueConstraints[0]?.name === 'custom_name'); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); - - t.assert(tableConfig.uniqueConstraints[1]?.name, 'custom_name1'); - t.deepEqual(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name), ['name', 'state']); -}); - -test.serial('table configs: unique in column', async (t) => { - const cities1Table = mysqlTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: text('state').unique('custom'), - field: text('field').unique('custom_field'), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - t.assert(columnName?.uniqueName === uniqueKeyName(cities1Table, [columnName!.name])); - t.assert(columnName?.isUnique); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - t.assert(columnState?.uniqueName === 'custom'); - t.assert(columnState?.isUnique); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - t.assert(columnField?.uniqueName === 'custom_field'); - t.assert(columnField?.isUnique); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - 
await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = mysqlTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const [result, _] = await db.insert(usersTable).values({ name: 'John' }); - - t.deepEqual(result.insertId, 1); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(users[0].affectedRows, 1); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 
'John')); - - t.is(users[0].changedRows, 1); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - - t.is(updatedUsers[0].changedRows, 1); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(updatedUsers[0].changedRows, 1); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser[0].affectedRows, 1); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser[0].affectedRows, 1); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const 
result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await 
db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - - t.is(result[0].affectedRows, 4); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('select with exists', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const user = alias(usersTable, 'user'); - const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists(db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id)))), - ); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - 
product: 'random_string', - }]); -}); - -test.serial('$default with empty array', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`s_orders\``); - await db.execute( - sql` - create table \`s_orders\` ( - \`id\` serial primary key, - \`region\` text default ('Ukraine'), - \`product\` text not null - ) - `, - ); - - const users = mysqlTable('s_orders', { - id: serial('id').primaryKey(), - region: text('region').default('Ukraine'), - product: text('product').$defaultFn(() => 'random_string'), - }); - - await db.insert(users).values({}); - const selectedOrder = await db.select().from(users); - - t.deepEqual(selectedOrder, [{ - id: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - 
-test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, - params: [], - }); -}); - -test.serial('Query check: Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', - params: [], - }); -}); - -test.serial('Query check: Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', - params: [], - }); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = mysqlTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - 
const users = mysqlTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('build query insert with onDuplicate', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('insert with onDuplicate', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert conflict', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await t.throwsAsync( - () => db.insert(usersTable).values({ id: 1, name: 'John1' }), - { - code: 'ER_DUP_ENTRY', - message: "Duplicate entry '1' for key 'userstest.PRIMARY'", - }, - ); -}); - -test.serial('insert conflict with ignore', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .ignore() 
- .values({ id: 1, name: 'John1' }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - 
-test.serial('select from alias', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare(); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 
'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('migrator', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists cities_migration`); - await db.execute(sql`drop table if exists users_migration`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/mysql' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table cities_migration`); - await db.execute(sql`drop table users_migration`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table __drizzle_migrations`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from 
${usersTable}`); - t.deepEqual(result[0], [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute( - db.insert(usersTable).values({ name: 'John' }), - ); - t.is(inserted[0].affectedRows, 1); -}); - -test.serial('insert + select all possible dates', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`timestamp\` timestamp(3), - \`timestamp_as_string\` timestamp(3), - \`year\` year - ) - `, - ); - - const date = new Date('2022-11-11'); - const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); - - await db.insert(datesTable).values({ - date: date, - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: date, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: dateWithMilliseconds, - timestampAsString: '2022-11-11 12:12:12.123', - }); - - const res = await db.select().from(datesTable); - - t.assert(res[0]?.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.dateAsString === 'string'); - t.assert(typeof res[0]?.datetimeAsString === 'string'); - - t.deepEqual(res, [{ - date: toLocalDate(new Date('2022-11-11')), - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: new Date('2022-11-11'), - year: 2022, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: new Date('2022-11-11 12:12:12.123'), - timestampAsString: '2022-11-11 12:12:12.123', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); -}); - -const tableWithEnums = mysqlTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', ['a', 'b', 
'c']).notNull(), - enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), -}); - -test.serial('Mysql enum test case #1', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`enums_test_case\``); - - await db.execute(sql` - create table \`enums_test_case\` ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithEnums).values([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum3: 'c' }, - { id: 3, enum1: 'a' }, - ]); - - const res = await db.select().from(tableWithEnums); - - await db.execute(sql`drop table \`enums_test_case\``); - - t.deepEqual(res, [ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select({ - id: users2Table.id, - user: { 
- name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId: 1, - }, - cities: { - id: 1, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); - - await db.execute( - sql` - create table \`course_categories\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.execute( - sql` - create table \`courses\` ( - \`id\` serial primary key, - \`name\` text not null, - \`category_id\` int references \`course_categories\`(\`id\`) - ) - `, - ); - - await db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 
}, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); -}); - -test.serial('with ... select', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - 
.$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, - productSales: sql`cast(sum(${orders.amount}) as unsigned)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('with ... 
update', async (t) => { - const { db } = t.context; - - const products = mysqlTable('products', { - id: serial('id').primaryKey(), - price: decimal('price', { - precision: 15, - scale: 2, - }).notNull(), - cheap: boolean('cheap').notNull().default(false), - }); - - await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql` - create table ${products} ( - id serial primary key, - price decimal(15, 2) not null, - cheap boolean not null default false - ) - `); - - await db.insert(products).values([ - { price: '10.99' }, - { price: '25.85' }, - { price: '32.99' }, - { price: '2.50' }, - { price: '4.59' }, - ]); - - const averagePrice = db - .$with('average_price') - .as( - db - .select({ - value: sql`avg(${products.price})`.as('value'), - }) - .from(products), - ); - - await db - .with(averagePrice) - .update(products) - .set({ - cheap: true, - }) - .where(lt(products.price, sql`(select * from ${averagePrice})`)); - - const result = await db - .select({ - id: products.id, - }) - .from(products) - .where(eq(products.cheap, true)); - - t.deepEqual(result, [ - { id: 1 }, - { id: 4 }, - { id: 5 }, - ]); -}); - -test.serial('with ... 
delete', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const averageAmount = db - .$with('average_amount') - .as( - db - .select({ - value: sql`avg(${orders.amount})`.as('value'), - }) - .from(orders), - ); - - await db - .with(averageAmount) - .delete(orders) - .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); - - const result = await db - .select({ - id: orders.id, - }) - .from(orders); - - t.deepEqual(result, [ - { id: 1 }, - { id: 2 }, - { id: 3 }, - { id: 4 }, - { id: 5 }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); -}); - -test.serial('select all fields from subquery without 
alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare()); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db.select().from(users2Table).for('update').toSQL(); - t.regex(query.sql, / for update$/); - } - { - const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); - t.regex(query.sql, / for share skip locked$/); - } - { - const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); - t.regex(query.sql, / for update no wait$/); - } -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = mysqlView('new_yorkers') - .as((qb) => 
qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: 
sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const mysqlTable = mysqlTableCreator((name) => 
`myprefix_${name}`); - - const users = mysqlTable('test_prefixed_table_with_unique_name', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as `test` from `users2` order by `test`'); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); - - await db.insert(usersTable).values({ name: 'With default times' }); - await db.insert(usersTable).values({ - name: 'Without default times', - createdAt: date, - }); - const users = await db.select().from(usersTable); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - const products = mysqlTable('products_transactions', { - id: serial('id').primaryKey(), - price: int('price').notNull(), - stock: int('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop 
table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, - ); - - const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); - const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); - const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); - const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_nested_transactions', { 
- id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance int not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance int not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = mysqlTable('internal_staff', { - userId: int('user_id').notNull(), - }); - - const customUser = mysqlTable('custom_user', { - id: int('id').notNull(), - }); - - const ticket = mysqlTable('ticket', { - staffId: int('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table 
if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ 
- { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select iterator', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_iterator', { - id: serial('id').primaryKey(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await 
db.execute(sql`create table ${users} (id serial not null primary key)`); - - await db.insert(users).values([{}, {}, {}]); - - const iter = db.select().from(users).iterator(); - const result: InferModel[] = []; - - for await (const row of iter) { - result.push(row); - } - - t.deepEqual(result, [{ id: 1 }, { id: 2 }, { id: 3 }]); -}); - -test.serial('select iterator w/ prepared statement', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_iterator', { - id: serial('id').primaryKey(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); - - await db.insert(users).values([{}, {}, {}]); - - const prepared = db.select().from(users).prepare(); - const iter = prepared.iterator(); - const result: InferModel[] = []; - - for await (const row of iter) { - result.push(row); - } - - t.deepEqual(result, [{ id: 1 }, { id: 2 }, { id: 3 }]); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table 
${users}`); -}); - -test.serial('utc config for datetime', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`datetime_utc\` datetime(3), - \`datetime\` datetime(3), - \`datetime_as_string\` datetime - ) - `, - ); - const datesTable = mysqlTable('datestable', { - datetimeUTC: datetime('datetime_utc', { fsp: 3, mode: 'date' }), - datetime: datetime('datetime', { fsp: 3 }), - datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), - }); - - const dateObj = new Date('2022-11-11'); - const dateUtc = new Date('2022-11-11T12:12:12.122Z'); - - await db.insert(datesTable).values({ - datetimeUTC: dateUtc, - datetime: dateObj, - datetimeAsString: '2022-11-11 12:12:12', - }); - - const res = await db.select().from(datesTable); - - const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); - const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; - - t.is(selectedRow.datetime_utc, '2022-11-11 12:12:12.122'); - t.deepEqual(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z'), dateUtc); - - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetimeUTC instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.datetimeAsString === 'string'); - - t.deepEqual(res, [{ - datetimeUTC: dateUtc, - datetime: new Date('2022-11-11'), - datetimeAsString: '2022-11-11 12:12:12', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); -}); - -test.serial('set operations (union) from query builder with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - const sq = db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).as('sq'); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - 
.from(citiesTable).union( - db.select().from(sq), - ).limit(8); - - t.assert(result.length === 8); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - ]); - - // union should throw if selected fields are not in the same order - t.throws(() => - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).union( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table), - ) - ); -}); - -test.serial('set operations (union) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (union all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - 
.from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)).limit(3); - - t.assert(result.length === 3); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (union all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - }); -}); - -test.serial('set operations (intersect) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 
'Tampa' }, - ]); - - t.throws(() => { - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - }); -}); - -test.serial('set operations (intersect) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - - t.assert(result.length === 0); - - t.deepEqual(result, []); - - t.throws(() => { - intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - }); -}); - -test.serial('set operations (intersect all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).intersectAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).intersectAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - 
.from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (intersect all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersectAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - intersectAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (except) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); -}); - -test.serial('set operations (except) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await except( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(3); - - t.assert(result.length === 2); - 
- t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - except( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(3); - }); -}); - -test.serial('set operations (except all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).exceptAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select() - .from(citiesTable).exceptAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (except all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await exceptAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(6).orderBy(asc(sql.identifier('id'))); - - t.assert(result.length === 6); - - t.deepEqual(result, [ - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - { id: 7, name: 'Mary' }, - ]); - - t.throws(() => { - exceptAll( - db - .select({ name: users2Table.name, 
id: users2Table.id }) - .from(users2Table), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(6); - }); -}); - -test.serial('set operations (mixed) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ).orderBy(asc(citiesTable.id)).limit(1).offset(1), - ); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ), - ); - }); -}); - -test.serial('set operations (mixed all) as function with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const sq = except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ).orderBy(asc(sql.identifier('id'))).as('sq'); - - const result = await union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db.select().from(sq).limit(1), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - t.assert(result.length === 4); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 5, name: 'Ben' }, - { id: 2, name: 'London' }, - { id: 
3, name: 'Tampa' }, - ]); - - t.throws(() => { - union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 7)), - ).limit(1), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ); - }); -}); - -test.serial('aggregate function: count', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: count() }).from(table); - const result2 = await db.select({ value: count(table.a) }).from(table); - const result3 = await db.select({ value: countDistinct(table.name) }).from(table); - - t.deepEqual(result1[0]?.value, 7); - t.deepEqual(result2[0]?.value, 5); - t.deepEqual(result3[0]?.value, 6); -}); - -test.serial('aggregate function: avg', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: avg(table.b) }).from(table); - const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); - const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '33.3333'); - t.deepEqual(result2[0]?.value, null); - t.deepEqual(result3[0]?.value, '42.5000'); -}); - -test.serial('aggregate function: sum', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: sum(table.b) }).from(table); - const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); - const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '200'); - 
t.deepEqual(result2[0]?.value, null); - t.deepEqual(result3[0]?.value, '170'); -}); - -test.serial('aggregate function: max', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: max(table.b) }).from(table); - const result2 = await db.select({ value: max(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 90); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('aggregate function: min', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: min(table.b) }).from(table); - const result2 = await db.select({ value: min(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 10); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('test $onUpdateFn and $onUpdate works as $default', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial not null primary key, - name text not null, - update_counter integer default 1 not null, - updated_at datetime(3), - uppercase_name text, - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate); - - const response = await db.select({ ...rest }).from(usersOnUpdate); - - t.deepEqual(response, [ - { name: 'John', id: 1, updateCounter: 1, uppercaseName: 'JOHN', alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', 
alwaysNull: null }, - ]); - const msDelay = 250; - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); - -test.serial('test $onUpdateFn and $onUpdate works updating', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial not null primary key, - name text not null, - update_counter integer default 1 not null, - updated_at datetime(3), - uppercase_name text, - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John', alwaysNull: 'this will will be null after updating' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - const initial = await db.select({ updatedAt }).from(usersOnUpdate); - - await db.update(usersOnUpdate).set({ name: 'Angel', uppercaseName: null }).where(eq(usersOnUpdate.id, 1)); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate); - - const response = await db.select({ ...rest }).from(usersOnUpdate); - - t.deepEqual(response, [ - { name: 'Angel', id: 1, updateCounter: 2, uppercaseName: null, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, - ]); - const msDelay = 250; - - t.assert(initial[0]?.updatedAt?.valueOf() !== justDates[0]?.updatedAt?.valueOf()); - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); diff --git a/integration-tests/tests/mysql/mysql-common.ts b/integration-tests/tests/mysql/mysql-common.ts new file mode 100644 index 000000000..e67e706fb --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common.ts @@ -0,0 +1,3390 @@ 
+/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import Docker from 'dockerode'; +import { + and, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + exists, + getTableColumns, + gt, + gte, + inArray, + lt, + max, + min, + Name, + placeholder, + sql, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; +import { + alias, + bigint, + boolean, + date, + datetime, + decimal, + except, + exceptAll, + foreignKey, + getTableConfig, + getViewConfig, + int, + intersect, + intersectAll, + json, + mediumint, + mysqlEnum, + mysqlSchema, + mysqlTable, + mysqlTableCreator, + mysqlView, + primaryKey, + serial, + smallint, + text, + time, + timestamp, + tinyint, + union, + unionAll, + unique, + uniqueIndex, + uniqueKeyName, + year, +} from 'drizzle-orm/mysql-core'; +import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; +import { migrate } from 'drizzle-orm/mysql2/migrator'; +import getPort from 'get-port'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeEach, describe, expect, test } from 'vitest'; +import { Expect, toLocalDate } from '~/utils.ts'; +import type { Equal } from '~/utils.ts'; + +type TestMySQLDB = MySqlDatabase; + +declare module 'vitest' { + interface TestContext { + mysql: { + db: TestMySQLDB; + }; + mysqlProxy: { + db: MySqlRemoteDatabase; + }; + } +} + +const ENABLE_LOGGING = false; + +const usersTable = mysqlTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), +}); + +const users2Table = mysqlTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => citiesTable.id), +}); + +const citiesTable = mysqlTable('cities', { + id: serial('id').primaryKey(), + 
name: text('name').notNull(), +}); + +const usersOnUpdate = mysqlTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: datetime('updated_at', { mode: 'date', fsp: 3 }).$onUpdate(() => new Date()), + uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), + alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value +}); + +const datesTable = mysqlTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 2 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), + timestamp: timestamp('timestamp', { fsp: 3 }), + timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), + year: year('year'), +}); + +const coursesTable = mysqlTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id').references(() => courseCategoriesTable.id), +}); + +const courseCategoriesTable = mysqlTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = mysqlTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +const usersMigratorTable = mysqlTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => { + return { + name: uniqueIndex('').on(table.name).using('btree'), + }; +}); + +// To test aggregate functions +const aggregateTable = mysqlTable('aggregate_table', { + id: serial('id').notNull(), + name: text('name').notNull(), + a: 
int('a'), + b: int('b'), + c: int('c'), + nullOnly: int('null_only'), +}); + +// To test another schema and multischema +const mySchema = mysqlSchema(`mySchema`); + +const usersMySchemaTable = mySchema.table('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), +}); + +const users2MySchemaTable = mySchema.table('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => citiesTable.id), +}); + +const citiesMySchemaTable = mySchema.table('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +let mysqlContainer: Docker.Container; +export async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'mysql:8'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) + ); + + mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mysqlContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; +} + +afterAll(async () => { + await mysqlContainer?.stop().catch(console.error); +}); + +export function tests(driver?: string) { + describe('common', () => { + beforeEach(async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`drop table if exists userstest`); + await db.execute(sql`drop table if exists users2`); + await db.execute(sql`drop table if exists cities`); + + if (driver !== 'planetscale') { + await db.execute(sql`drop schema if exists \`mySchema\``); + await db.execute(sql`create schema if not exists \`mySchema\``); + } + + await db.execute( + sql` + create table userstest ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb json, + created_at timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id int references cities(id) + ) + `, + ); + + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null + ) + `, + ); + + if (driver !== 'planetscale') { + // mySchema + await db.execute( + sql` + create table \`mySchema\`.\`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table 
\`mySchema\`.\`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int references \`mySchema\`.\`cities\`(\`id\`) + ) + `, + ); + } + }); + + async function setupSetOperationTest(db: TestMySQLDB) { + await db.execute(sql`drop table if exists \`users2\``); + await db.execute(sql`drop table if exists \`cities\``); + await db.execute( + sql` + create table \`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int references \`cities\`(\`id\`) + ) + `, + ); + + await db.execute( + sql` + create table \`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.insert(citiesTable).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + } + + async function setupAggregateFunctionsTest(db: TestMySQLDB) { + await db.execute(sql`drop table if exists \`aggregate_table\``); + await db.execute( + sql` + create table \`aggregate_table\` ( + \`id\` integer primary key auto_increment not null, + \`name\` text not null, + \`a\` integer, + \`b\` integer, + \`c\` integer, + \`null_only\` integer + ); + `, + ); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + } + + test('table config: unsigned ints', async () => { + const unsignedInts = mysqlTable('cities1', { + bigint: bigint('bigint', { 
mode: 'number', unsigned: true }), + int: int('int', { unsigned: true }), + smallint: smallint('smallint', { unsigned: true }), + mediumint: mediumint('mediumint', { unsigned: true }), + tinyint: tinyint('tinyint', { unsigned: true }), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); + expect(intColumn.getSQLType()).toBe('int unsigned'); + expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); + expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); + expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); + }); + + test('table config: signed ints', async () => { + const unsignedInts = mysqlTable('cities1', { + bigint: bigint('bigint', { mode: 'number' }), + int: int('int'), + smallint: smallint('smallint'), + mediumint: mediumint('mediumint'), + tinyint: tinyint('tinyint'), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint'); + expect(intColumn.getSQLType()).toBe('int'); + expect(smallintColumn.getSQLType()).toBe('smallint'); + expect(mediumintColumn.getSQLType()).toBe('mediumint'); + 
expect(tinyintColumn.getSQLType()).toBe('tinyint'); + }); + + test('table config: foreign keys name', async () => { + const table = mysqlTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); + }); + + test('table config: primary keys name', async () => { + const table = mysqlTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); + }); + + test('table configs: unique third param', async () => { + const cities1Table = mysqlTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: unique('custom_name').on(t.name, t.state), + f1: unique('custom_name1').on(t.name, t.state), + })); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + }); + + test('table configs: unique in column', async () => { + const cities1Table = mysqlTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: text('state').unique('custom'), + field: 
text('field').unique('custom_field'), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.isUnique).toBeTruthy(); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBeTruthy(); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBeTruthy(); + }); + + test('select all fields', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('select sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select typed sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select distinct', async (ctx) => { + const { db } = ctx.mysql; + + const usersDistinctTable = mysqlTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists 
${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('insert returning sql', async (ctx) => { + const { db } = ctx.mysql; + + const [result, _] = await db.insert(usersTable).values({ name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('delete returning sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('update returning sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); + }); + + test('update with returning all fields', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: 
users[0]!.createdAt }]); + }); + + test('update with returning partial', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('delete with returning all fields', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('delete with returning partial', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('insert + select', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('json insert', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + 
}).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test('insert with overridden default values', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('insert many', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('insert many with returning', async (ctx) => { + const { db } = ctx.mysql; + + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); + }); + + test('select with group by as field', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('select with exists', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const 
user = alias(usersTable, 'user'); + const result = await db.select({ name: usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test('select with group by as sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('$default function', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('$default with empty array', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`s_orders\``); + await db.execute( + sql` + create table \`s_orders\` ( + \`id\` serial primary key, + \`region\` text default ('Ukraine'), + \`product\` text not null + ) + `, + ); + + const users = mysqlTable('s_orders', { + id: serial('id').primaryKey(), + region: text('region').default('Ukraine'), + product: text('product').$defaultFn(() => 'random_string'), + }); + + await db.insert(users).values({}); + const selectedOrder = await db.select().from(users); + + expect(selectedOrder).toEqual([{ + id: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); 
+ + test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by complex query', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test('build query', async (ctx) => { + const { db } = ctx.mysql; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); + }); + + test('Query check: Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into `users` (`id`, `name`, `state`) 
values (default, default, default)', + params: [], + }); + }); + + test('Query check: Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', + params: [], + }); + }); + + test('Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('empty_insert_single', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, + ); + + await db.insert(users).values({}); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test('Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('empty_insert_multiple', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, + ); + + await db.insert(users).values([{}, {}]); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test('build query insert with onDuplicate', async (ctx) => { + const { db } = ctx.mysql; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' 
} }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', + params: ['John', '["foo","bar"]', 'John1'], + }); + }); + + test('insert with onDuplicate', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable) + .values({ name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test('insert conflict', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable) + .values({ name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); + }); + + test('insert conflict with ignore', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable) + .values({ name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('partial join with alias', async (ctx) => { + const { db } = ctx.mysql; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: 
usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test('full join with alias', async (ctx) => { + const { db } = ctx.mysql; + + const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('select from alias', async (ctx) => { + const { db } = ctx.mysql; + + const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + 
await db.execute(sql`drop table ${users}`); + }); + + test('insert with spaces', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('prepared statement', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('prepared statement reuse', async (ctx) => { + const { db } = ctx.mysql; + + const stmt = db.insert(usersTable).values({ + verified: true, + name: placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + 
expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('migrator', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/mysql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table __drizzle_migrations`); + }); + + test('insert via db.execute + select via db.execute', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert via db.execute w/ query builder', async (ctx) => { + const { db } = ctx.mysql; + + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); + }); + + test('insert + select all possible dates', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`timestamp\` timestamp(3), + \`timestamp_as_string\` timestamp(3), + \`year\` year + ) + `, + ); + + const date = new Date('2022-11-11'); + const 
dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: date, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: dateWithMilliseconds, + timestampAsString: '2022-11-11 12:12:12.123', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: new Date('2022-11-11 12:12:12.123'), + timestampAsString: '2022-11-11 12:12:12.123', + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + const tableWithEnums = mysqlTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + test('Mysql enum test case #1', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`enums_test_case\``); + + await db.execute(sql` + create table \`enums_test_case\` ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums); + + await db.execute(sql`drop table \`enums_test_case\``); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' 
}, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); + + test('left join (flat object fields)', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); + }); + + test('left join (grouped fields)', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); + }); + + test('left join (all fields)', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + 
expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); + }); + + test('join subquery', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + + await db.execute( + sql` + create table \`course_categories\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table \`courses\` ( + \`id\` serial primary key, + \`name\` text not null, + \`category_id\` int references \`course_categories\`(\`id\`) + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + }); + + test('with ... 
select', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, 
+ { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + }); + + test('with ... update', async (ctx) => { + const { db } = ctx.mysql; + + const products = mysqlTable('products', { + id: serial('id').primaryKey(), + price: decimal('price', { + precision: 15, + scale: 2, + }).notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await db.execute(sql`drop table if exists ${products}`); + await db.execute(sql` + create table ${products} ( + id serial primary key, + price decimal(15, 2) not null, + cheap boolean not null default false + ) + `); + + await db.insert(products).values([ + { price: '10.99' }, + { price: '25.85' }, + { price: '32.99' }, + { price: '2.50' }, + { price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)); + + const result = await db + .select({ + id: products.id, + }) + .from(products) + .where(eq(products.cheap, true)); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('with ... 
delete', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); + + const result = await db + .select({ + id: orders.id, + }) + .from(orders); + + expect(result).toEqual([ + { id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('select from subquery sql', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .from(users2Table) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test('select a field without joining its table', (ctx) => { + const { db } = ctx.mysql; + + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); + }); + + test('select all fields from subquery 
without alias', (ctx) => { + const { db } = ctx.mysql; + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); + }); + + test('select count()', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test('select for ...', (ctx) => { + const { db } = ctx.mysql; + + { + const query = db.select().from(users2Table).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update no wait$/); + } + }); + + test('having', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + + test('view', async (ctx) => { + const { db } = ctx.mysql; + + const newYorkers1 = 
mysqlView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = mysqlView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = mysqlView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + + test('select from raw sql', async (ctx) => { + const { db } = ctx.mysql; + + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test('select from raw sql with joins', async (ctx) => { + const { db } = ctx.mysql; + + const result = await db + .select({ + id: sql`users.id`, + 
name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from select', async (ctx) => { + const { db } = ctx.mysql; + + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from with clause', async (ctx) => { + const { db } = ctx.mysql; + + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> 
+ >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('prefixed table', async (ctx) => { + const { db } = ctx.mysql; + + const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); + + const users = mysqlTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('orderBy with aliased column', (ctx) => { + const { db } = ctx.mysql; + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); + }); + + test('timestamp timezone', async (ctx) => { + const { db } = ctx.mysql; + + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(usersTable).values({ name: 'With default times' }); + await db.insert(usersTable).values({ + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + }); + + test('transaction', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_transactions', { + id: serial('id').primaryKey(), + balance: 
int('balance').notNull(), + }); + const products = mysqlTable('products_transactions', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + }); + + test('transaction rollback', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + 
})()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); + }); + + test('nested transaction', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_nested_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions (id serial not null primary key, balance int not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + }); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 200 }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('nested transaction rollback', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_nested_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions_rollback (id serial not null primary key, balance int not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await expect((async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 100 }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('join subquery with join', async (ctx) => { + const { db } = ctx.mysql; + + const internalStaff = mysqlTable('internal_staff', { + userId: int('user_id').notNull(), + 
}); + + const customUser = mysqlTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = mysqlTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); + }); + + test('subquery with view', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from 
${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); + + test('join view as subquery', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await 
db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); + + test('select iterator', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{}, {}, {}]); + + const iter = db.select().from(users).iterator(); + + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test('select iterator w/ prepared statement', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{}, {}, {}]); + + const prepared = db.select().from(users).prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test('insert undefined', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('update undefined', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name'), 
+ }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('utc config for datetime', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`datetime_utc\` datetime(3), + \`datetime\` datetime(3), + \`datetime_as_string\` datetime + ) + `, + ); + const datesTable = mysqlTable('datestable', { + datetimeUTC: datetime('datetime_utc', { fsp: 3, mode: 'date' }), + datetime: datetime('datetime', { fsp: 3 }), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), + }); + + const dateObj = new Date('2022-11-11'); + const dateUtc = new Date('2022-11-11T12:12:12.122Z'); + + await db.insert(datesTable).values({ + datetimeUTC: dateUtc, + datetime: dateObj, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); + const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; + + expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122'); + expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); + + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + datetimeUTC: dateUtc, + datetime: new Date('2022-11-11'), + datetimeAsString: '2022-11-11 12:12:12', + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + 
test('set operations (union) from query builder with subquery', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + const sq = db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).as('sq'); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db.select().from(sq), + ).limit(8); + + expect(result).toHaveLength(8); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + ]); + + // union should throw if selected fields are not in the same order + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union) as function', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + 
.from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) from query builder', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)).limit(3); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) as function', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + 
})()).rejects.toThrowError(); + }); + + test('set operations (intersect) from query builder', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) as function', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect all) from query builder', async (ctx) => { + const { db } = ctx.mysql; + + await 
setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).intersectAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).intersectAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect all) as function', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await intersectAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + intersectAll( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (except) from query builder', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + db + .select() + 
.from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + }); + + test('set operations (except) as function', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await except( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(3); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + except( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(3); + })()).rejects.toThrowError(); + }); + + test('set operations (except all) from query builder', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).exceptAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select() + .from(citiesTable).exceptAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (except all) as function', async (ctx) => { + 
const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await exceptAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gt(users2Table.id, 7)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(6).orderBy(asc(sql.identifier('id'))); + + expect(result).toHaveLength(6); + + expect(result).toEqual([ + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 7, name: 'Mary' }, + ]); + + await expect((async () => { + exceptAll( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gt(users2Table.id, 7)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(6); + })()).rejects.toThrowError(); + }); + + test('set operations (mixed) from query builder', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ).orderBy(asc(citiesTable.id)).limit(1).offset(1), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + 
test('set operations (mixed all) as function with subquery', async (ctx) => { + const { db } = ctx.mysql; + + await setupSetOperationTest(db); + + const sq = except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).orderBy(asc(sql.identifier('id'))).as('sq'); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db.select().from(sq).limit(1), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 5, name: 'Ben' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).limit(1), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('aggregate function: count', async (ctx) => { + const { db } = ctx.mysql; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test('aggregate function: avg', async (ctx) => { + const { db } = ctx.mysql; + const table = 
aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('33.3333'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('42.5000'); + }); + + test('aggregate function: sum', async (ctx) => { + const { db } = ctx.mysql; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('170'); + }); + + test('aggregate function: max', async (ctx) => { + const { db } = ctx.mysql; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBe(null); + }); + + test('aggregate function: min', async (ctx) => { + const { db } = ctx.mysql; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBe(null); + }); + + test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, 
+ name text not null, + update_counter integer default 1 not null, + updated_at datetime(3), + uppercase_name text, + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, uppercaseName: 'JOHN', alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, + ]); + const msDelay = 750; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { + const { db } = ctx.mysql; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(3), + uppercase_name text, + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will will be null after updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ updatedAt }).from(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel', uppercaseName: null }).where(eq(usersOnUpdate.id, 1)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest 
}).from(usersOnUpdate); + + expect(response).toEqual([ + { name: 'Angel', id: 1, updateCounter: 2, uppercaseName: null, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, + ]); + const msDelay = 750; + + expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + // mySchema tests + test('mySchema :: select all fields', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: select sql', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select typed sql', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select 
distinct', async (ctx) => { + const { db } = ctx.mysql; + + const usersDistinctTable = mysqlTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('mySchema :: insert returning sql', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + const [result, _] = await db.insert(usersMySchemaTable).values({ name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('mySchema :: delete returning sql', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('mySchema :: update with returning partial', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( + eq(usersMySchemaTable.name, 'John'), + ); + + const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ) + .where( + eq(usersMySchemaTable.id, 1), + ); + 
+ expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('mySchema :: delete with returning all fields', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('mySchema :: insert + select', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('mySchema :: insert with overridden default values', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: insert many', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: 
usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('mySchema :: select with group by as field', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('mySchema :: select with group by as column + sql', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('mySchema :: build query', async (ctx) => { + const { db } = ctx.mysql; + + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: + `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); + }); + + test('mySchema :: insert with spaces', async (ctx) => { + const { db } = ctx.mysql; + await 
db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const stmt = db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }).from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { + const { db } = ctx.mysql; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); + await db.insert(usersTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + userstest: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.userstest.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: 
result[0]!.customer!.createdAt, + }, + }]); + }); + + test('mySchema :: view', async (ctx) => { + const { db } = ctx.mysql; + + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + }); +} diff --git a/integration-tests/tests/mysql.custom.test.ts b/integration-tests/tests/mysql/mysql-custom.test.ts similarity index 65% rename from integration-tests/tests/mysql.custom.test.ts rename to 
integration-tests/tests/mysql/mysql-custom.test.ts index c60b88e47..c8a761665 100644 --- a/integration-tests/tests/mysql.custom.test.ts +++ b/integration-tests/tests/mysql/mysql-custom.test.ts @@ -1,8 +1,4 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; +import retry from 'async-retry'; import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; import { alias, @@ -22,10 +18,45 @@ import { import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import { migrate } from 'drizzle-orm/mysql2/migrator'; -import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; -import { toLocalDate } from './utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { toLocalDate } from '~/utils'; +import { createDockerDB } from './mysql-common'; + +const ENABLE_LOGGING = false; + +let db: MySql2Database; +let client: mysql.Connection; + +beforeAll(async () => { + const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? 
await createDockerDB(); + client = await retry(async () => { + client = await mysql.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.mysql = { + db, + }; +}); const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { @@ -118,78 +149,12 @@ const usersMigratorTable = mysqlTable('users12', { email: text('email').notNull(), }); -interface Context { - docker: Docker; - mysqlContainer: Docker.Container; - db: MySql2Database; - client: mysql.Connection; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - ctx.mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? 
await createDockerDB(ctx); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = await mysql.createConnection(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: false }); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists \`userstest\``); - await ctx.db.execute(sql`drop table if exists \`datestable\``); - await ctx.db.execute(sql`drop table if exists \`test_table\``); +beforeEach(async () => { + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute(sql`drop table if exists \`test_table\``); // await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( + await db.execute( sql` create table \`userstest\` ( \`id\` serial primary key, @@ -201,7 +166,7 @@ test.beforeEach(async (t) => { `, ); - await ctx.db.execute( + await db.execute( sql` create table \`datestable\` ( \`date\` date, @@ -214,7 +179,7 @@ test.beforeEach(async (t) => { `, ); - await ctx.db.execute( + await db.execute( sql` create table \`test_table\` ( \`id\` binary(16) primary key, @@ -225,84 +190,84 @@ test.beforeEach(async (t) => { ); }); -test.serial('select all fields', async (t) => { - const { db } = t.context; +test('select all fields', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line 
no-instanceof/no-instanceof + expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test.serial('select sql', async (t) => { - const { db } = t.context; +test('select sql', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('select typed sql', async (t) => { - const { db } = t.context; +test('select typed sql', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('insert returning sql', async (t) => { - const { db } = t.context; +test('insert returning sql', async (ctx) => { + const { db } = ctx.mysql; const [result, _] = await db.insert(usersTable).values({ name: 'John' }); - t.deepEqual(result.insertId, 1); + expect(result.insertId).toBe(1); }); -test.serial('delete returning sql', async (t) => { - const { db } = t.context; +test('delete returning sql', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - t.is(users[0].affectedRows, 1); + expect(users[0].affectedRows).toBe(1); }); -test.serial('update returning sql', async (t) => { - const { db } = t.context; +test('update returning sql', async (ctx) => { 
+ const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - t.is(users[0].changedRows, 1); + expect(users[0].changedRows).toBe(1); }); -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; +test('update with returning all fields', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - t.is(updatedUsers[0].changedRows, 1); + expect(updatedUsers[0].changedRows).toBe(1); - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof + expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test.serial('update with returning partial', async (t) => { - const { db } = t.context; +test('update with returning partial', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); @@ -311,46 +276,46 @@ test.serial('update with returning partial', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(updatedUsers[0].changedRows, 1); + expect(updatedUsers[0].changedRows).toBe(1); - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); + expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); -test.serial('delete with returning all fields', async (t) => { - const { db } = 
t.context; +test('delete with returning all fields', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - t.is(deletedUser[0].affectedRows, 1); + expect(deletedUser[0].affectedRows).toBe(1); }); -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; +test('delete with returning partial', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - t.is(deletedUser[0].affectedRows, 1); + expect(deletedUser[0].affectedRows).toBe(1); }); -test.serial('insert + select', async (t) => { - const { db } = t.context; +test('insert + select', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ + expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); -test.serial('json insert', async (t) => { - const { db } = t.context; +test('json insert', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ @@ -359,20 +324,20 @@ test.serial('json insert', async (t) => { jsonb: usersTable.jsonb, }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + 
expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; +test('insert with overridden default values', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test.serial('insert many', async (t) => { - const { db } = t.context; +test('insert many', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values([ { name: 'John' }, @@ -387,7 +352,7 @@ test.serial('insert many', async (t) => { verified: usersTable.verified, }).from(usersTable); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, @@ -395,8 +360,8 @@ test.serial('insert many', async (t) => { ]); }); -test.serial('insert many with returning', async (t) => { - const { db } = t.context; +test('insert many with returning', async (ctx) => { + const { db } = ctx.mysql; const result = await db.insert(usersTable).values([ { name: 'John' }, @@ -405,55 +370,55 @@ test.serial('insert many with returning', async (t) => { { name: 'Austin', verified: true }, ]); - t.is(result[0].affectedRows, 4); + expect(result[0].affectedRows).toBe(4); }); -test.serial('select with group by as field', async (t) => { - const { db } = t.context; +test('select with group by as field', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) 
.groupBy(usersTable.name); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; +test('select with group by as sql', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; +test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; +test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; +test('select with group by complex query', async (ctx) => { + const { db } = ctx.mysql; await 
db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -462,39 +427,39 @@ test.serial('select with group by complex query', async (t) => { .orderBy(asc(usersTable.name)) .limit(1); - t.deepEqual(result, [{ name: 'Jane' }]); + expect(result).toEqual([{ name: 'Jane' }]); }); -test.serial('build query', async (t) => { - const { db } = t.context; +test('build query', async (ctx) => { + const { db } = ctx.mysql; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, params: [], }); }); -test.serial('build query insert with onDuplicate', async (t) => { - const { db } = t.context; +test('build query insert with onDuplicate', async (ctx) => { + const { db } = ctx.mysql; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', params: ['John', '["foo","bar"]', 'John1'], }); }); -test.serial('insert with onDuplicate', async (t) => { - const { db } = t.context; +test('insert with onDuplicate', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); @@ -507,26 +472,22 @@ test.serial('insert with onDuplicate', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John1' }]); + expect(res).toEqual([{ id: 1, name: 'John1' }]); }); -test.serial('insert conflict', async (t) => { - const { db } = t.context; +test('insert conflict', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); - await t.throwsAsync( - () => 
db.insert(usersTable).values({ id: 1, name: 'John1' }), - { - code: 'ER_DUP_ENTRY', - message: "Duplicate entry '1' for key 'userstest.PRIMARY'", - }, - ); + await expect((async () => { + await db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).rejects.toThrowError("Duplicate entry '1' for key 'userstest.PRIMARY'"); }); -test.serial('insert conflict with ignore', async (t) => { - const { db } = t.context; +test('insert conflict with ignore', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); @@ -539,19 +500,19 @@ test.serial('insert conflict with ignore', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John' }]); + expect(res).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert sql', async (t) => { - const { db } = t.context; +test('insert sql', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('partial join with alias', async (t) => { - const { db } = t.context; +test('partial join with alias', async (ctx) => { + const { db } = ctx.mysql; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -569,14 +530,14 @@ test.serial('partial join with alias', async (t) => { .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); -test.serial('full join with alias', async (t) => { - const { db } = t.context; +test('full join with alias', async (ctx) => { + const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); @@ -596,7 +557,7 @@ test.serial('full join with alias', async (t) => {
.leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ users: { id: 10, name: 'Ivan', @@ -610,8 +571,8 @@ test.serial('full join with alias', async (t) => { await db.execute(sql`drop table ${users}`); }); -test.serial('select from alias', async (t) => { - const { db } = t.context; +test('select from alias', async (ctx) => { + const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); @@ -633,7 +594,7 @@ test.serial('select from alias', async (t) => { .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ user: { id: 10, name: 'Ivan', @@ -647,17 +608,17 @@ test.serial('select from alias', async (t) => { await db.execute(sql`drop table ${users}`); }); -test.serial('insert with spaces', async (t) => { - const { db } = t.context; +test('insert with spaces', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); -test.serial('prepared statement', async (t) => { - const { db } = t.context; +test('prepared statement', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ @@ -667,11 +628,11 @@ test.serial('prepared statement', async (t) => { .prepare(); const result = await statement.execute(); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; +test('prepared statement reuse', async (ctx) => { + const { db } = ctx.mysql; const stmt = db.insert(usersTable).values({ verified: true, @@ -688,7 +649,7 @@ test.serial('prepared statement reuse', async 
(t) => { verified: usersTable.verified, }).from(usersTable); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, @@ -702,8 +663,8 @@ test.serial('prepared statement reuse', async (t) => { ]); }); -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; +test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ @@ -714,11 +675,11 @@ test.serial('prepared statement with placeholder in .where', async (t) => { .prepare(); const result = await stmt.execute({ id: 1 }); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('migrator', async (t) => { - const { db } = t.context; +test('migrator', async (ctx) => { + const { db } = ctx.mysql; await db.execute(sql`drop table if exists cities_migration`); await db.execute(sql`drop table if exists users_migration`); @@ -731,7 +692,7 @@ test.serial('migrator', async (t) => { const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table cities_migration`); await db.execute(sql`drop table users_migration`); @@ -739,26 +700,26 @@ test.serial('migrator', async (t) => { await db.execute(sql`drop table __drizzle_migrations`); }); -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; +test('insert via db.execute + select via db.execute', async (ctx) => { + const { db } = ctx.mysql; await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from 
${usersTable}`); - t.deepEqual(result[0], [{ id: 1, name: 'John' }]); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; +test('insert via db.execute w/ query builder', async (ctx) => { + const { db } = ctx.mysql; const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); - t.is(inserted[0].affectedRows, 1); + expect(inserted[0].affectedRows).toBe(1); }); -test.serial('insert + select all possible dates', async (t) => { - const { db } = t.context; +test('insert + select all possible dates', async (ctx) => { + const { db } = ctx.mysql; const date = new Date('2022-11-11'); @@ -773,12 +734,12 @@ test.serial('insert + select all possible dates', async (t) => { const res = await db.select().from(datesTable); - t.assert(res[0]?.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.dateAsString === 'string'); - t.assert(typeof res[0]?.datetimeAsString === 'string'); + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.dateAsString).toBeTypeOf('string'); + expect(res[0]?.datetimeAsString).toBeTypeOf('string'); - t.deepEqual(res, [{ + expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', @@ -795,8 +756,8 @@ const tableWithEnums = mysqlTable('enums_test_case', { enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); -test.serial('Mysql enum test case #1', async (t) => { - const { db } = t.context; +test('Mysql enum test case #1', async (ctx) => { + const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`enums_test_case\``); @@ -819,15 +780,15 @@ test.serial('Mysql enum test case #1', async (t) => { await db.execute(sql`drop table \`enums_test_case\``); - 
t.deepEqual(res, [ + expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); -test.serial('custom binary', async (t) => { - const { db } = t.context; +test('custom binary', async (ctx) => { + const { db } = ctx.mysql; const id = uuid().replace(/-/g, ''); await db.insert(testTable).values({ @@ -838,15 +799,9 @@ test.serial('custom binary', async (t) => { const res = await db.select().from(testTable); - t.deepEqual(res, [{ + expect(res).toEqual([{ id, sqlId: Buffer.from(id, 'hex'), rawId: id, }]); }); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); -}); diff --git a/integration-tests/tests/mysql/mysql-planetscale.test.ts b/integration-tests/tests/mysql/mysql-planetscale.test.ts new file mode 100644 index 000000000..8c7e74543 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-planetscale.test.ts @@ -0,0 +1,76 @@ +import { Client } from '@planetscale/database'; +import type { PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; +import { drizzle } from 'drizzle-orm/planetscale-serverless'; +import { beforeAll, beforeEach } from 'vitest'; +import { skipTests } from '~/common'; +import { tests } from './mysql-common'; + +const ENABLE_LOGGING = false; + +let db: PlanetScaleDatabase; + +beforeAll(async () => { + db = drizzle(new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING']! 
}), { logger: ENABLE_LOGGING }); +}); + +beforeEach((ctx) => { + ctx.mysql = { + db, + }; +}); + +skipTests([ + 'mySchema :: view', + 'mySchema :: select from tables with same name from different schema using alias', + 'mySchema :: prepared statement with placeholder in .where', + 'mySchema :: insert with spaces', + 'mySchema :: select with group by as column + sql', + 'mySchema :: select with group by as field', + 'mySchema :: insert many', + 'mySchema :: insert with overridden default values', + 'mySchema :: insert + select', + 'mySchema :: delete with returning all fields', + 'mySchema :: update with returning partial', + 'mySchema :: delete returning sql', + 'mySchema :: insert returning sql', + 'mySchema :: select typed sql', + 'mySchema :: select sql', + 'mySchema :: select all fields', + 'test $onUpdateFn and $onUpdate works updating', + 'test $onUpdateFn and $onUpdate works as $default', + 'set operations (mixed all) as function with subquery', + 'set operations (mixed) from query builder', + 'set operations (except all) as function', + 'set operations (except all) from query builder', + 'set operations (except) as function', + 'set operations (except) from query builder', + 'set operations (intersect all) as function', + 'set operations (intersect all) from query builder', + 'set operations (intersect) as function', + 'set operations (intersect) from query builder', + 'select iterator w/ prepared statement', + 'select iterator', + 'subquery with view', + 'join on aliased sql from with clause', + 'with ... delete', + 'with ... update', + 'with ... 
select', + + // to redefine in this file + 'utc config for datetime', + 'transaction', + 'having', + 'select count()', + 'insert via db.execute w/ query builder', + 'insert via db.execute + select via db.execute', + 'insert many with returning', + 'delete with returning partial', + 'delete with returning all fields', + 'update with returning partial', + 'update with returning all fields', + 'update returning sql', + 'delete returning sql', + 'insert returning sql', +]); + +tests('planetscale'); diff --git a/integration-tests/tests/mysql.prefixed.test.ts b/integration-tests/tests/mysql/mysql-prefixed.test.ts similarity index 74% rename from integration-tests/tests/mysql.prefixed.test.ts rename to integration-tests/tests/mysql/mysql-prefixed.test.ts index 324dced00..2f313ec00 100644 --- a/integration-tests/tests/mysql.prefixed.test.ts +++ b/integration-tests/tests/mysql/mysql-prefixed.test.ts @@ -1,21 +1,6 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { - asc, - DefaultLogger, - eq, - getTableName, - gt, - inArray, - type InferModel, - Name, - placeholder, - sql, - TransactionRollbackError, -} from 'drizzle-orm'; +import retry from 'async-retry'; +import type { Equal } from 'drizzle-orm'; +import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; import { alias, boolean, @@ -38,17 +23,42 @@ import { import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import { migrate } from 'drizzle-orm/mysql2/migrator'; -import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, toLocalDate } from './utils.ts'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { Expect, toLocalDate } from '~/utils'; +import { createDockerDB } from './mysql-common'; const ENABLE_LOGGING = false; +let db: 
MySql2Database; +let client: mysql.Connection; + +beforeAll(async () => { + const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(); + client = await retry(async () => { + client = await mysql.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + const tablePrefix = 'drizzle_tests_'; const mysqlTable = mysqlTableCreator((name) => `${tablePrefix}${name}`); - const usersTable = mysqlTable('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), @@ -68,84 +78,12 @@ const citiesTable = mysqlTable('cities', { name: text('name').notNull(), }); -interface Context { - docker: Docker; - mysqlContainer: Docker.Container; - db: MySql2Database; - client: mysql.Connection; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) - ); +beforeEach(async () => { + await db.execute(sql`drop table if exists ${usersTable}`); + await db.execute(sql`drop table if exists ${users2Table}`); + await db.execute(sql`drop table if exists ${citiesTable}`); - ctx.mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(ctx); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = await mysql.createConnection(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING ? 
new DefaultLogger() : undefined }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.mysqlContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists ${usersTable}`); - await ctx.db.execute(sql`drop table if exists ${users2Table}`); - await ctx.db.execute(sql`drop table if exists ${citiesTable}`); - - await ctx.db.execute( + await db.execute( sql` create table ${usersTable} ( \`id\` serial primary key, @@ -157,7 +95,7 @@ test.beforeEach(async (t) => { `, ); - await ctx.db.execute( + await db.execute( sql` create table ${users2Table} ( \`id\` serial primary key, @@ -167,7 +105,7 @@ test.beforeEach(async (t) => { `, ); - await ctx.db.execute( + await db.execute( sql` create table ${citiesTable} ( \`id\` serial primary key, @@ -177,43 +115,35 @@ test.beforeEach(async (t) => { ); }); -test.serial('select all fields', async (t) => { - const { db } = t.context; - +test('select all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof + expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test.serial('select sql', async (t) => { - const { db } = t.context; - +test('select sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); - t.deepEqual(users, [{ name: 'JOHN' }]); 
+ expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('select typed sql', async (t) => { - const { db } = t.context; - +test('select typed sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('select distinct', async (t) => { - const { db } = t.context; - +test('select distinct', async () => { const usersDistinctTable = mysqlTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), @@ -235,54 +165,44 @@ test.serial('select distinct', async (t) => { await db.execute(sql`drop table ${usersDistinctTable}`); - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - +test('insert returning sql', async () => { const [result, _] = await db.insert(usersTable).values({ name: 'John' }); - t.deepEqual(result.insertId, 1); + expect(result.insertId).toBe(1); }); -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - +test('delete returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - t.is(users[0].affectedRows, 1); + expect(users[0].affectedRows).toBe(1); }); -test.serial('update returning sql', async (t) => { - const { db } = t.context; - +test('update returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - t.is(users[0].changedRows, 1); + expect(users[0].changedRows).toBe(1); }); -test.serial('update with returning all fields', async (t) => { - 
const { db } = t.context; - +test('update with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - t.is(updatedUsers[0].changedRows, 1); + expect(updatedUsers[0].changedRows).toBe(1); - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof + expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - +test('update with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); @@ -290,47 +210,39 @@ test.serial('update with returning partial', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(updatedUsers[0].changedRows, 1); + expect(updatedUsers[0].changedRows).toBe(1); - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); + expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - +test('delete with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - t.is(deletedUser[0].affectedRows, 1); + expect(deletedUser[0].affectedRows).toBe(1); }); -test.serial('delete with returning partial', async (t) => { - const { db } = 
t.context; - +test('delete with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - t.is(deletedUser[0].affectedRows, 1); + expect(deletedUser[0].affectedRows).toBe(1); }); -test.serial('insert + select', async (t) => { - const { db } = t.context; - +test('insert + select', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ + expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); -test.serial('json insert', async (t) => { - const { db } = t.context; - +test('json insert', async () => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, @@ -338,21 +250,17 @@ test.serial('json insert', async (t) => { jsonb: usersTable.jsonb, }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - +test('insert with overridden default values', async () => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + 
expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test.serial('insert many', async (t) => { - const { db } = t.context; - +test('insert many', async () => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -366,7 +274,7 @@ test.serial('insert many', async (t) => { verified: usersTable.verified, }).from(usersTable); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, @@ -374,9 +282,7 @@ test.serial('insert many', async (t) => { ]); }); -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - +test('insert many with returning', async () => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -384,56 +290,46 @@ test.serial('insert many with returning', async (t) => { { name: 'Austin', verified: true }, ]); - t.is(result[0].affectedRows, 4); + expect(result[0].affectedRows).toBe(4); }); -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - +test('select with group by as field', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - +test('select with group by as sql', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); - 
t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - +test('select with group by as sql + column', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - +test('select with group by as column + sql', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - +test('select with group by complex query', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -441,17 +337,15 @@ test.serial('select with group by complex query', async (t) => { .orderBy(asc(usersTable.name)) .limit(1); - t.deepEqual(result, [{ name: 'Jane' }]); + expect(result).toEqual([{ name: 'Jane' }]); }); -test.serial('build query', async (t) => { - const { db } = t.context; - +test('build query', async () => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); - 
t.deepEqual(query, { + expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`${getTableName(usersTable)}\` group by \`${ getTableName(usersTable) }\`.\`id\`, \`${getTableName(usersTable)}\`.\`name\``, @@ -459,15 +353,13 @@ test.serial('build query', async (t) => { }); }); -test.serial('build query insert with onDuplicate', async (t) => { - const { db } = t.context; - +test('build query insert with onDuplicate', async () => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: `insert into \`${ getTableName(usersTable) }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, @@ -475,9 +367,7 @@ test.serial('build query insert with onDuplicate', async (t) => { }); }); -test.serial('insert with onDuplicate', async (t) => { - const { db } = t.context; - +test('insert with onDuplicate', async () => { await db.insert(usersTable) .values({ name: 'John' }); @@ -489,26 +379,19 @@ test.serial('insert with onDuplicate', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John1' }]); + expect(res).toEqual([{ id: 1, name: 'John1' }]); }); -test.serial('insert conflict', async (t) => { - const { db } = t.context; - +test('insert conflict', async () => { await db.insert(usersTable) .values({ name: 'John' }); - await t.throwsAsync( - () => db.insert(usersTable).values({ id: 1, name: 'John1' }), - { - code: 'ER_DUP_ENTRY', - }, - ); + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); }); -test.serial('insert conflict with ignore', async (t) => { - const { db } = t.context; - +test('insert conflict with ignore', async () => { await db.insert(usersTable) .values({ name: 'John' }); @@ -520,19 +403,16 @@ test.serial('insert conflict with ignore', async 
(t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John' }]); + expect(res).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert sql', async (t) => { - const { db } = t.context; - +test('insert sql', async () => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('partial join with alias', async (t) => { - const { db } = t.context; +test('partial join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -550,15 +430,13 @@ test.serial('partial join with alias', async (t) => { .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); -test.serial('full join with alias', async (t) => { - const { db } = t.context; - +test('full join with alias', async () => { const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { @@ -577,7 +455,7 @@ test.serial('full join with alias', async (t) => { .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ users: { id: 10, name: 'Ivan', @@ -591,9 +469,7 @@ test.serial('full join with alias', async (t) => { await db.execute(sql`drop table ${users}`); }); -test.serial('select from alias', async (t) => { - const { db } = t.context; - +test('select from alias', async () => { const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { @@ -614,7 +490,7 @@ test.serial('select from alias', async (t) => { .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); 
- t.deepEqual(result, [{ + expect(result).toEqual([{ user: { id: 10, name: 'Ivan', @@ -628,18 +504,14 @@ test.serial('select from alias', async (t) => { await db.execute(sql`drop table ${users}`); }); -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - +test('insert with spaces', async () => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); -test.serial('prepared statement', async (t) => { - const { db } = t.context; - +test('prepared statement', async () => { await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, @@ -648,15 +520,13 @@ test.serial('prepared statement', async (t) => { .prepare(); const result = await statement.execute(); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - +test('prepared statement reuse', async () => { const stmt = db.insert(usersTable).values({ verified: true, - name: placeholder('name'), + name: sql.placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { @@ -669,7 +539,7 @@ test.serial('prepared statement reuse', async (t) => { verified: usersTable.verified, }).from(usersTable); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, @@ -683,24 +553,20 @@ test.serial('prepared statement reuse', async (t) => { ]); }); -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - +test('prepared statement with placeholder in .where', async () => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: 
usersTable.id, name: usersTable.name, }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) + .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('migrator', async (t) => { - const { db } = t.context; - +test('migrator', async () => { const usersMigratorTable = mysqlTableRaw('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), @@ -722,7 +588,7 @@ test.serial('migrator', async (t) => { const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql.raw(`drop table cities_migration`)); await db.execute(sql.raw(`drop table users_migration`)); @@ -730,27 +596,21 @@ test.serial('migrator', async (t) => { await db.execute(sql.raw(`drop table __drizzle_migrations`)); }); -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - +test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - t.deepEqual(result[0], [{ id: 1, name: 'John' }]); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - +test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); - t.is(inserted[0].affectedRows, 1); + expect(inserted[0].affectedRows).toBe(1); }); -test.serial('insert + select all possible dates', async (t) => { - const { db } = t.context; - +test('insert + select all possible 
dates', async () => { const datesTable = mysqlTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), @@ -787,12 +647,12 @@ test.serial('insert + select all possible dates', async (t) => { const res = await db.select().from(datesTable); - t.assert(res[0]?.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.dateAsString === 'string'); - t.assert(typeof res[0]?.datetimeAsString === 'string'); + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); - t.deepEqual(res, [{ + expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', @@ -804,9 +664,7 @@ test.serial('insert + select all possible dates', async (t) => { await db.execute(sql`drop table ${datesTable}`); }); -test.serial('Mysql enum test case #1', async (t) => { - const { db } = t.context; - +test('Mysql enum test case #1', async () => { const tableWithEnums = mysqlTable('enums_test_case', { id: serial('id').primaryKey(), enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), @@ -835,16 +693,14 @@ test.serial('Mysql enum test case #1', async (t) => { await db.execute(sql`drop table ${tableWithEnums}`); - t.deepEqual(res, [ + expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - +test('left join (flat object fields)', async () => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); @@ -858,15 +714,13 @@ test.serial('left join (flat object fields)', async (t) => { }).from(users2Table) 
.leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - t.deepEqual(res, [ + expect(res).toEqual([ { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - +test('left join (grouped fields)', async () => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); @@ -886,7 +740,7 @@ test.serial('left join (grouped fields)', async (t) => { }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - t.deepEqual(res, [ + expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, @@ -900,9 +754,7 @@ test.serial('left join (grouped fields)', async (t) => { ]); }); -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - +test('left join (all fields)', async () => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); @@ -911,7 +763,7 @@ test.serial('left join (all fields)', async (t) => { const res = await db.select().from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - t.deepEqual(res, [ + expect(res).toEqual([ { users2: { id: 1, @@ -934,9 +786,7 @@ test.serial('left join (all fields)', async (t) => { ]); }); -test.serial('join subquery', async (t) => { - const { db } = t.context; - +test('join subquery', async () => { const coursesTable = mysqlTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), @@ -1006,7 +856,7 @@ test.serial('join subquery', async (t) => { await db.execute(sql`drop table ${coursesTable}`); await db.execute(sql`drop table ${courseCategoriesTable}`); - t.deepEqual(res, [ + expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, @@ -1014,9 +864,7 @@ test.serial('join subquery', async (t) => { ]); }); 
-test.serial('with ... select', async (t) => { - const { db } = t.context; - +test('with ... select', async () => { const orders = mysqlTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), @@ -1092,7 +940,7 @@ test.serial('with ... select', async (t) => { await db.execute(sql`drop table ${orders}`); - t.deepEqual(result, [ + expect(result).toEqual([ { region: 'Europe', product: 'A', @@ -1120,9 +968,7 @@ test.serial('with ... select', async (t) => { ]); }); -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - +test('select from subquery sql', async () => { await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); const sq = db @@ -1132,53 +978,43 @@ test.serial('select from subquery sql', async (t) => { const res = await db.select({ name: sq.name }).from(sq); - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); +test('select a field without joining its table', () => { + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - +test('select all fields from subquery without alias', () => { const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - t.throws(() => db.select().from(sq).prepare()); + expect(() => db.select().from(sq).prepare()).toThrowError(); }); -test.serial('select count()', async (t) => { - const { db } = t.context; - +test('select count()', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); - 
t.deepEqual(res, [{ count: 2 }]); + expect(res).toEqual([{ count: 2 }]); }); -test.serial('select for ...', (t) => { - const { db } = t.context; - +test('select for ...', () => { { const query = db.select().from(users2Table).for('update').toSQL(); - t.regex(query.sql, / for update$/); + expect(query.sql).toMatch(/ for update$/); } { const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); - t.regex(query.sql, / for share skip locked$/); + expect(query.sql).toMatch(/ for share skip locked$/); } { const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); - t.regex(query.sql, / for update no wait$/); + expect(query.sql).toMatch(/ for update no wait$/); } }); -test.serial('having', async (t) => { - const { db } = t.context; - +test('having', async () => { await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { @@ -1199,7 +1035,7 @@ test.serial('having', async (t) => { .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'LONDON', @@ -1213,9 +1049,7 @@ test.serial('having', async (t) => { ]); }); -test.serial('view', async (t) => { - const { db } = t.context; - +test('view', async () => { const newYorkers1 = mysqlView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); @@ -1243,7 +1077,7 @@ test.serial('view', async (t) => { { const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); @@ -1251,7 +1085,7 @@ test.serial('view', async (t) => { { const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); @@ 
-1259,7 +1093,7 @@ test.serial('view', async (t) => { { const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); @@ -1267,7 +1101,7 @@ test.serial('view', async (t) => { { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ + expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); @@ -1276,9 +1110,7 @@ test.serial('view', async (t) => { await db.execute(sql`drop view ${newYorkers1}`); }); -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - +test('select from raw sql', async () => { const result = await db.select({ id: sql`id`, name: sql`name`, @@ -1286,14 +1118,12 @@ test.serial('select from raw sql', async (t) => { Expect>; - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - +test('select from raw sql with joins', async () => { const result = await db .select({ id: sql`users.id`, @@ -1306,14 +1136,12 @@ test.serial('select from raw sql with joins', async (t) => { Expect>; - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - +test('join on aliased sql from select', async () => { const result = await db .select({ userId: sql`users.id`.as('userId'), @@ -1327,14 +1155,12 @@ test.serial('join on aliased sql from select', async (t) => { Expect>; - t.deepEqual(result, [ + expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - +test('join on aliased sql from with clause', async () => { const users = db.$with('users').as( 
db.select({ id: sql`id`.as('userId'), @@ -1368,14 +1194,12 @@ test.serial('join on aliased sql from with clause', async (t) => { Expect>; - t.deepEqual(result, [ + expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); -test.serial('prefixed table', async (t) => { - const { db } = t.context; - +test('prefixed table', async () => { const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); const users = mysqlTable('test_prefixed_table_with_unique_name', { @@ -1393,24 +1217,20 @@ test.serial('prefixed table', async (t) => { const result = await db.select().from(users); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - +test('orderBy with aliased column', () => { const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - t.deepEqual(query.sql, `select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); + expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); }); -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - +test('timestamp timezone', async () => { const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); await db.insert(usersTable).values({ name: 'With default times' }); @@ -1421,15 +1241,13 @@ test.serial('timestamp timezone', async (t) => { const users = await db.select().from(usersTable); // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.createdAt.getTime() - 
date.getTime()) < 2000); + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); -test.serial('transaction', async (t) => { - const { db } = t.context; - +test('transaction', async () => { const users = mysqlTable('users_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), @@ -1463,12 +1281,10 @@ test.serial('transaction', async (t) => { await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); - t.deepEqual(result, [{ id: 1, balance: 90 }]); + expect(result).toEqual([{ id: 1, balance: 90 }]); }); -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - +test('transaction rollback', async () => { const users = mysqlTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), @@ -1480,22 +1296,21 @@ test.serial('transaction rollback', async (t) => { sql`create table ${users} (id serial not null primary key, balance int not null)`, ); - await t.throwsAsync(async () => + await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); - }), { instanceOf: TransactionRollbackError }); + }); + })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); - t.deepEqual(result, []); + expect(result).toEqual([]); }); -test.serial('nested transaction', async (t) => { - const { db } = t.context; - +test('nested transaction', async () => { const users = mysqlTable('users_nested_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), @@ -1519,12 +1334,10 @@ test.serial('nested transaction', async (t) => { await db.execute(sql`drop table ${users}`); - t.deepEqual(result, [{ id: 1, balance: 200 }]); + expect(result).toEqual([{ id: 1, balance: 200 }]); }); -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - 
+test('nested transaction rollback', async () => { const users = mysqlTable('users_nested_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), @@ -1539,23 +1352,22 @@ test.serial('nested transaction rollback', async (t) => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); - await t.throwsAsync(async () => + await expect((async () => { await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); tx.rollback(); - }), { instanceOf: TransactionRollbackError }); + }); + })()).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); - t.deepEqual(result, [{ id: 1, balance: 100 }]); + expect(result).toEqual([{ id: 1, balance: 100 }]); }); -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - +test('join subquery with join', async () => { const internalStaff = mysqlTable('internal_staff', { userId: int('user_id').notNull(), }); @@ -1595,7 +1407,7 @@ test.serial('join subquery with join', async (t) => { await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); - t.deepEqual(mainQuery, [{ + expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, @@ -1604,9 +1416,7 @@ test.serial('join subquery with join', async (t) => { }]); }); -test.serial('subquery with view', async (t) => { - const { db } = t.context; - +test('subquery with view', async () => { const users = mysqlTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), @@ -1636,15 +1446,13 @@ test.serial('subquery with view', async (t) => { await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); }); -test.serial('join view as 
subquery', async (t) => { - const { db } = t.context; - +test('join view as subquery', async () => { const users = mysqlTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), @@ -1672,7 +1480,7 @@ test.serial('join view as subquery', async (t) => { const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - t.deepEqual(result, [ + expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, @@ -1695,9 +1503,7 @@ test.serial('join view as subquery', async (t) => { await db.execute(sql`drop table ${users}`); }); -test.serial('select iterator', async (t) => { - const { db } = t.context; - +test('select iterator', async () => { const users = mysqlTable('users_iterator', { id: serial('id').primaryKey(), }); @@ -1708,18 +1514,17 @@ test.serial('select iterator', async (t) => { await db.insert(users).values([{}, {}, {}]); const iter = db.select().from(users).iterator(); - const result: InferModel[] = []; + + const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } - t.deepEqual(result, [{ id: 1 }, { id: 2 }, { id: 3 }]); + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); -test.serial('select iterator w/ prepared statement', async (t) => { - const { db } = t.context; - +test('select iterator w/ prepared statement', async () => { const users = mysqlTable('users_iterator', { id: serial('id').primaryKey(), }); @@ -1731,18 +1536,16 @@ test.serial('select iterator w/ prepared statement', async (t) => { const prepared = db.select().from(users).prepare(); const iter = prepared.iterator(); - const result: InferModel[] = []; + const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } - t.deepEqual(result, [{ id: 1 }, { id: 2 }, { id: 3 }]); + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); -test.serial('insert undefined', async (t) => { - const { 
db } = t.context; - +test('insert undefined', async () => { const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name'), @@ -1754,14 +1557,14 @@ test.serial('insert undefined', async (t) => { sql`create table ${users} (id serial not null primary key, name text)`, ); - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); -test.serial('update undefined', async (t) => { - const { db } = t.context; - +test('update undefined', async () => { const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name'), @@ -1773,8 +1576,13 @@ test.serial('update undefined', async (t) => { sql`create table ${users} (id serial not null primary key, name text)`, ); - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); diff --git a/integration-tests/tests/mysql/mysql-proxy.test.ts b/integration-tests/tests/mysql/mysql-proxy.test.ts new file mode 100644 index 000000000..304b32f83 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-proxy.test.ts @@ -0,0 +1,136 @@ +import retry from 'async-retry'; +import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; +import { drizzle as proxyDrizzle } from 'drizzle-orm/mysql-proxy'; +import * as mysql from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach } from 'vitest'; +import { skipTests } from '~/common'; +import { createDockerDB, tests } from './mysql-common'; + +const 
ENABLE_LOGGING = false; + +// TODO +// finish proxied, planetscale and custom mysql tests +// wait for sqlite from Oleksii +// release to beta and check pipeline +// finish returningId +// release everything together with generated + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: mysql.Connection) {} + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + sql, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result[0] as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + sql, + values: params, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result as any }; + } catch (e: any) { + return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('START TRANSACTION'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +let db: MySqlRemoteDatabase; +let client: mysql.Connection; +let serverSimulator: ServerSimulator; + +beforeAll(async () => { + const connectionString = process.env['MYSQL_CONNECTION_STRING'] ??
await createDockerDB(); + client = await retry(async () => { + client = await mysql.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + serverSimulator = new ServerSimulator(client); + db = proxyDrizzle(async (sql, params, method) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from mysql proxy server:', e.message); + throw e; + } + }, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.mysql = { + db, + }; +}); + +skipTests([ + 'select iterator w/ prepared statement', + 'select iterator', + 'nested transaction rollback', + 'nested transaction', + 'transaction rollback', + 'transaction', + 'migrator', +]); + +tests(); diff --git a/integration-tests/tests/mysql/mysql.test.ts b/integration-tests/tests/mysql/mysql.test.ts new file mode 100644 index 000000000..4cf4ca99c --- /dev/null +++ b/integration-tests/tests/mysql/mysql.test.ts @@ -0,0 +1,42 @@ +import retry from 'async-retry'; +import type { MySql2Database } from 'drizzle-orm/mysql2'; +import { drizzle } from 'drizzle-orm/mysql2'; +import * as mysql from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach } from 'vitest'; +import { createDockerDB, tests } from './mysql-common'; + +const ENABLE_LOGGING = false; + +let db: MySql2Database; +let client: mysql.Connection; + +beforeAll(async () => { + const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? 
await createDockerDB(); + client = await retry(async () => { + client = await mysql.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.mysql = { + db, + }; +}); + +tests(); diff --git a/integration-tests/tests/tidb-serverless.test.ts b/integration-tests/tests/mysql/tidb-serverless.test.ts similarity index 99% rename from integration-tests/tests/tidb-serverless.test.ts rename to integration-tests/tests/mysql/tidb-serverless.test.ts index 7e6de0fed..05199e836 100644 --- a/integration-tests/tests/tidb-serverless.test.ts +++ b/integration-tests/tests/mysql/tidb-serverless.test.ts @@ -63,7 +63,7 @@ import type { TiDBServerlessDatabase } from 'drizzle-orm/tidb-serverless'; import { drizzle } from 'drizzle-orm/tidb-serverless'; import { migrate } from 'drizzle-orm/tidb-serverless/migrator'; import { beforeAll, beforeEach, expect, test } from 'vitest'; -import { type Equal, Expect, toLocalDate } from './utils.ts'; +import { type Equal, Expect, toLocalDate } from '../utils.ts'; const ENABLE_LOGGING = false; diff --git a/integration-tests/tests/neon-http-batch.test.ts b/integration-tests/tests/neon-http-batch.test.ts deleted file mode 100644 index 08c4bc02e..000000000 --- a/integration-tests/tests/neon-http-batch.test.ts +++ /dev/null @@ -1,556 +0,0 @@ -import 'dotenv/config'; -import { neon } from '@neondatabase/serverless'; -import type { FullQueryResults, NeonQueryFunction } from '@neondatabase/serverless'; -import type { InferSelectModel } from 'drizzle-orm'; -import { eq, relations, sql } from 'drizzle-orm'; -import { drizzle } from 'drizzle-orm/neon-http'; -import type { NeonHttpDatabase, NeonHttpQueryResult } from 'drizzle-orm/neon-http'; -import { type AnyPgColumn, 
integer, pgTable, primaryKey, serial, text, timestamp } from 'drizzle-orm/pg-core'; -import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; - -const ENABLE_LOGGING = false; - -export const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified').notNull().default(0), - invitedBy: integer('invited_by').references((): AnyPgColumn => usersTable.id), -}); -export const usersConfig = relations(usersTable, ({ one, many }) => ({ - invitee: one(usersTable, { - fields: [usersTable.invitedBy], - references: [usersTable.id], - }), - usersToGroups: many(usersToGroupsTable), - posts: many(postsTable), -})); - -export const groupsTable = pgTable('groups', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - description: text('description'), -}); -export const groupsConfig = relations(groupsTable, ({ many }) => ({ - usersToGroups: many(usersToGroupsTable), -})); - -export const usersToGroupsTable = pgTable( - 'users_to_groups', - { - id: serial('id'), - userId: integer('user_id').notNull().references(() => usersTable.id), - groupId: integer('group_id').notNull().references(() => groupsTable.id), - }, - (t) => ({ - pk: primaryKey({ columns: [t.userId, t.groupId] }), - }), -); -export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ - group: one(groupsTable, { - fields: [usersToGroupsTable.groupId], - references: [groupsTable.id], - }), - user: one(usersTable, { - fields: [usersToGroupsTable.userId], - references: [usersTable.id], - }), -})); - -export const postsTable = pgTable('posts', { - id: serial('id').primaryKey(), - content: text('content').notNull(), - ownerId: integer('owner_id').references(() => usersTable.id), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); -export const postsConfig = relations(postsTable, ({ one, many }) => ({ - author: one(usersTable, { - fields: [postsTable.ownerId], - references: 
[usersTable.id], - }), - comments: many(commentsTable), -})); - -export const commentsTable = pgTable('comments', { - id: serial('id').primaryKey(), - content: text('content').notNull(), - creator: integer('creator').references(() => usersTable.id), - postId: integer('post_id').references(() => postsTable.id), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); -export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ - post: one(postsTable, { - fields: [commentsTable.postId], - references: [postsTable.id], - }), - author: one(usersTable, { - fields: [commentsTable.creator], - references: [usersTable.id], - }), - likes: many(commentLikesTable), -})); - -export const commentLikesTable = pgTable('comment_likes', { - id: serial('id').primaryKey(), - creator: integer('creator').references(() => usersTable.id), - commentId: integer('comment_id').references(() => commentsTable.id), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); -export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ - comment: one(commentsTable, { - fields: [commentLikesTable.commentId], - references: [commentsTable.id], - }), - author: one(usersTable, { - fields: [commentLikesTable.creator], - references: [usersTable.id], - }), -})); - -const schema = { - usersTable, - postsTable, - commentsTable, - usersToGroupsTable, - groupsTable, - commentLikesConfig, - commentsConfig, - postsConfig, - usersToGroupsConfig, - groupsConfig, - usersConfig, -}; - -declare module 'vitest' { - export interface TestContext { - neonHttpDb: NeonHttpDatabase; - neonHttpClient: NeonQueryFunction; - } -} - -let db: NeonHttpDatabase; -let client: NeonQueryFunction; - -beforeAll(async () => { - const connectionString = process.env['NEON_CONNECTION_STRING']; - if (!connectionString) { - throw new Error('NEON_CONNECTION_STRING is not defined'); - } - - client = neon(connectionString); - db = drizzle(client, { schema, logger: ENABLE_LOGGING }); -}); - 
-beforeEach(async (ctx) => { - ctx.neonHttpDb = db; - ctx.neonHttpClient = client; - - await db.execute(sql`drop table if exists comment_likes`); - await db.execute(sql`drop table if exists comments`); - await db.execute(sql`drop table if exists posts`); - await db.execute(sql`drop table if exists users_to_groups`); - await db.execute(sql`drop table if exists groups`); - await db.execute(sql`drop table if exists users`); - - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified int not null default 0, - invited_by int references users(id) - ) - `, - ); - await db.execute( - sql` - create table groups ( - id serial primary key, - name text not null, - description text - ) - `, - ); - await db.execute( - sql` - create table users_to_groups ( - id serial, - user_id int not null references users(id), - group_id int not null references groups(id), - primary key (user_id, group_id) - ) - `, - ); - await db.execute( - sql` - create table posts ( - id serial primary key, - content text not null, - owner_id int references users(id), - created_at timestamp not null default now() - ) - `, - ); - await db.execute( - sql` - create table comments ( - id serial primary key, - content text not null, - creator int references users(id), - post_id int references posts(id), - created_at timestamp not null default now() - ) - `, - ); - await db.execute( - sql` - create table comment_likes ( - id serial primary key, - creator int references users(id), - comment_id int references comments(id), - created_at timestamp not null default now() - ) - `, - ); -}); - -afterAll(async () => { - await db.execute(sql`drop table if exists comment_likes`); - await db.execute(sql`drop table if exists comments`); - await db.execute(sql`drop table if exists posts`); - await db.execute(sql`drop table if exists users_to_groups`); - await db.execute(sql`drop table if exists groups`); - await db.execute(sql`drop table if exists users`); -}); - -test('batch 
api example', async () => { - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ - id: usersTable.id, - invitedBy: usersTable.invitedBy, - }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db.select().from(usersTable), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - invitedBy: number | null; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - ]>(); - - expect(batchResponse.length).eq(3); - - expect(batchResponse[0]).toEqual([{ - id: 1, - invitedBy: null, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); -}); - -// batch api only relational many -test('insert + findMany', async () => { - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db.query.usersTable.findMany({}), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - ]>(); - - expect(batchResponse.length).eq(3); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); -}); - -// batch api relational many + one -test('insert + findMany + findFirst', async () => { - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - 
db.query.usersTable.findMany({}), - db.query.usersTable.findFirst({}), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - } | undefined, - ]>(); - - expect(batchResponse.length).eq(4); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual( - { id: 1, name: 'John', verified: 0, invitedBy: null }, - ); -}); - -test('insert + db.execute', async () => { - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.execute(sql`insert into users (id, name) values (2, 'Dan')`), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - FullQueryResults, - ]>(); - - expect(batchResponse.length).eq(2); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rowAsArray: false, rows: [], rowCount: 1 }); -}); - -// batch api combined rqb + raw call -test('insert + findManyWith + db.all', async () => { - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db.query.usersTable.findMany({}), - db.execute(sql`select * from users`), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - NeonHttpQueryResult<{ - id: number; - name: string; - verified: number; - invitedBy: number | null; - }>, - 
]>(); - - expect(batchResponse.length).eq(4); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rowAsArray: true, rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toMatchObject({ - rows: [ - { id: 1, name: 'John', verified: 0, invited_by: null }, - { id: 2, name: 'Dan', verified: 0, invited_by: null }, - ], - }); -}); - -// batch api for insert + update + select -test('insert + update + select + select partial', async () => { - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), - db.query.usersTable.findMany({}), - db.select().from(usersTable).where(eq(usersTable.id, 1)), - db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - invitedBy: number | null; - }[], - ]>(); - - expect(batchResponse.length).eq(5); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual([ - { id: 1, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[4]).toEqual([ - { id: 1, invitedBy: null }, - ]); -}); - -// batch api for insert + delete + select -test('insert + delete + select + select partial', async () => { - const batchResponse = await db.batch([ - 
db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy }), - db.query.usersTable.findFirst({ - columns: { - id: true, - invitedBy: true, - }, - }), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - invitedBy: number | null; - }[], - { - id: number; - invitedBy: number | null; - } | undefined, - ]>(); - - expect(batchResponse.length).eq(4); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual( - { id: 2, invitedBy: null }, - ); -}); - -test('select raw', async () => { - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Dan' }]); - const batchResponse = await db.batch([ - db.execute>(sql`select * from users`), - db.execute>(sql`select * from users where id = 1`), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - NeonHttpQueryResult<{ - id: number; - name: string; - verified: number; - invited_by: number | null; - }>, - NeonHttpQueryResult<{ - id: number; - name: string; - verified: number; - invited_by: number | null; - }>, - ]>(); - - expect(batchResponse.length).eq(2); - - expect(batchResponse[0]).toMatchObject({ - rows: [ - { id: 1, name: 'John', verified: 0, invited_by: null }, - { id: 2, name: 'Dan', verified: 0, invited_by: null }, - ], - }); - - expect(batchResponse[1]).toMatchObject({ - rows: [ - { id: 1, name: 'John', verified: 0, invited_by: null }, - ], - }); -}); - -// * additionally -// batch for all neon cases, just replace simple calls with batch calls -// batch for all rqb cases, just replace simple calls with batch calls diff --git 
a/integration-tests/tests/neon-http.test.ts b/integration-tests/tests/neon-http.test.ts deleted file mode 100644 index 0b12131e6..000000000 --- a/integration-tests/tests/neon-http.test.ts +++ /dev/null @@ -1,2708 +0,0 @@ -import 'dotenv/config'; - -import { neon, type NeonQueryFunction } from '@neondatabase/serverless'; -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - eq, - gt, - gte, - inArray, - lt, - name, - placeholder, - type SQL, - sql, - type SQLWrapper, - TransactionRollbackError, -} from 'drizzle-orm'; -import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; -import { migrate } from 'drizzle-orm/neon-http/migrator'; -import { - alias, - boolean, - char, - cidr, - date, - getMaterializedViewConfig, - getViewConfig, - inet, - integer, - interval, - jsonb, - macaddr, - macaddr8, - type PgColumn, - pgEnum, - pgMaterializedView, - pgTable, - pgTableCreator, - pgView, - serial, - text, - time, - timestamp, - uuid as pgUuid, - varchar, -} from 'drizzle-orm/pg-core'; -import pg from 'pg'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, randomString } from './utils.ts'; - -const { Client } = pg; - -const ENABLE_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - 
categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const network = pgTable('network_table', { - inet: inet('inet').notNull(), - cidr: cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), - macaddr8: macaddr8('macaddr8').notNull(), -}); - -const salEmp = pgTable('sal_emp', { - name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const _tictactoe = pgTable('tictactoe', { - squares: integer('squares').array(3).array(3), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -interface Context { - db: NeonHttpDatabase; - ddlRunner: pg.Client; - client: NeonQueryFunction; -} - -const test = anyTest as TestFn; - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['NEON_CONNECTION_STRING']; - if (!connectionString) { - throw new Error('NEON_CONNECTION_STRING is not defined'); - } - - ctx.client = neon(connectionString); - ctx.ddlRunner = new Client(connectionString); - await ctx.ddlRunner.connect(); - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.ddlRunner?.end().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - // await ctx.ddlRunner.query(`drop schema public cascade`); - // await ctx.ddlRunner.query(`create schema public`); - await ctx.db.execute(sql`drop table if exists users cascade`); - await ctx.db.execute(sql`drop 
table if exists cities cascade`); - await ctx.db.execute(sql`drop table if exists users2 cascade`); - await ctx.db.execute(sql`drop table if exists course_categories cascade`); - await ctx.db.execute(sql`drop table if exists courses cascade`); - await ctx.db.execute(sql`drop table if exists orders cascade`); - await ctx.db.execute(sql`drop table if exists network_table cascade`); - await ctx.db.execute(sql`drop table if exists sal_emp cascade`); - await ctx.db.execute(sql`drop table if exists tictactoe cascade`); - - await ctx.ddlRunner.query( - ` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table course_categories ( - id serial primary key, - name text not null - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table sal_emp ( - name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - ); - await ctx.ddlRunner.query( - ` - create table tictactoe ( - squares integer[3][3] not null - ) - `, - ); -}); - 
-test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 1000); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, 
- ).orderBy(usersDistinctTable.name); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 1000); - t.deepEqual(users, [ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - 
-test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 1000); - t.deepEqual(users, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = 
t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('char insert', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Austin', state: 'TX' }]); -}); - -test.serial('char update', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Atlanta', state: 'GA' }]); -}); - -test.serial('char delete', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, []); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 
'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with 
group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = 
alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - 
.leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db - .insert(usersTable) - .values({ - verified: true, - name: placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 
'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -test.serial('migrator : default migration strategy', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); 
- -test.serial('migrator : migrate with custom schema', async (t) => { - const { db } = t.context; - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); 
- await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test.serial('migrator : migrate with custom table and custom schema', async (t) => { - const { db } = t.context; - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, - }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute( - sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - t.deepEqual(result.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name( - usersTable.name.name, - ) - }) values (${'John'}) returning 
${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - 
}); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: 
cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select() - .from(users2Table) - 
.leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - state: null, - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db - .insert(courseCategoriesTable) - .values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db - .insert(coursesTable) - .values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... 
select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ 
name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare('query')); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: '2' }]); -}); - -test.serial('select count w/ custom mapper', async (t) => { - const { db } = t.context; - - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('network types', async (t) => { - const { db } = t.context; - - const value: typeof network.$inferSelect = { - inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - }; - - await db.insert(network).values(value); - - const res = await 
db.select().from(network); - - t.deepEqual(res, [value]); -}); - -test.serial('array types', async (t) => { - const { db } = t.context; - - const values: typeof salEmp.$inferSelect[] = [ - { - name: 'John', - payByQuarter: [10000, 10000, 10000, 10000], - schedule: [['meeting', 'lunch'], ['training', 'presentation']], - }, - { - name: 'Carol', - payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], - }, - ]; - - await db.insert(salEmp).values(values); - - const res = await db.select().from(salEmp); - - t.deepEqual(res, values); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db - .select() - .from(users2Table) - .for('update') - .toSQL(); - - t.regex( - query.sql, - / for update$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('update', { of: [users2Table, coursesTable] }) - .toSQL(); - - t.regex( - query.sql, - / for update of "users2", "courses"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table }) - .toSQL(); - - t.regex( - query.sql, - /for no key update of "users2"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table, skipLocked: true }) - .toSQL(); - - t.regex( - query.sql, - / for no key update of "users2" skip locked$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('share', { of: users2Table, noWait: true }) - .toSQL(); - - t.regex( - query.sql, - // eslint-disable-next-line unicorn/better-regex - /for share of "users2" no wait$/, - ); - } -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - 
.select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = 
await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('materialized view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgMaterializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - 
await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -// TODO: copy to SQLite and MySQL, add to docs -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - 
}).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = 
pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await 
db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('select from sql', async (t) => { - const { db } = t.context; - - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - await t.notThrowsAsync(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), 
- lt(metricEntry.createdAt, intervals.endTime), - ), - ) - .groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 2000); - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 2000); - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('all date and time columns', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 
}).notNull(), - datetime: timestamp('datetime').notNull(), - datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), - datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - interval: interval('interval').notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null, - datetime_wtz_string timestamp with time zone not null, - interval interval not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - datetimeWTZString: '2022-01-01T00:00:00.123Z', - interval: '1 day', - }); - - const result = await db.select().from(table); - - Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - }[], typeof result> - >; - - Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - id?: number | undefined; - }, typeof table.$inferInsert> - >; - - 
t.deepEqual(result.length, 1); - - t.like(result[0], { - id: 1, - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' '), - datetimeWTZString: '2022-01-01 00:00:00.123+00', - interval: { - days: 1, - }, - }); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - timestampAsDate: timestamp('timestamp_date', { withTimezone: true, precision: 3 }).notNull(), - timestampTimeZones: timestamp('timestamp_date_2', { withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null, - timestamp_date timestamp(3) with time zone not null, - timestamp_date_2 timestamp(3) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - const timestampDate = new Date(); - const timestampDateWTZ = new Date('2022-01-01 00:00:00.123 +0500'); - - const timestampString2 = '2022-01-01 00:00:00.123456-0400'; - const timestampDate2 = new Date(); - const timestampDateWTZ2 = new Date('2022-01-01 00:00:00.123 +0200'); - - await db.insert(table).values([ - { timestamp: timestampString, timestampAsDate: timestampDate, timestampTimeZones: timestampDateWTZ }, - { timestamp: timestampString2, timestampAsDate: timestampDate2, timestampTimeZones: timestampDateWTZ2 }, - ]); - - const result = await db.select().from(table); - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - timestamp_date: string; - timestamp_date_2: string; - 
}>(sql`select * from ${table}`); - - // Whatever you put in, you get back when you're using the date mode - // But when using the string mode, postgres returns a string transformed into UTC - t.deepEqual(result, [ - { - id: 1, - timestamp: '2022-01-01 02:00:00.123456+00', - timestampAsDate: timestampDate, - timestampTimeZones: timestampDateWTZ, - }, - { - id: 2, - timestamp: '2022-01-01 04:00:00.123456+00', - timestampAsDate: timestampDate2, - timestampTimeZones: timestampDateWTZ2, - }, - ]); - - t.deepEqual(result2.rows, [ - { - id: 1, - timestamp_string: '2022-01-01 02:00:00.123456+00', - timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') + '+00', - }, - { - id: 2, - timestamp_string: '2022-01-01 04:00:00.123456+00', - timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') + '+00', - }, - ]); - - t.deepEqual( - result[0]?.timestampTimeZones.getTime(), - new Date((result2.rows[0] as any).timestamp_date_2 as any).getTime(), - ); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestampString: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - timestampString2: timestamp('timestamp_string2', { precision: 3, mode: 'string' }).notNull(), - timestampDate: timestamp('timestamp_date', { precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null, - timestamp_string2 timestamp(3) not null, - timestamp_date timestamp(3) not null - ) - `); - - const timestampString = 
'2022-01-01 00:00:00.123456'; - const timestampString2 = '2022-01-02 00:00:00.123 -0300'; - const timestampDate = new Date('2022-01-01 00:00:00.123Z'); - - const timestampString_2 = '2022-01-01 00:00:00.123456'; - const timestampString2_2 = '2022-01-01 00:00:00.123 -0300'; - const timestampDate2 = new Date('2022-01-01 00:00:00.123 +0200'); - - await db.insert(table).values([ - { timestampString, timestampString2, timestampDate }, - { timestampString: timestampString_2, timestampString2: timestampString2_2, timestampDate: timestampDate2 }, - ]); - - const result = await db.select().from(table); - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - timestamp_string2: string; - timestamp_date: string; - }>(sql`select * from ${table}`); - - // Whatever you put in, you get back when you're using the date mode - // But when using the string mode, postgres returns a string transformed into UTC - t.deepEqual(result, [ - { - id: 1, - timestampString: timestampString, - timestampString2: '2022-01-02 00:00:00.123', - timestampDate: timestampDate, - }, - { - id: 2, - timestampString: timestampString_2, - timestampString2: '2022-01-01 00:00:00.123', - timestampDate: timestampDate2, - }, - ]); - - t.deepEqual(result2.rows, [ - { - id: 1, - timestamp_string: timestampString, - timestamp_string2: '2022-01-02 00:00:00.123', - timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', ''), - }, - { - id: 2, - timestamp_string: timestampString_2, - timestamp_string2: '2022-01-01 00:00:00.123', - timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', ''), - }, - ]); - - t.deepEqual((result2.rows[0] as any).timestamp_string, '2022-01-01 00:00:00.123456'); - // need to add the 'Z', otherwise javascript assumes it's in local time - t.deepEqual(new Date((result2.rows[0] as any).timestamp_date + 'Z' as any).getTime(), timestampDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - 
-test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - - const error = await t.throwsAsync(() => - db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }) - ); - - t.is(error!.message, 'No transactions support in neon-http driver'); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial.skip('transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - await 
db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, - ); - - const error = await t.throwsAsync(() => - db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }) - ); - - t.is(error!.message, 'No transactions support in neon-http driver'); - - // const result = await db.select().from(users); - - // t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial.skip('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await 
db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = pgTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists 
${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - 
users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('table selection with single table', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - - await db.insert(users).values({ name: 'John', cityId: 1 }); - - const result = await db.select({ users }).from(users); - - t.deepEqual(result, [{ users: { id: 1, name: 'John', cityId: 1 } }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('set null to jsonb field', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - jsonb: jsonb('jsonb'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, - ); - - const result = await db.insert(users).values({ jsonb: null }).returning(); - - t.deepEqual(result, [{ id: 1, jsonb: null }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table 
${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('array operators', async (t) => { - const { db } = t.context; - - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - }); - - await db.execute(sql`drop table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, - db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - t.deepEqual(contains, [{ id: 3 }, { id: 5 }]); - t.deepEqual(contained, [{ id: 1 }, { id: 2 }, { id: 3 }]); - t.deepEqual(overlaps, [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - t.deepEqual(withSubQuery, [{ id: 1 
}, { id: 3 }, { id: 5 }]); -}); diff --git a/integration-tests/tests/pg-proxy.test.ts b/integration-tests/tests/pg-proxy.test.ts deleted file mode 100644 index c7e87bed7..000000000 --- a/integration-tests/tests/pg-proxy.test.ts +++ /dev/null @@ -1,2937 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - eq, - gt, - gte, - inArray, - lt, - name, - placeholder, - type SQL, - sql, - type SQLWrapper, -} from 'drizzle-orm'; -import { - alias, - boolean, - char, - cidr, - date, - getMaterializedViewConfig, - getTableConfig, - getViewConfig, - inet, - integer, - interval, - jsonb, - macaddr, - macaddr8, - type PgColumn, - pgEnum, - pgMaterializedView, - pgTable, - pgTableCreator, - pgView, - serial, - text, - time, - timestamp, - unique, - uniqueKeyName, - uuid as pgUuid, - varchar, -} from 'drizzle-orm/pg-core'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/pg-proxy'; -import type { PgRemoteDatabase } from 'drizzle-orm/pg-proxy'; -import { migrate } from 'drizzle-orm/pg-proxy/migrator'; -import getPort from 'get-port'; -import pg from 'pg'; -import { v4 as uuid } from 'uuid'; -import type { Equal } from './utils.ts'; -import { Expect } from './utils.ts'; - -// eslint-disable-next-line drizzle/require-entity-kind -class ServerSimulator { - constructor(private db: pg.Client) { - const { types } = pg; - - types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); - types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); - types.setTypeParser(types.builtins.DATE, (val) => val); - types.setTypeParser(types.builtins.INTERVAL, (val) => val); - } - - async query(sql: string, params: any[], method: 'all' | 'execute') { - if (method === 'all') { - try { - const result = await this.db.query({ - text: sql, - values: params, - rowMode: 'array', - }); - - return { data: result.rows as any }; - } catch (e: any) { 
- return { error: e }; - } - } else if (method === 'execute') { - try { - const result = await this.db.query({ - text: sql, - values: params, - }); - - return { data: result.rows as any }; - } catch (e: any) { - return { error: e }; - } - } else { - return { error: 'Unknown method value' }; - } - } - - async migrations(queries: string[]) { - await this.db.query('BEGIN'); - try { - for (const query of queries) { - await this.db.query(query); - } - await this.db.query('COMMIT'); - } catch (e) { - await this.db.query('ROLLBACK'); - throw e; - } - - return {}; - } -} - -const { Client } = pg; - -const ENABLE_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id' as string).primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const network = pgTable('network_table', { - inet: inet('inet').notNull(), - cidr: cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), 
- macaddr8: macaddr8('macaddr8').notNull(), -}); - -const salEmp = pgTable('sal_emp', { - name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -interface Context { - docker: Docker; - pgContainer: Docker.Container; - db: PgRemoteDatabase; - client: pg.Client; - serverSimulator: ServerSimulator; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - ctx.pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
(await createDockerDB(ctx)); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = new Client(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); - throw lastError; - } - - ctx.serverSimulator = new ServerSimulator(ctx.client); - - ctx.db = proxyDrizzle(async (sql, params, method) => { - try { - const response = await ctx.serverSimulator.query(sql, params, method); - - if (response.error !== undefined) { - throw response.error; - } - - return { rows: response.data }; - } catch (e: any) { - console.error('Error from pg proxy server:', e.message); - throw e; - } - }, { - logger: ENABLE_LOGGING, - }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema public cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.db.execute( - sql` - create table cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - ); - await ctx.db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table course_categories ( - id serial primary key, - name text not null - ) - 
`, - ); - await ctx.db.execute( - sql` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - ); - await ctx.db.execute( - sql` - create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null - ) - `, - ); - await ctx.db.execute( - sql` - create table sal_emp ( - name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - ); - await ctx.db.execute( - sql` - create table tictactoe ( - squares integer[3][3] not null - ) - `, - ); -}); - -test.serial('table configs: unique third param', async (t) => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), - f1: unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - t.assert(tableConfig.uniqueConstraints.length === 2); - - t.assert(tableConfig.uniqueConstraints[0]?.name === 'custom_name'); - t.assert(tableConfig.uniqueConstraints[0]?.nullsNotDistinct); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); - - t.assert(tableConfig.uniqueConstraints[1]?.name, 'custom_name1'); - t.assert(!tableConfig.uniqueConstraints[1]?.nullsNotDistinct); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); -}); - -test.serial('table configs: unique in column', async (t) => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: char('state', { length: 2 }).unique('custom'), - 
field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - t.assert(columnName?.uniqueName === uniqueKeyName(cities1Table, [columnName!.name])); - t.assert(columnName?.isUnique); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - t.assert(columnState?.uniqueName === 'custom'); - t.assert(columnState?.isUnique); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - t.assert(columnField?.uniqueName === 'custom_field'); - t.assert(columnField?.isUnique); - t.assert(columnField?.uniqueType === 'not distinct'); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; - - const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, 
quantity: 1 }) - .returning(); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(insertedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', 
async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line 
no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('char insert', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Austin', state: 'TX' }]); -}); - -test.serial('char update', async (t) => { - const { db } = t.context; - 
- await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Atlanta', state: 'GA' }]); -}); - -test.serial('char delete', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, []); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: 
['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - 
t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: 
serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ 
- id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db - .insert(usersTable) - .values({ - verified: true, - name: placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - 
- t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -// TODO change tests to new structure -test.serial('migrator', async (t) => { - const { db, serverSimulator } = t.context; - - await db.execute(sql`drop table if exists users`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, async (queries) => { - try { - await serverSimulator.migrations(queries); - } catch (e) { - console.error(e); - throw new Error('Proxy server cannot run migrations'); - } - }, { migrationsFolder: './drizzle2/pg-proxy/first' }); - - await t.notThrowsAsync(async () => { - await db.insert(usersTable).values({ name: 'John' }); - }); - - await t.throwsAsync(async () => { - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - }, { - message: 'relation "users12" does not exist', - }); - - await migrate(db, async (queries) => { - try { - await serverSimulator.migrations(queries); - } catch (e) { - console.error(e); - throw new Error('Proxy server cannot run migrations'); - } - }, { migrationsFolder: './drizzle2/pg-proxy/second' }); - - await t.notThrowsAsync(async () => { - await db.insert(usersTable).values({ name: 'John' }); - }); - - await t.notThrowsAsync(async () => { - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - }); - - await db.execute(sql`drop table users`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table 
"drizzle"."__drizzle_migrations"`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute( - sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted, [{ id: 1, name: 'John' }]); -}); - -test.serial('Query check: Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', - params: [], - }); -}); - -test.serial('Query check: Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, 
default), (default, default, default)', - params: [], - }); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: 
['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await 
db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - 
id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select() - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - state: null, - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db - .insert(courseCategoriesTable) - .values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db - .insert(coursesTable) - .values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - 
.leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - 
productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare('query')); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: '2' }]); -}); - -test.serial('select count w/ custom mapper', async (t) => { - const { db } = t.context; - - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - t.deepEqual(res, [{ count: 
2 }]); -}); - -test.serial('network types', async (t) => { - const { db } = t.context; - - const value: typeof network.$inferSelect = { - inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - }; - - await db.insert(network).values(value); - - const res = await db.select().from(network); - - t.deepEqual(res, [value]); -}); - -test.serial('array types', async (t) => { - const { db } = t.context; - - const values: typeof salEmp.$inferSelect[] = [ - { - name: 'John', - payByQuarter: [10000, 10000, 10000, 10000], - schedule: [['meeting', 'lunch'], ['training', 'presentation']], - }, - { - name: 'Carol', - payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], - }, - ]; - - await db.insert(salEmp).values(values); - - const res = await db.select().from(salEmp); - - t.deepEqual(res, values); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db - .select() - .from(users2Table) - .for('update') - .toSQL(); - - t.regex( - query.sql, - / for update$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('update', { of: [users2Table, coursesTable] }) - .toSQL(); - - t.regex( - query.sql, - / for update of "users2", "courses"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table }) - .toSQL(); - - t.regex( - query.sql, - /for no key update of "users2"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table, skipLocked: true }) - .toSQL(); - - t.regex( - query.sql, - / for no key update of "users2" skip locked$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('share', { of: users2Table, noWait: true }) - .toSQL(); - - t.regex( - query.sql, - // eslint-disable-next-line unicorn/better-regex - /for share of "users2" no wait$/, - ); - } -}); - 
-test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = 
await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('materialized view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgMaterializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); 
- } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -// TODO: copy to SQLite and MySQL, add to docs -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = 
db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 
'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as 
enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('select from sql', async (t) => { - const { db } = t.context; - - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - await t.notThrowsAsync(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), 
- lt(metricEntry.createdAt, intervals.endTime), - ), - ) - .groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 2000); - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 2000); - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('all date and time columns', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 
}).notNull(), - datetime: timestamp('datetime').notNull(), - datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), - datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - interval: interval('interval').notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null, - datetime_wtz_string timestamp with time zone not null, - interval interval not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456Z'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision, - datetimeWTZString: '2022-01-01T00:00:00.123Z', - interval: '1 day', - }); - - const result = await db.select().from(table); - - Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - }[], typeof result> - >; - - Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - id?: number | undefined; - }, typeof table.$inferInsert> - >; - - t.deepEqual(result, [ - { - id: 1, - 
dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - datetimeWTZString: '2022-01-01 00:00:00.123+00', - interval: '1 day', - }, - ]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone second case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date(); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as date and check that timezones are the same - // There is no way to check timezone in Date object, as it is always represented internally in UTC - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: insertedDate }]); - - // 3. 
Compare both dates - t.deepEqual(insertedDate.getTime(), result[0]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC - const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones - - // 1. Insert date as new dates with different time zones - await db.insert(table).values([ - { timestamp: insertedDate }, - { timestamp: insertedDate2 }, - ]); - - // 2, Select and compare both dates - const result = await db.select().from(table); - - t.deepEqual(result[0]?.timestamp.getTime(), result[1]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone first case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. 
Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); - - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual([...result2], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone second case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. 
Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual([...result], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3. 
Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - t.deepEqual(new Date(result[0]!.timestamp_string + 'Z').getTime(), insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -// TODO: implement transaction -// test.serial('transaction', async (t) => { -// const { db } = t.context; - -// const users = pgTable('users_transactions', { -// id: serial('id').primaryKey(), -// balance: integer('balance').notNull(), -// }); -// const products = pgTable('products_transactions', { -// id: serial('id').primaryKey(), -// price: integer('price').notNull(), -// stock: integer('stock').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); -// await db.execute(sql`drop table if exists ${products}`); - -// await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); -// await db.execute( -// sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, -// ); - -// const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); -// const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - -// await db.transaction(async (tx) => { -// await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); -// await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); -// }); - -// const result = await db.select().from(users); - -// t.deepEqual(result, [{ id: 1, balance: 90 }]); - -// await db.execute(sql`drop table ${users}`); -// await db.execute(sql`drop table ${products}`); -// }); - -// TODO: implement transaction -// test.serial('transaction rollback', async (t) => { - -// const { db } = t.context; - -// const users = pgTable('users_transactions_rollback', { -// id: serial('id').primaryKey(), -// balance: 
integer('balance').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); - -// await db.execute( -// sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, -// ); - -// await t.throwsAsync(async () => -// await db.transaction(async (tx) => { -// await tx.insert(users).values({ balance: 100 }); -// tx.rollback(); -// }), new TransactionRollbackError()); - -// const result = await db.select().from(users); - -// t.deepEqual(result, []); - -// await db.execute(sql`drop table ${users}`); -// }); - -// TODO: implement transaction -// test.serial('nested transaction', async (t) => { -// const { db } = t.context; - -// const users = pgTable('users_nested_transactions', { -// id: serial('id').primaryKey(), -// balance: integer('balance').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); - -// await db.execute( -// sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, -// ); - -// await db.transaction(async (tx) => { -// await tx.insert(users).values({ balance: 100 }); - -// await tx.transaction(async (tx) => { -// await tx.update(users).set({ balance: 200 }); -// }); -// }); - -// const result = await db.select().from(users); - -// t.deepEqual(result, [{ id: 1, balance: 200 }]); - -// await db.execute(sql`drop table ${users}`); -// }); - -// TODO: implement transaction -// test.serial('nested transaction rollback', async (_t) => { -// const { db } = t.context; - -// const users = pgTable('users_nested_transactions_rollback', { -// id: serial('id').primaryKey(), -// balance: integer('balance').notNull(), -// }); - -// await db.execute(sql`drop table if exists ${users}`); - -// await db.execute( -// sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, -// ); - -// await db.transaction(async (tx) => { -// await tx.insert(users).values({ balance: 100 }); - -// await 
t.throwsAsync(async () => -// await tx.transaction(async (tx) => { -// await tx.update(users).set({ balance: 200 }); -// tx.rollback(); -// }), new TransactionRollbackError()); -// }); - -// const result = await db.select().from(users); - -// t.deepEqual(result, [{ id: 1, balance: 100 }]); - -// await db.execute(sql`drop table ${users}`); -// }); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = pgTable('users_subquery_view', { - id: 
serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - 
const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('table selection with single table', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - - await db.insert(users).values({ name: 'John', cityId: 1 }); - - const result = await db.select({ users }).from(users); - - t.deepEqual(result, [{ users: { id: 1, name: 'John', cityId: 1 } }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('set null to jsonb field', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - jsonb: jsonb('jsonb'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, - ); - - const result = await db.insert(users).values({ jsonb: null }).returning(); - - t.deepEqual(result, [{ id: 1, jsonb: null }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: 
text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('array operators', async (t) => { - const { db } = t.context; - - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - }); - - await db.execute(sql`drop table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, 
- db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - t.deepEqual(contains, [{ id: 3 }, { id: 5 }]); - t.deepEqual(contained, [{ id: 1 }, { id: 2 }, { id: 3 }]); - t.deepEqual(overlaps, [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - t.deepEqual(withSubQuery, [{ id: 1 }, { id: 3 }, { id: 5 }]); -}); diff --git a/integration-tests/tests/pg-schema.test.ts b/integration-tests/tests/pg-schema.test.ts deleted file mode 100644 index 9194d14e0..000000000 --- a/integration-tests/tests/pg-schema.test.ts +++ /dev/null @@ -1,994 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; -import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; -import { drizzle } from 'drizzle-orm/node-postgres'; -import { - alias, - boolean, - char, - getMaterializedViewConfig, - getViewConfig, - integer, - jsonb, - PgDialect, - pgSchema, - pgTable, - pgTableCreator, - serial, - text, - timestamp, -} from 'drizzle-orm/pg-core'; -import getPort from 'get-port'; -import pg from 'pg'; -import { v4 as uuid } from 'uuid'; - -const { Client } = pg; - -const ENABLE_LOGGING = false; - -const mySchema = pgSchema('mySchema'); - -const usersTable = mySchema.table('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const citiesTable = mySchema.table('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const users2Table = mySchema.table('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const publicUsersTable = pgTable('users', { - id: 
serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -interface Context { - docker: Docker; - pgContainer: Docker.Container; - db: NodePgDatabase; - client: pg.Client; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - ctx.pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
await createDockerDB(ctx); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = new Client(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema if exists public cascade`); - await ctx.db.execute(sql`drop schema if exists "mySchema" cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql`create schema "mySchema"`, - ); - await ctx.db.execute( - sql` - create table "mySchema".users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.db.execute( - sql` - create table "mySchema".cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - ); - await ctx.db.execute( - sql` - create table "mySchema".users2 ( - id serial primary key, - name text not null, - city_id integer references "mySchema".cities(id) - ) - `, - ); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line 
no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - 
t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - 
-test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - 
await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by 
as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "mySchema"."users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = 
t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - 
-test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', 
verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "mySchema"."users"`); - t.deepEqual(result.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute>( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: 
usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing(); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('select from tables with same name from different schema using alias', async (t) => { - const { db } = t.context; - - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - - await db.insert(usersTable).values({ id: 10, name: 'Ivan' }); - await db.insert(publicUsersTable).values({ id: 11, name: 'Hans' }); - - const customerAlias = alias(publicUsersTable, 'customer'); - - const result = await db - .select().from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(customerAlias.id, 11)); - - t.deepEqual(result, [{ 
- users: { - id: 10, - name: 'Ivan', - verified: false, - jsonb: null, - createdAt: result[0]!.users.createdAt, - }, - customer: { - id: 11, - name: 'Hans', - verified: false, - jsonb: null, - createdAt: result[0]!.customer!.createdAt, - }, - }]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = mySchema.view('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('materialized view', async (t) => { - const { db } = t.context; - - 
const newYorkers1 = mySchema.materializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = mySchema.materializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = mySchema.materializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -test.serial('enum', async (t) => { - const { db } = t.context; - - const colors = mySchema.enum('colors', ['red', 'green', 'blue']); - - t.deepEqual(colors.schema, 'mySchema'); - - const { 
sql: query } = new PgDialect().sqlToQuery(sql`${colors}`); - t.deepEqual(query, '"mySchema"."colors"'); - - await db.execute(sql`create type ${colors} as enum ('red', 'green', 'blue')`); - - const result = await db.execute<{ enum_range: string }>(sql`select enum_range(null::${colors})`); - t.deepEqual(result.rows, [{ enum_range: '{red,green,blue}' }]); - - await db.execute(sql`drop type ${colors}`); -}); diff --git a/integration-tests/tests/pg.test.ts b/integration-tests/tests/pg.test.ts deleted file mode 100644 index d6167f491..000000000 --- a/integration-tests/tests/pg.test.ts +++ /dev/null @@ -1,4345 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - avg, - avgDistinct, - count, - countDistinct, - eq, - exists, - getTableColumns, - gt, - gte, - ilike, - inArray, - lt, - max, - min, - name, - or, - placeholder, - type SQL, - sql, - type SQLWrapper, - sum, - sumDistinct, - TransactionRollbackError, -} from 'drizzle-orm'; -import { drizzle, type NodePgDatabase } from 'drizzle-orm/node-postgres'; -import { migrate } from 'drizzle-orm/node-postgres/migrator'; -import { - alias, - boolean, - char, - cidr, - date, - except, - exceptAll, - foreignKey, - getMaterializedViewConfig, - getTableConfig, - getViewConfig, - // index, - inet, - integer, - intersect, - intersectAll, - interval, - jsonb, - macaddr, - macaddr8, - numeric, - type PgColumn, - pgEnum, - pgMaterializedView, - pgTable, - pgTableCreator, - pgView, - primaryKey, - serial, - text, - time, - timestamp, - union, - unionAll, - unique, - uniqueKeyName, - uuid as pgUuid, - varchar, -} from 'drizzle-orm/pg-core'; -import getPort from 'get-port'; -import pg from 'pg'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, randomString } from './utils.ts'; - -const { Client } = pg; - -const ENABLE_LOGGING = false; - -const usersTable = 
pgTable('users', { - id: serial('id' as string).primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const usersOnUpdate = pgTable('users_on_update', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), - updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), - alwaysNull: text('always_null').$type().$onUpdate(() => null), - // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), looks like this is not supported in pg -}); - -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const cities2Table = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const network = pgTable('network_table', { - inet: inet('inet').notNull(), - cidr: cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), - macaddr8: 
macaddr8('macaddr8').notNull(), -}); - -const salEmp = pgTable('sal_emp', { - name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const _tictactoe = pgTable('tictactoe', { - squares: integer('squares').array(3).array(3), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -// To test aggregate functions -const aggregateTable = pgTable('aggregate_table', { - id: serial('id').notNull(), - name: text('name').notNull(), - a: integer('a'), - b: integer('b'), - c: integer('c'), - nullOnly: integer('null_only'), -}); - -interface Context { - docker: Docker; - pgContainer: Docker.Container; - db: NodePgDatabase; - client: pg.Client; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - ctx.pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
(await createDockerDB(ctx)); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = new Client(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema public cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.db.execute( - sql` - create table cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - ); - await ctx.db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table course_categories ( - id serial primary key, - name text not null - ) - `, - ); - await ctx.db.execute( - sql` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - ); 
- await ctx.db.execute( - sql` - create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null - ) - `, - ); - await ctx.db.execute( - sql` - create table sal_emp ( - name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - ); - await ctx.db.execute( - sql` - create table tictactoe ( - squares integer[3][3] not null - ) - `, - ); -}); - -async function setupSetOperationTest(db: NodePgDatabase) { - await db.execute(sql`drop table if exists users2`); - await db.execute(sql`drop table if exists cities`); - await db.execute( - sql` - create table cities ( - id serial primary key, - name text not null - ) - `, - ); - await db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - - await db.insert(cities2Table).values([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 3 }, - { id: 4, name: 'Peter', cityId: 3 }, - { id: 5, name: 'Ben', cityId: 2 }, - { id: 6, name: 'Jill', cityId: 1 }, - { id: 7, name: 'Mary', cityId: 2 }, - { id: 8, name: 'Sally', cityId: 1 }, - ]); -} - -async function setupAggregateFunctionsTest(db: NodePgDatabase) { - await db.execute(sql`drop table if exists "aggregate_table"`); - await db.execute( - sql` - create table "aggregate_table" ( - "id" serial not null, - "name" text not null, - "a" integer, - "b" integer, - "c" integer, - "null_only" integer - ); - `, - ); - await db.insert(aggregateTable).values([ - { name: 'value 1', a: 5, b: 10, c: 20 }, - { name: 'value 1', a: 5, b: 20, c: 30 }, - { name: 'value 2', a: 10, b: 50, c: 60 }, - { name: 'value 3', a: 20, b: 20, c: null }, - { name: 'value 4', a: null, b: 90, c: 120 }, - { name: 'value 5', a: 80, b: 10, c: 
null }, - { name: 'value 6', a: null, b: null, c: 150 }, - ]); -} - -test.serial('table configs: unique third param', async (t) => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), - f1: unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - t.assert(tableConfig.uniqueConstraints.length === 2); - - t.assert(tableConfig.uniqueConstraints[0]?.name === 'custom_name'); - t.assert(tableConfig.uniqueConstraints[0]?.nullsNotDistinct); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); - - t.assert(tableConfig.uniqueConstraints[1]?.name, 'custom_name1'); - t.assert(!tableConfig.uniqueConstraints[1]?.nullsNotDistinct); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); -}); - -test.serial('table configs: unique in column', async (t) => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: char('state', { length: 2 }).unique('custom'), - field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - t.assert(columnName?.uniqueName === uniqueKeyName(cities1Table, [columnName!.name])); - t.assert(columnName?.isUnique); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - t.assert(columnState?.uniqueName === 'custom'); - t.assert(columnState?.isUnique); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - t.assert(columnField?.uniqueName === 'custom_field'); - t.assert(columnField?.isUnique); - t.assert(columnField?.uniqueType === 'not distinct'); -}); - 
-test.serial('table config: foreign keys name', async (t) => { - const table = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.foreignKeys.length, 1); - t.is(tableConfig.foreignKeys[0]!.getName(), 'custom_fk'); -}); - -test.serial('table config: primary keys name', async (t) => { - const table = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.primaryKeys.length, 1); - t.is(tableConfig.primaryKeys[0]!.getName(), 'custom_pk'); -}); - -// test.serial('table configs: all possible index properties', async () => { -// const cities1Table = pgTable('cities1', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// state: char('state', { length: 2 }), -// }, (t) => ({ -// f: index('custom_name').using('hnsw', sql`${t.name} vector_ip_ops`, t.state.desc()), -// f4: index('custom_name').on(sql`${t.name} vector_ip_ops`, t.state.desc().nullsLast()).where(sql``).with({ -// length: 12, -// }), -// })); - -// const tableConfig = getTableConfig(cities1Table); - -// console.log(tableConfig.indexes[0]?.config.columns); -// }); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); 
-}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; - - const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) - .returning(); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(insertedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - age: integer('age').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John', age: 24 }, - { id: 1, name: 'John', age: 24 }, - { id: 2, name: 'John', age: 25 }, - { id: 1, name: 'Jane', age: 24 }, - { id: 1, name: 'Jane', age: 26 }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); 
- const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.id, usersDistinctTable.age); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [ - { id: 1, name: 'Jane', age: 24 }, - { id: 1, name: 'Jane', age: 26 }, - { id: 1, name: 'John', age: 24 }, - { id: 2, name: 'John', age: 25 }, - ]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); - - t.deepEqual(users4, [ - { id: 1, name: 'John', age: 24 }, - { id: 1, name: 'Jane', age: 26 }, - { id: 2, name: 'John', age: 25 }, - ]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = 
t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - 
t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('char insert', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Austin', state: 'TX' }]); -}); - -test.serial('char update', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Atlanta', state: 'GA' }]); -}); - -test.serial('char delete', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, []); 
-}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - 
.from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with exists', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const user = alias(usersTable, 'user'); - const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists(db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id)))), - ); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - 
const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ 
id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt 
= db - .insert(usersTable) - .values({ - verified: true, - name: placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - 
id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -// TODO change tests to new structure -test.serial('migrator : default migration strategy', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom schema', async (t) => { - const { db } = t.context; - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table 
users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test.serial('migrator : migrate with custom table and custom schema', async (t) => { - const { db } = t.context; - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, - }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 
'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute( - sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - t.deepEqual(result.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('Query check: Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', - params: [], - }); -}); - -test.serial('Query check: Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - 
const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', - params: [], - }); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, 
default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: 
usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) 
- .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select() - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - state: null, - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db - .insert(courseCategoriesTable) - .values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db - .insert(coursesTable) - .values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: 
courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result1 = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - 
.where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - const result2 = await db - .with(regionalSales, topRegions) - .selectDistinct({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - const result3 = await db - .with(regionalSales, topRegions) - .selectDistinctOn([orders.region], { - region: orders.region, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region) - .orderBy(orders.region); - - t.deepEqual(result1, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); - t.deepEqual(result2, result1); - t.deepEqual(result3, [ - { - region: 'Europe', - productUnits: 8, - productSales: 80, - }, - { - region: 'US', - productUnits: 16, - productSales: 160, - }, - ]); -}); - -test.serial('with ... 
update', async (t) => { - const { db } = t.context; - - const products = pgTable('products', { - id: serial('id').primaryKey(), - price: numeric('price').notNull(), - cheap: boolean('cheap').notNull().default(false), - }); - - await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql` - create table ${products} ( - id serial primary key, - price numeric not null, - cheap boolean not null default false - ) - `); - - await db.insert(products).values([ - { price: '10.99' }, - { price: '25.85' }, - { price: '32.99' }, - { price: '2.50' }, - { price: '4.59' }, - ]); - - const averagePrice = db - .$with('average_price') - .as( - db - .select({ - value: sql`avg(${products.price})`.as('value'), - }) - .from(products), - ); - - const result = await db - .with(averagePrice) - .update(products) - .set({ - cheap: true, - }) - .where(lt(products.price, sql`(select * from ${averagePrice})`)) - .returning({ - id: products.id, - }); - - t.deepEqual(result, [ - { id: 1 }, - { id: 4 }, - { id: 5 }, - ]); -}); - -test.serial('with ... insert', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - username: text('username').notNull(), - admin: boolean('admin').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); - - const userCount = db - .$with('user_count') - .as( - db - .select({ - value: sql`count(*)`.as('value'), - }) - .from(users), - ); - - const result = await db - .with(userCount) - .insert(users) - .values([ - { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, - ]) - .returning({ - admin: users.admin, - }); - - t.deepEqual(result, [{ admin: true }]); -}); - -test.serial('with ... 
delete', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const averageAmount = db - .$with('average_amount') - .as( - db - .select({ - value: sql`avg(${orders.amount})`.as('value'), - }) - .from(orders), - ); - - const result = await db - .with(averageAmount) - .delete(orders) - .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) - .returning({ - id: orders.id, - }); - - t.deepEqual(result, [ - { id: 6 }, - { id: 7 }, - { id: 8 }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare('query')); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: '2' }]); -}); - -test.serial('select count w/ custom mapper', async (t) => { - const { db } = t.context; - - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('network types', async (t) => { - const { db } = t.context; - - const value: typeof network.$inferSelect = { - inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - }; - - await db.insert(network).values(value); - - const res = await db.select().from(network); - - t.deepEqual(res, [value]); -}); - -test.serial('array types', async (t) => { - const { db } = t.context; - - const values: typeof salEmp.$inferSelect[] = [ - { - name: 'John', - payByQuarter: [10000, 10000, 10000, 10000], - schedule: [['meeting', 'lunch'], ['training', 'presentation']], - }, - { - name: 'Carol', - payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], - }, - ]; - - await db.insert(salEmp).values(values); - - const res = await db.select().from(salEmp); - - t.deepEqual(res, values); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db - .select() - .from(users2Table) - .for('update') - .toSQL(); - - t.regex( - query.sql, - / for update$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - 
.for('update', { of: [users2Table, coursesTable] }) - .toSQL(); - - t.regex( - query.sql, - / for update of "users2", "courses"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table }) - .toSQL(); - - t.regex( - query.sql, - /for no key update of "users2"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table, skipLocked: true }) - .toSQL(); - - t.regex( - query.sql, - / for no key update of "users2" skip locked$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('share', { of: users2Table, noWait: true }) - .toSQL(); - - t.regex( - query.sql, - // eslint-disable-next-line unicorn/better-regex - /for share of "users2" no wait$/, - ); - } -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: 
integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('materialized view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgMaterializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create 
materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -// TODO: copy to SQLite and MySQL, add to docs -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 
'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table 
myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - await 
db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.serial('all date and time columns', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 }).notNull(), - datetime: timestamp('datetime').notNull(), - datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), - datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - interval: 
interval('interval').notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null, - datetime_wtz_string timestamp with time zone not null, - interval interval not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456Z'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision, - datetimeWTZString: '2022-01-01T00:00:00.123Z', - interval: '1 day', - }); - - const result = await db.select().from(table); - - Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - }[], typeof result> - >; - - Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - id?: number | undefined; - }, typeof table.$inferInsert> - >; - - t.deepEqual(result, [ - { - id: 1, - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - datetimeWTZString: '2022-01-01 00:00:00.123+00', - interval: '1 day', - }, - ]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone second case mode date', 
async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date(); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as date and check that timezones are the same - // There is no way to check timezone in Date object, as it is always represented internally in UTC - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: insertedDate }]); - - // 3. Compare both dates - t.deepEqual(insertedDate.getTime(), result[0]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC - const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones - - // 1. 
Insert date as new dates with different time zones - await db.insert(table).values([ - { timestamp: insertedDate }, - { timestamp: insertedDate2 }, - ]); - - // 2, Select and compare both dates - const result = await db.select().from(table); - - t.deepEqual(result[0]?.timestamp.getTime(), result[1]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone first case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); - - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - - // 3. 
Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone second case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual(result.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. 
Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - t.deepEqual(new Date(result.rows[0]!.timestamp_string + 'Z').getTime(), insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode date for timestamp with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: timestampString }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone in UTC timezone', async (t) => { - const { db } = t.context; - - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone in different timezone', async (t) => { - const { db } = t.context; - - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone 'HST'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-1000'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('select from sql', async (t) => { - const { db } = t.context; - - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - await t.notThrowsAsync(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), - lt(metricEntry.createdAt, intervals.endTime), - ), - ) - .groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const usersTableWithAndWithoutTimezone = 
pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 2000); - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 2000); - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id 
serial not null primary key, balance integer not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance 
integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id 
integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = pgTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async 
(t) => { - const { db } = t.context; - - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('table selection with single table', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - - await db.insert(users).values({ name: 
'John', cityId: 1 }); - - const result = await db.select({ users }).from(users); - - t.deepEqual(result, [{ users: { id: 1, name: 'John', cityId: 1 } }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('set null to jsonb field', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - jsonb: jsonb('jsonb'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, - ); - - const result = await db.insert(users).values({ jsonb: null }).returning(); - - t.deepEqual(result, [{ id: 1, jsonb: null }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('array operators', async (t) => { - const { db } = t.context; - - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - }); - - await db.execute(sql`drop 
table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, - db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - t.deepEqual(contains, [{ id: 3 }, { id: 5 }]); - t.deepEqual(contained, [{ id: 1 }, { id: 2 }, { id: 3 }]); - t.deepEqual(overlaps, [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - t.deepEqual(withSubQuery, [{ id: 1 }, { id: 3 }, { id: 5 }]); -}); - -test.serial('set operations (union) from query builder with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const sq = db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).as('sq'); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).union( - db.select().from(sq), - ).orderBy(asc(sql`name`)).limit(2).offset(1); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 3, name: 'Jack' }, - { id: 2, name: 'Jane' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name, name2: users2Table.name }) - .from(cities2Table).union( - // @ts-expect-error - db - .select({ id: 
users2Table.id, name: users2Table.name }) - .from(users2Table), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (union) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`name`)).limit(1).offset(1); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - union( - db - .select({ name: citiesTable.name, id: cities2Table.id }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (union all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).unionAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 4); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).unionAll( - db - .select({ name: citiesTable.name, id: cities2Table.id }) - .from(cities2Table).limit(2), - 
).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (union all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await unionAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 3); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - unionAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (intersect) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).intersect( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`name`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).intersect( - // @ts-expect-error - db - .select({ id: cities2Table.id, name: citiesTable.name, id2: cities2Table.id }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (intersect) as 
function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersect( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 0); - - t.deepEqual(result, []); - - t.throws(() => { - intersect( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (intersect all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).intersectAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).intersectAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(cities2Table).limit(2), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (intersect all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersectAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - 
.from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - intersectAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (except) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(cities2Table).except( - db - .select() - .from(cities2Table).where(gt(citiesTable.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - db - .select() - .from(cities2Table).except( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - ); - }); -}); - -test.serial('set operations (except) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await except( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table), - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - 
t.throws(() => { - except( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (except all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(cities2Table).exceptAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select({ name: cities2Table.name, id: cities2Table.id }) - .from(cities2Table).exceptAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (except all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await exceptAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)).limit(5).offset(2); - - t.assert(result.length === 4); - - t.deepEqual(result, [ - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - { id: 7, name: 'Mary' }, - ]); - - t.throws(() => { - exceptAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table), - db - .select({ id: 
users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (mixed) from query builder with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - const sq = db - .select() - .from(cities2Table).where(gt(citiesTable.id, 1)).as('sq'); - - const result = await db - .select() - .from(cities2Table).except( - ({ unionAll }) => - unionAll( - db.select().from(sq), - db.select().from(cities2Table).where(eq(citiesTable.id, 2)), - ), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - db - .select() - .from(cities2Table).except( - ({ unionAll }) => - unionAll( - db - .select({ name: cities2Table.name, id: cities2Table.id }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - db.select().from(cities2Table).where(eq(citiesTable.id, 2)), - ), - ); - }); -}); - -test.serial('set operations (mixed all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ), - db - .select().from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 6); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - { id: 8, name: 'Sally' }, - ]); - - t.throws(() => { - union( - db - .select({ id: 
users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 7)), - ), - db - .select().from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('aggregate function: count', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: count() }).from(table); - const result2 = await db.select({ value: count(table.a) }).from(table); - const result3 = await db.select({ value: countDistinct(table.name) }).from(table); - - t.deepEqual(result1[0]?.value, 7); - t.deepEqual(result2[0]?.value, 5); - t.deepEqual(result3[0]?.value, 6); -}); - -test.serial('aggregate function: avg', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: avg(table.b) }).from(table); - const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); - const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '33.3333333333333333'); - t.deepEqual(result2[0]?.value, null); - t.deepEqual(result3[0]?.value, '42.5000000000000000'); -}); - -test.serial('aggregate function: sum', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: sum(table.b) }).from(table); - const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); - const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '200'); - t.deepEqual(result2[0]?.value, null); - 
t.deepEqual(result3[0]?.value, '170'); -}); - -test.serial('aggregate function: max', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: max(table.b) }).from(table); - const result2 = await db.select({ value: max(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 90); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('aggregate function: min', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: min(table.b) }).from(table); - const result2 = await db.select({ value: min(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 10); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('array mapping and parsing', async (t) => { - const { db } = t.context; - - const arrays = pgTable('arrays_tests', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - nested: text('nested').array().array(), - numbers: integer('numbers').notNull().array(), - }); - - await db.execute(sql`drop table if exists ${arrays}`); - await db.execute(sql` - create table ${arrays} ( - id serial primary key, - tags text[], - nested text[][], - numbers integer[] - ) - `); - - await db.insert(arrays).values({ - tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], - numbers: [1, 2, 3], - }); - - const result = await db.select().from(arrays); - - t.deepEqual(result, [{ - id: 1, - tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], - numbers: [1, 2, 3], - }]); - - await db.execute(sql`drop table ${arrays}`); -}); - -test.serial('test $onUpdateFn and $onUpdate works as $default', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial primary key, - name text not null, - update_counter integer 
default 1 not null, - updated_at timestamp(3), - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - t.deepEqual(response, [ - { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 250; - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); - -test.serial('test $onUpdateFn and $onUpdate works updating', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial primary key, - name text not null, - update_counter integer default 1, - updated_at timestamp(3), - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John', alwaysNull: 'this will be null after updating' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - const initial = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); - await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - const response = await db.select({ ...rest 
}).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - t.deepEqual(response, [ - { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 250; - - t.assert(initial[0]?.updatedAt?.valueOf() !== justDates[0]?.updatedAt?.valueOf()); - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); - -test.serial('test if method with sql operators', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - age: integer('age').notNull(), - city: text('city').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute(sql` - create table ${users} ( - id serial primary key, - name text not null, - age integer not null, - city text not null - ) - `); - - await db.insert(users).values([ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition1 = true; - - const [result1] = await db.select().from(users).where(eq(users.id, 1).if(condition1)); - - t.deepEqual(result1, { id: 1, name: 'John', age: 20, city: 'New York' }); - - const condition2 = 1; - - const [result2] = await db.select().from(users).where(sql`${users.id} = 1`.if(condition2)); - - t.deepEqual(result2, { id: 1, name: 'John', age: 20, city: 'New York' }); - - const condition3 = 'non-empty string'; - - const result3 = await db.select().from(users).where( - or(eq(users.id, 1).if(condition3), eq(users.id, 2).if(condition3)), - ); - - t.deepEqual(result3, [{ id: 1, name: 'John', age: 20, city: 'New York' }, { - id: 2, - name: 'Alice', - age: 21, - 
city: 'New York', - }]); - - const condtition4 = false; - - const result4 = await db.select().from(users).where(eq(users.id, 1).if(condtition4)); - - t.deepEqual(result4, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition5 = undefined; - - const result5 = await db.select().from(users).where(sql`${users.id} = 1`.if(condition5)); - - t.deepEqual(result5, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition6 = null; - - const result6 = await db.select().from(users).where( - or(eq(users.id, 1).if(condition6), eq(users.id, 2).if(condition6)), - ); - - t.deepEqual(result6, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition7 = { - term1: 0, - term2: 1, - }; - - const result7 = await db.select().from(users).where( - and(gt(users.age, 20).if(condition7.term1), eq(users.city, 'New York').if(condition7.term2)), - ); - - t.deepEqual(result7, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - ]); - - const condition8 = { - term1: '', - term2: 'non-empty string', - }; - - const result8 = await db.select().from(users).where( - or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2)), - ); - - t.deepEqual(result8, [ - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition9 = { - term1: 1, - term2: true, - }; - - const result9 = await 
db.select().from(users).where( - and(inArray(users.city, ['New York', 'London']).if(condition9.term1), ilike(users.name, 'a%').if(condition9.term2)), - ); - - t.deepEqual(result9, [ - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - ]); - - const condition10 = { - term1: 4, - term2: 19, - }; - - const result10 = await db.select().from(users).where( - and( - sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), - gt(users.age, condition10.term2).if(condition10.term2 > 20), - ), - ); - - t.deepEqual(result10, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition11 = true; - - const result11 = await db.select().from(users).where( - or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11), - ); - - t.deepEqual(result11, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition12 = false; - - const result12 = await db.select().from(users).where( - and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12), - ); - - t.deepEqual(result12, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition13 = true; - - const result13 = await db.select().from(users).where(sql`(city = 'New York' or age >= 22)`.if(condition13)); - - t.deepEqual(result13, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - const condition14 = false; - - const result14 = await 
db.select().from(users).where(sql`(city = 'London' and age >= 23)`.if(condition14)); - - t.deepEqual(result14, [ - { id: 1, name: 'John', age: 20, city: 'New York' }, - { id: 2, name: 'Alice', age: 21, city: 'New York' }, - { id: 3, name: 'Nick', age: 22, city: 'London' }, - { id: 4, name: 'Lina', age: 23, city: 'London' }, - ]); - - await db.execute(sql`drop table ${users}`); -}); diff --git a/integration-tests/tests/awsdatapi.test.ts b/integration-tests/tests/pg/awsdatapi.test.ts similarity index 99% rename from integration-tests/tests/awsdatapi.test.ts rename to integration-tests/tests/pg/awsdatapi.test.ts index 856589096..9bf901fc6 100644 --- a/integration-tests/tests/awsdatapi.test.ts +++ b/integration-tests/tests/pg/awsdatapi.test.ts @@ -22,8 +22,8 @@ import { import { Resource } from 'sst'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import type { Equal } from './utils'; -import { Expect, randomString } from './utils'; +import type { Equal } from '../utils.ts'; +import { Expect, randomString } from '../utils.ts'; dotenv.config(); diff --git a/integration-tests/tests/pg/neon-http-batch.test.ts b/integration-tests/tests/pg/neon-http-batch.test.ts new file mode 100644 index 000000000..daea2219e --- /dev/null +++ b/integration-tests/tests/pg/neon-http-batch.test.ts @@ -0,0 +1,54 @@ +import { neon, type NeonQueryFunction } from '@neondatabase/serverless'; +import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; +import { beforeAll, beforeEach, expect, test } from 'vitest'; +import { + commentLikesConfig, + commentsConfig, + commentsTable, + groupsConfig, + groupsTable, + postsConfig, + postsTable, + usersConfig, + usersTable, + usersToGroupsConfig, + usersToGroupsTable, +} from './neon-http-batch'; + +const ENABLE_LOGGING = false; + +export const schema = { + usersTable, + postsTable, + commentsTable, + usersToGroupsTable, + groupsTable, + commentLikesConfig, + commentsConfig, + postsConfig, + 
usersToGroupsConfig, + groupsConfig, + usersConfig, +}; + +let db: NeonHttpDatabase; +let client: NeonQueryFunction; + +beforeAll(async () => { + const connectionString = process.env['NEON_CONNECTION_STRING']; + if (!connectionString) { + throw new Error('NEON_CONNECTION_STRING is not defined'); + } + client = neon(connectionString); + db = drizzle(client, { schema, logger: ENABLE_LOGGING }); +}); + +beforeEach((ctx) => { + ctx.neonPg = { + db, + }; +}); + +test('skip', async () => { + expect(1).toBe(1); +}); diff --git a/integration-tests/tests/pg/neon-http-batch.ts b/integration-tests/tests/pg/neon-http-batch.ts new file mode 100644 index 000000000..e2cc57ae2 --- /dev/null +++ b/integration-tests/tests/pg/neon-http-batch.ts @@ -0,0 +1,556 @@ +import Docker from 'dockerode'; +import type { InferSelectModel } from 'drizzle-orm'; +import { eq, relations, sql } from 'drizzle-orm'; +import type { NeonHttpQueryResult } from 'drizzle-orm/neon-http'; +import { integer, pgTable, primaryKey, serial, text, timestamp } from 'drizzle-orm/pg-core'; +import type { AnyPgColumn } from 'drizzle-orm/pg-core'; +import getPort from 'get-port'; +import { v4 as uuidV4 } from 'uuid'; +import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; + +export const usersTable = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: integer('verified').notNull().default(0), + invitedBy: integer('invited_by').references((): AnyPgColumn => usersTable.id), +}); +export const usersConfig = relations(usersTable, ({ one, many }) => ({ + invitee: one(usersTable, { + fields: [usersTable.invitedBy], + references: [usersTable.id], + }), + usersToGroups: many(usersToGroupsTable), + posts: many(postsTable), +})); + +export const groupsTable = pgTable('groups', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + description: text('description'), +}); +export const groupsConfig = relations(groupsTable, ({ many }) => ({ + 
usersToGroups: many(usersToGroupsTable), +})); + +export const usersToGroupsTable = pgTable( + 'users_to_groups', + { + id: serial('id'), + userId: integer('user_id').notNull().references(() => usersTable.id), + groupId: integer('group_id').notNull().references(() => groupsTable.id), + }, + (t) => ({ + pk: primaryKey({ columns: [t.userId, t.groupId] }), + }), +); +export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ + group: one(groupsTable, { + fields: [usersToGroupsTable.groupId], + references: [groupsTable.id], + }), + user: one(usersTable, { + fields: [usersToGroupsTable.userId], + references: [usersTable.id], + }), +})); + +export const postsTable = pgTable('posts', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + ownerId: integer('owner_id').references(() => usersTable.id), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); +export const postsConfig = relations(postsTable, ({ one, many }) => ({ + author: one(usersTable, { + fields: [postsTable.ownerId], + references: [usersTable.id], + }), + comments: many(commentsTable), +})); + +export const commentsTable = pgTable('comments', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + creator: integer('creator').references(() => usersTable.id), + postId: integer('post_id').references(() => postsTable.id), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); +export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ + post: one(postsTable, { + fields: [commentsTable.postId], + references: [postsTable.id], + }), + author: one(usersTable, { + fields: [commentsTable.creator], + references: [usersTable.id], + }), + likes: many(commentLikesTable), +})); + +export const commentLikesTable = pgTable('comment_likes', { + id: serial('id').primaryKey(), + creator: integer('creator').references(() => usersTable.id), + commentId: integer('comment_id').references(() => commentsTable.id), + createdAt: 
timestamp('created_at').notNull().defaultNow(), +}); +export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ + comment: one(commentsTable, { + fields: [commentLikesTable.commentId], + references: [commentsTable.id], + }), + author: one(usersTable, { + fields: [commentLikesTable.creator], + references: [usersTable.id], + }), +})); + +let pgContainer: Docker.Container; +export async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 5432 }); + const image = 'postgres:14'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + pgContainer = await docker.createContainer({ + Image: image, + Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], + name: `drizzle-integration-tests-${uuidV4()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '5432/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await pgContainer.start(); + + return `postgres://postgres:postgres@localhost:${port}/postgres`; +} + +afterAll(async () => { + await pgContainer?.stop().catch(console.error); +}); + +export function tests() { + describe('common', () => { + beforeEach(async (ctx) => { + const { db } = ctx.pg; + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`drop schema if exists mySchema cascade`); + + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified int not null default 0, + invited_by int references users(id) + ) + `, + ); + await db.execute( + sql` + create table groups ( + id serial primary key, + name text not null, + description text + ) + `, + ); + await db.execute( + sql` + create table users_to_groups ( + id serial, + user_id int not null references users(id), + group_id int not null references groups(id), + primary key (user_id, group_id) + ) + `, 
+ ); + await db.execute( + sql` + create table posts ( + id serial primary key, + content text not null, + owner_id int references users(id), + created_at timestamp not null default now() + ) + `, + ); + await db.execute( + sql` + create table comments ( + id serial primary key, + content text not null, + creator int references users(id), + post_id int references posts(id), + created_at timestamp not null default now() + ) + `, + ); + await db.execute( + sql` + create table comment_likes ( + id serial primary key, + creator int references users(id), + comment_id int references comments(id), + created_at timestamp not null default now() + ) + `, + ); + }); + + test('batch api example', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ + id: usersTable.id, + invitedBy: usersTable.invitedBy, + }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.select().from(usersTable), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + invitedBy: number | null; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + ]>(); + + expect(batchResponse.length).eq(3); + + expect(batchResponse[0]).toEqual([{ + id: 1, + invitedBy: null, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + }); + + // batch api only relational many + test('insert + findMany', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.query.usersTable.findMany({}), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + 
NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + ]>(); + + expect(batchResponse.length).eq(3); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + }); + + // batch api relational many + one + test('insert + findMany + findFirst', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.query.usersTable.findMany({}), + db.query.usersTable.findFirst({}), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + } | undefined, + ]>(); + + expect(batchResponse.length).eq(4); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual( + { id: 1, name: 'John', verified: 0, invitedBy: null }, + ); + }); + + test('insert + db.execute', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.execute(sql`insert into users (id, name) values (2, 'Dan')`), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult>, + ]>(); + + expect(batchResponse.length).eq(2); + + 
expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rowAsArray: false, rows: [], rowCount: 1 }); + }); + + // batch api combined rqb + raw call + test('insert + findManyWith + db.all', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.query.usersTable.findMany({}), + db.execute(sql`select * from users`), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + NeonHttpQueryResult<{ + id: number; + name: string; + verified: number; + invitedBy: number | null; + }>, + ]>(); + + expect(batchResponse.length).eq(4); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rowAsArray: true, rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toMatchObject({ + rows: [ + { id: 1, name: 'John', verified: 0, invited_by: null }, + { id: 2, name: 'Dan', verified: 0, invited_by: null }, + ], + }); + }); + + // batch api for insert + update + select + test('insert + update + select + select partial', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), + db.query.usersTable.findMany({}), + db.select().from(usersTable).where(eq(usersTable.id, 1)), + db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + 
NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + invitedBy: number | null; + }[], + ]>(); + + expect(batchResponse.length).eq(5); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual([ + { id: 1, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[4]).toEqual([ + { id: 1, invitedBy: null }, + ]); + }); + + // batch api for insert + delete + select + test('insert + delete + select + select partial', async (ctx) => { + const { db } = ctx.neonPg; + + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ + id: usersTable.id, + invitedBy: usersTable.invitedBy, + }), + db.query.usersTable.findFirst({ + columns: { + id: true, + invitedBy: true, + }, + }), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + invitedBy: number | null; + }[], + { + id: number; + invitedBy: number | null; + } | undefined, + ]>(); + + expect(batchResponse.length).eq(4); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual( + { id: 2, invitedBy: null }, + ); + }); + + test('select raw', async (ctx) => { + const { db } = ctx.neonPg; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Dan' }]); + const batchResponse = await 
db.batch([ + db.execute>(sql`select * from users`), + db.execute>(sql`select * from users where id = 1`), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + NeonHttpQueryResult<{ + id: number; + name: string; + verified: number; + invited_by: number | null; + }>, + NeonHttpQueryResult<{ + id: number; + name: string; + verified: number; + invited_by: number | null; + }>, + ]>(); + + expect(batchResponse.length).eq(2); + + expect(batchResponse[0]).toMatchObject({ + rows: [ + { id: 1, name: 'John', verified: 0, invited_by: null }, + { id: 2, name: 'Dan', verified: 0, invited_by: null }, + ], + }); + + expect(batchResponse[1]).toMatchObject({ + rows: [ + { id: 1, name: 'John', verified: 0, invited_by: null }, + ], + }); + }); + }); +} diff --git a/integration-tests/tests/pg/neon-http.test.ts b/integration-tests/tests/pg/neon-http.test.ts new file mode 100644 index 000000000..1476e9628 --- /dev/null +++ b/integration-tests/tests/pg/neon-http.test.ts @@ -0,0 +1,486 @@ +import { neon, type NeonQueryFunction } from '@neondatabase/serverless'; +import retry from 'async-retry'; +import { sql } from 'drizzle-orm'; +import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; +import { migrate } from 'drizzle-orm/neon-http/migrator'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { Client } from 'pg'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { tests, usersMigratorTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: NeonHttpDatabase; +let ddlRunner: Client; +let client: NeonQueryFunction; + +beforeAll(async () => { + const connectionString = process.env['NEON_CONNECTION_STRING']; + if (!connectionString) { + throw new Error('NEON_CONNECTION_STRING is not defined'); + } + client = neon(connectionString); + ddlRunner = await retry(async () => { + ddlRunner = new Client(connectionString); 
+ await ddlRunner.connect(); + return ddlRunner; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + ddlRunner?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await ddlRunner?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom schema', async () => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await 
db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ 
name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. 
Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. 
Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test.skip('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'nested transaction rollback', + 'transaction rollback', + 'nested transaction', + 'transaction', + 'timestamp timezone', + 'test $onUpdateFn and $onUpdate works as $default', +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via 
db.execute + returning', async () => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pg/node-postgres.test.ts b/integration-tests/tests/pg/node-postgres.test.ts new file mode 100644 index 000000000..1c898e6a6 --- /dev/null +++ b/integration-tests/tests/pg/node-postgres.test.ts @@ -0,0 +1,476 @@ +import retry from 'async-retry'; +import { sql } from 'drizzle-orm'; +import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; +import { drizzle } from 'drizzle-orm/node-postgres'; +import { migrate } from 'drizzle-orm/node-postgres/migrator'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { Client } from 'pg'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: NodePgDatabase; +let client: Client; + +beforeAll(async () => { + const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
await createDockerDB(); + client = await retry(async () => { + client = new Client(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom schema', async () => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, 
name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test 
if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. 
Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'test mode string for timestamp with timezone in different timezone', +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute + returning', async () => { + const inserted = await db.execute<{ id: number; name: string }>( + 
sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pg/pg-common.ts b/integration-tests/tests/pg/pg-common.ts new file mode 100644 index 000000000..b668238f2 --- /dev/null +++ b/integration-tests/tests/pg/pg-common.ts @@ -0,0 +1,4432 @@ +import Docker from 'dockerode'; +// eslint-disable-next-line @typescript-eslint/consistent-type-imports +import { + and, + arrayContained, + arrayContains, + arrayOverlaps, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + Equal, + exists, + getTableColumns, + gt, + gte, + ilike, + inArray, + lt, + max, + min, + or, + SQL, + sql, + SQLWrapper, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import type { NeonHttpDatabase } from 'drizzle-orm/neon-http'; +import type { PgColumn, PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; +import { + alias, + boolean, + char, + cidr, + date, + except, + exceptAll, + foreignKey, + getMaterializedViewConfig, + getTableConfig, + getViewConfig, + inet, + integer, + intersect, + intersectAll, + interval, + jsonb, + macaddr, + macaddr8, + numeric, + pgEnum, + pgMaterializedView, + pgSchema, + pgTable, + pgTableCreator, + pgView, + primaryKey, + serial, + text, + time, + timestamp, + union, + unionAll, + unique, + uniqueKeyName, + uuid as pgUuid, + varchar, +} from 'drizzle-orm/pg-core'; +import getPort from 'get-port'; +import { v4 as uuidV4 } from 'uuid'; +import { afterAll, beforeEach, describe, expect, test } from 'vitest'; +import { Expect } from '~/utils'; 
+import type { schema } from './neon-http-batch.test'; +// eslint-disable-next-line @typescript-eslint/no-import-type-side-effects +// import { type NodePgDatabase } from 'drizzle-orm/node-postgres'; + +declare module 'vitest' { + interface TestContext { + pg: { + db: PgDatabase; + }; + neonPg: { + db: NeonHttpDatabase; + }; + } +} + +export const usersTable = pgTable('users', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), +}); + +const usersOnUpdate = pgTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdate(() => null), + // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), looks like this is not supported in pg +}); + +const citiesTable = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), +}); + +const cities2Table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const users2Table = pgTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), +}); + +const coursesTable = pgTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: integer('category_id').references(() => courseCategoriesTable.id), +}); + +const courseCategoriesTable = pgTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = pgTable('orders', { + id: serial('id').primaryKey(), + region: 
text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), +}); + +const network = pgTable('network_table', { + inet: inet('inet').notNull(), + cidr: cidr('cidr').notNull(), + macaddr: macaddr('macaddr').notNull(), + macaddr8: macaddr8('macaddr8').notNull(), +}); + +const salEmp = pgTable('sal_emp', { + name: text('name'), + payByQuarter: integer('pay_by_quarter').array(), + schedule: text('schedule').array().array(), +}); + +const _tictactoe = pgTable('tictactoe', { + squares: integer('squares').array(3).array(3), +}); + +export const usersMigratorTable = pgTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +// To test aggregate functions +const aggregateTable = pgTable('aggregate_table', { + id: serial('id').notNull(), + name: text('name').notNull(), + a: integer('a'), + b: integer('b'), + c: integer('c'), + nullOnly: integer('null_only'), +}); + +// To test another schema and multischema +const mySchema = pgSchema('mySchema'); + +const usersMySchemaTable = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), +}); + +const citiesMySchemaTable = mySchema.table('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), +}); + +const users2MySchemaTable = mySchema.table('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), +}); + +let pgContainer: Docker.Container; + +export async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 5432 }); + const image = 'postgres:14'; + + 
const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + pgContainer = await docker.createContainer({ + Image: image, + Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], + name: `drizzle-integration-tests-${uuidV4()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '5432/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await pgContainer.start(); + + return `postgres://postgres:postgres@localhost:${port}/postgres`; +} + +afterAll(async () => { + await pgContainer?.stop().catch(console.error); +}); + +export function tests() { + describe('common', () => { + beforeEach(async (ctx) => { + const { db } = ctx.pg; + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`drop schema if exists ${mySchema} cascade`); + await db.execute(sql`create schema public`); + await db.execute(sql`create schema ${mySchema}`); + // public users + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + // public cities + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null, + state char(2) + ) + `, + ); + // public users2 + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id integer references cities(id) + ) + `, + ); + await db.execute( + sql` + create table course_categories ( + id serial primary key, + name text not null + ) + `, + ); + await db.execute( + sql` + create table courses ( + id serial primary key, + name text not null, + category_id integer references course_categories(id) + ) + `, + ); + await db.execute( + sql` + create table orders ( + id serial primary key, + region text not null, + product text not null, + amount 
integer not null, + quantity integer not null + ) + `, + ); + await db.execute( + sql` + create table network_table ( + inet inet not null, + cidr cidr not null, + macaddr macaddr not null, + macaddr8 macaddr8 not null + ) + `, + ); + await db.execute( + sql` + create table sal_emp ( + name text not null, + pay_by_quarter integer[] not null, + schedule text[][] not null + ) + `, + ); + await db.execute( + sql` + create table tictactoe ( + squares integer[3][3] not null + ) + `, + ); + // // mySchema users + await db.execute( + sql` + create table ${usersMySchemaTable} ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + // mySchema cities + await db.execute( + sql` + create table ${citiesMySchemaTable} ( + id serial primary key, + name text not null, + state char(2) + ) + `, + ); + // mySchema users2 + await db.execute( + sql` + create table ${users2MySchemaTable} ( + id serial primary key, + name text not null, + city_id integer references "mySchema".cities(id) + ) + `, + ); + }); + + async function setupSetOperationTest(db: PgDatabase) { + await db.execute(sql`drop table if exists users2`); + await db.execute(sql`drop table if exists cities`); + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null + ) + `, + ); + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id integer references cities(id) + ) + `, + ); + + await db.insert(cities2Table).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 
8, name: 'Sally', cityId: 1 }, + ]); + } + + async function setupAggregateFunctionsTest(db: PgDatabase) { + await db.execute(sql`drop table if exists "aggregate_table"`); + await db.execute( + sql` + create table "aggregate_table" ( + "id" serial not null, + "name" text not null, + "a" integer, + "b" integer, + "c" integer, + "null_only" integer + ); + `, + ); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + } + + test('table configs: unique third param', async () => { + const cities1Table = pgTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }, (t) => ({ + f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), + f1: unique('custom_name1').on(t.name, t.state), + })); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + }); + + test('table configs: unique in column', async () => { + const cities1Table = pgTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: char('state', { length: 2 }).unique('custom'), + field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not 
distinct' }), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + + expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.isUnique).toBe(true); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBe(true); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBe(true); + expect(columnField?.uniqueType).toBe('not distinct'); + }); + + test('table config: foreign keys name', async () => { + const table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); + }); + + test('table config: primary keys name', async () => { + const table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); + }); + + test('select all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(result).toEqual([{ id: 1, name: 'John', 
verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('select sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select typed sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('$default function', async (ctx) => { + const { db } = ctx.pg; + + const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) + .returning(); + const selectedOrder = await db.select().from(orders); + + expect(insertedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('select distinct', async (ctx) => { + const { db } = ctx.pg; + + const usersDistinctTable = pgTable('users_distinct', { + id: integer('id').notNull(), + name: text('name').notNull(), + age: integer('age').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John', age: 24 }, + { id: 1, name: 'John', age: 24 }, + { id: 2, name: 'John', age: 25 }, + { id: 1, name: 'Jane', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + ]); + const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + const users2 = await 
db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( + usersDistinctTable.id, + ); + const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.name); + const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.id, usersDistinctTable.age); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users1).toEqual([ + { id: 1, name: 'Jane', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + { id: 1, name: 'John', age: 24 }, + { id: 2, name: 'John', age: 25 }, + ]); + + expect(users2).toHaveLength(2); + expect(users2[0]?.id).toBe(1); + expect(users2[1]?.id).toBe(2); + + expect(users3).toHaveLength(2); + expect(users3[0]?.name).toBe('Jane'); + expect(users3[1]?.name).toBe('John'); + + expect(users4).toEqual([ + { id: 1, name: 'John', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + { id: 2, name: 'John', age: 25 }, + ]); + }); + + test('insert returning sql', async (ctx) => { + const { db } = ctx.pg; + + const users = await db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('delete returning sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .delete(usersTable) + .where(eq(usersTable.name, 'John')) + .returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('update returning sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ 
name: 'JANE' }]); + }); + + test('update with returning all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([ + { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); + }); + + test('update with returning partial', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning({ + id: usersTable.id, + name: usersTable.name, + }); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('delete with returning all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); + }); + + test('delete with returning partial', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ + id: usersTable.id, + name: usersTable.name, + }); + + expect(users).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert + select', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const result = await 
db.select().from(usersTable); + expect(result).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, + ]); + + await db.insert(usersTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('json insert', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }) + .from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test('char insert', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) + .from(citiesTable); + + expect(result).toEqual([{ id: 1, name: 'Austin', state: 'TX' }]); + }); + + test('char update', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); + await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) + .from(citiesTable); + + expect(result).toEqual([{ id: 1, name: 'Atlanta', state: 'GA' }]); + }); + + test('char delete', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); + await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) + .from(citiesTable); + + 
expect(result).toEqual([]); + }); + + test('insert with overridden default values', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, + ]); + }); + + test('insert many', async (ctx) => { + const { db } = ctx.pg; + + await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('insert many with returning', async (ctx) => { + const { db } = ctx.pg; + + const result = await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]) + .returning({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('select with group by as field', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + 
.from(usersTable) + .groupBy(usersTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test('select with exists', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const user = alias(usersTable, 'user'); + const result = await db.select({ name: usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test('select with group by as sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + }); + + test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + }); + + test('select with group by complex query', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const 
result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test('build query', async (ctx) => { + const { db } = ctx.pg; + + const query = db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', + params: [], + }); + }); + + test('insert sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('partial join with alias', async (ctx) => { + const { db } = ctx.pg; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }) + .from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)); + + expect(result).toEqual([ + { + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }, + ]); + }); + + test('full join with alias', async (ctx) => { + const { db } = ctx.pg; + + const pgTable = pgTableCreator((name) => `prefixed_${name}`); + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 
'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('select from alias', async (ctx) => { + const { db } = ctx.pg; + + const pgTable = pgTableCreator((name) => `prefixed_${name}`); + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('insert with spaces', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('prepared statement', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const statement = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .prepare('statement1'); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('prepared statement reuse', async (ctx) => { + const { db } = ctx.pg; + + const stmt = db + .insert(usersTable) 
+ .values({ + verified: true, + name: sql.placeholder('name'), + }) + .prepare('stmt2'); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare('stmt3'); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('prepared statement with placeholder in .limit', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); + }); + + test('prepared statement with placeholder in .offset', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); + const stmt = db + .select({ + id: 
usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .offset(sql.placeholder('offset')) + .prepare('stmt_offset'); + + const result = await stmt.execute({ offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'John1' }]); + }); + + // TODO change tests to new structure + test('Query check: Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', + params: [], + }); + }); + + test('Query check: Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', + params: [], + }); + }); + + test('Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('empty_insert_single', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values({}); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test('Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('empty_insert_multiple', { + id: serial('id').primaryKey(), + name: 
text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values([{}, {}]); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test('build query insert with onConflict do update', async (ctx) => { + const { db } = ctx.pg; + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); + }); + + test('build query insert with onConflict do update / multiple columns', async (ctx) => { + const { db } = ctx.pg; + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); + }); + + test('build query insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.pg; + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing() + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', + params: ['John', '["foo","bar"]'], + }); + 
}); + + test('build query insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.pg; + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: usersTable.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); + }); + + test('insert with onConflict do update', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test('insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + + await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values({ name: 'John' }); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing({ target: usersTable.id }); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('left join (flat object fields)', async (ctx) => { + const { db } = ctx.pg; + + const { id: cityId } = await db + 
.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }) + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); + }); + + test('left join (grouped fields)', async (ctx) => { + const { db } = ctx.pg; + + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }) + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); + }); + + test('left join (all fields)', async (ctx) => { + const { db } = ctx.pg; + + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select() + .from(users2Table) + .leftJoin(citiesTable, 
eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId, + }, + cities: { + id: cityId, + name: 'Paris', + state: null, + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); + }); + + test('join subquery', async (ctx) => { + const { db } = ctx.pg; + + await db + .insert(courseCategoriesTable) + .values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db + .insert(coursesTable) + .values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + }); + + test('with ... 
select', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result1 = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + const result2 = await db + .with(regionalSales, topRegions) + .selectDistinct({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + const result3 = await db + .with(regionalSales, topRegions) 
+ .selectDistinctOn([orders.region], { + region: orders.region, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region) + .orderBy(orders.region); + + expect(result1).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + expect(result2).toEqual(result1); + expect(result3).toEqual([ + { + region: 'Europe', + productUnits: 8, + productSales: 80, + }, + { + region: 'US', + productUnits: 16, + productSales: 160, + }, + ]); + }); + + test('with ... update', async (ctx) => { + const { db } = ctx.pg; + + const products = pgTable('products', { + id: serial('id').primaryKey(), + price: numeric('price').notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await db.execute(sql`drop table if exists ${products}`); + await db.execute(sql` + create table ${products} ( + id serial primary key, + price numeric not null, + cheap boolean not null default false + ) + `); + + await db.insert(products).values([ + { price: '10.99' }, + { price: '25.85' }, + { price: '32.99' }, + { price: '2.50' }, + { price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + const result = await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)) + .returning({ + id: products.id, + }); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('with ... 
insert', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + username: text('username').notNull(), + admin: boolean('admin').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); + + const userCount = db + .$with('user_count') + .as( + db + .select({ + value: sql`count(*)`.as('value'), + }) + .from(users), + ); + + const result = await db + .with(userCount) + .insert(users) + .values([ + { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, + ]) + .returning({ + admin: users.admin, + }); + + expect(result).toEqual([{ admin: true }]); + }); + + test('with ... delete', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + const result = await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) + .returning({ + id: orders.id, + }); + + expect(result).toEqual([ + { id: 6 }, + { id: 7 }, + { id: 8 }, + ]); + }); + + test('select from subquery sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); + + const sq = db + .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) + 
.from(users2Table) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test('select a field without joining its table', (ctx) => { + const { db } = ctx.pg; + + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); + }); + + test('select all fields from subquery without alias', (ctx) => { + const { db } = ctx.pg; + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare('query')).toThrowError(); + }); + + test('select count()', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: '2' }]); + }); + + test('select count w/ custom mapper', async (ctx) => { + const { db } = ctx.pg; + + function count(value: PgColumn | SQLWrapper): SQL; + function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; + function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { + const result = sql`count(${value})`.mapWith(Number); + if (!alias) { + return result; + } + return result.as(alias); + } + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: count(sql`*`) }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test('network types', async (ctx) => { + const { db } = ctx.pg; + + const value: typeof network.$inferSelect = { + inet: '127.0.0.1', + cidr: '192.168.100.128/25', + macaddr: '08:00:2b:01:02:03', + macaddr8: '08:00:2b:01:02:03:04:05', + }; + + await db.insert(network).values(value); + + const res = await db.select().from(network); + + expect(res).toEqual([value]); + }); + + test('array types', async (ctx) => { + const { db } = ctx.pg; + 
+ const values: typeof salEmp.$inferSelect[] = [ + { + name: 'John', + payByQuarter: [10000, 10000, 10000, 10000], + schedule: [['meeting', 'lunch'], ['training', 'presentation']], + }, + { + name: 'Carol', + payByQuarter: [20000, 25000, 25000, 25000], + schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], + }, + ]; + + await db.insert(salEmp).values(values); + + const res = await db.select().from(salEmp); + + expect(res).toEqual(values); + }); + + test('select for ...', (ctx) => { + const { db } = ctx.pg; + + { + const query = db + .select() + .from(users2Table) + .for('update') + .toSQL(); + + expect(query.sql).toMatch(/ for update$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('update', { of: [users2Table, coursesTable] }) + .toSQL(); + + expect(query.sql).toMatch(/ for update of "users2", "courses"$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('no key update', { of: users2Table }) + .toSQL(); + + expect(query.sql).toMatch(/for no key update of "users2"$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('no key update', { of: users2Table, skipLocked: true }) + .toSQL(); + + expect(query.sql).toMatch(/ for no key update of "users2" skip locked$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('share', { of: users2Table, noWait: true }) + .toSQL(); + + expect(query.sql).toMatch(/for share of "users2" no wait$/); + } + }); + + test('having', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})::int`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, 
eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + + test('view', async (ctx) => { + const { db } = ctx.pg; + + const newYorkers1 = pgView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = pgView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = pgView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + + 
// NEXT + test('materialized view', async (ctx) => { + const { db } = ctx.pg; + + const newYorkers1 = pgMaterializedView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = pgMaterializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = pgMaterializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([]); + } + + await db.refreshMaterializedView(newYorkers1); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop materialized view ${newYorkers1}`); + }); + + test('select from existing view', async (ctx) => { + const { db } = ctx.pg; + + const schema = 
pgSchema('test_schema'); + + const newYorkers = schema.view('new_yorkers', { + id: integer('id').notNull(), + }).existing(); + + await db.execute(sql`drop schema if exists ${schema} cascade`); + await db.execute(sql`create schema ${schema}`); + await db.execute(sql`create view ${newYorkers} as select id from ${usersTable}`); + + await db.insert(usersTable).values({ id: 100, name: 'John' }); + + const result = await db.select({ + id: usersTable.id, + }).from(usersTable).innerJoin(newYorkers, eq(newYorkers.id, usersTable.id)); + + expect(result).toEqual([{ id: 100 }]); + }); + + // TODO: copy to SQLite and MySQL, add to docs + test('select from raw sql', async (ctx) => { + const { db } = ctx.pg; + + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test('select from raw sql with joins', async (ctx) => { + const { db } = ctx.pg; + + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from select', async (ctx) => { + const { db } = ctx.pg; + + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; 
cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from with clause', async (ctx) => { + const { db } = ctx.pg; + + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('prefixed table', async (ctx) => { + const { db } = ctx.pg; + + const pgTable = pgTableCreator((name) => `myprefix_${name}`); + + const users = pgTable('test_prefixed_table_with_unique_name', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('select from enum', async (ctx) => { + const { db } = ctx.pg; + + const muscleEnum = 
pgEnum('muscle', [ + 'abdominals', + 'hamstrings', + 'adductors', + 'quadriceps', + 'biceps', + 'shoulders', + 'chest', + 'middle_back', + 'calves', + 'glutes', + 'lower_back', + 'lats', + 'triceps', + 'traps', + 'forearms', + 'neck', + 'abductors', + ]); + + const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); + + const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); + + const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); + + const equipmentEnum = pgEnum('equipment', [ + 'barbell', + 'dumbbell', + 'bodyweight', + 'machine', + 'cable', + 'kettlebell', + ]); + + const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); + + const exercises = pgTable('exercises', { + id: serial('id').primaryKey(), + name: varchar('name').notNull(), + force: forceEnum('force'), + level: levelEnum('level'), + mechanic: mechanicEnum('mechanic'), + equipment: equipmentEnum('equipment'), + instructions: text('instructions'), + category: categoryEnum('category'), + primaryMuscles: muscleEnum('primary_muscles').array(), + secondaryMuscles: muscleEnum('secondary_muscles').array(), + createdAt: timestamp('created_at').notNull().default(sql`now()`), + updatedAt: timestamp('updated_at').notNull().default(sql`now()`), + }); + + await db.execute(sql`drop table if exists ${exercises}`); + await db.execute(sql`drop type if exists ${sql.identifier(muscleEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(forceEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(levelEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(mechanicEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(equipmentEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(categoryEnum.enumName)}`); + + await db.execute( + sql`create type ${ + sql.identifier(muscleEnum.enumName) + } as enum ('abdominals', 'hamstrings', 
'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, + ); + await db.execute( + sql`create type ${sql.identifier(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`, + ); + await db.execute( + sql`create type ${sql.identifier(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`, + ); + await db.execute(sql`create type ${sql.identifier(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); + await db.execute( + sql`create type ${ + sql.identifier(equipmentEnum.enumName) + } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, + ); + await db.execute( + sql`create type ${sql.identifier(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`, + ); + await db.execute(sql` + create table ${exercises} ( + id serial primary key, + name varchar not null, + force force, + level level, + mechanic mechanic, + equipment equipment, + instructions text, + category category, + primary_muscles muscle[], + secondary_muscles muscle[], + created_at timestamp not null default now(), + updated_at timestamp not null default now() + ) + `); + + await db.insert(exercises).values({ + name: 'Bench Press', + force: 'isotonic', + level: 'beginner', + mechanic: 'compound', + equipment: 'barbell', + instructions: + 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', + category: 'upper_body', + primaryMuscles: ['chest', 'triceps'], + secondaryMuscles: ['shoulders', 'traps'], + }); + + const result = await db.select().from(exercises); + + expect(result).toEqual([ + { + id: 1, + name: 'Bench Press', + force: 'isotonic', + level: 'beginner', + mechanic: 'compound', + equipment: 'barbell', + instructions: + 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', + category: 'upper_body', + primaryMuscles: ['chest', 'triceps'], + secondaryMuscles: ['shoulders', 'traps'], + createdAt: result[0]!.createdAt, + updatedAt: result[0]!.updatedAt, + }, + ]); + + await db.execute(sql`drop table ${exercises}`); + await db.execute(sql`drop type ${sql.identifier(muscleEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(forceEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(levelEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(mechanicEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(equipmentEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(categoryEnum.enumName)}`); + }); + + test('all date and time columns', async (ctx) => { + const { db } = ctx.pg; + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + dateString: date('date_string', { mode: 'string' }).notNull(), + time: time('time', { precision: 3 }).notNull(), + datetime: timestamp('datetime').notNull(), + datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), + datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), + datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), + datetimeWTZString: timestamp('datetime_wtz_string', { 
withTimezone: true, mode: 'string' }).notNull(), + interval: interval('interval').notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + date_string date not null, + time time(3) not null, + datetime timestamp not null, + datetime_wtz timestamp with time zone not null, + datetime_string timestamp not null, + datetime_full_precision timestamp(6) not null, + datetime_wtz_string timestamp with time zone not null, + interval interval not null + ) + `); + + const someDatetime = new Date('2022-01-01T00:00:00.123Z'); + const fullPrecision = '2022-01-01T00:00:00.123456Z'; + const someTime = '23:23:12.432'; + + await db.insert(table).values({ + dateString: '2022-01-01', + time: someTime, + datetime: someDatetime, + datetimeWTZ: someDatetime, + datetimeString: '2022-01-01T00:00:00.123Z', + datetimeFullPrecision: fullPrecision, + datetimeWTZString: '2022-01-01T00:00:00.123Z', + interval: '1 day', + }); + + const result = await db.select().from(table); + + Expect< + Equal<{ + id: number; + dateString: string; + time: string; + datetime: Date; + datetimeWTZ: Date; + datetimeString: string; + datetimeFullPrecision: string; + datetimeWTZString: string; + interval: string; + }[], typeof result> + >; + + Expect< + Equal<{ + dateString: string; + time: string; + datetime: Date; + datetimeWTZ: Date; + datetimeString: string; + datetimeFullPrecision: string; + datetimeWTZString: string; + interval: string; + id?: number | undefined; + }, typeof table.$inferInsert> + >; + + expect(result).toEqual([ + { + id: 1, + dateString: '2022-01-01', + time: someTime, + datetime: someDatetime, + datetimeWTZ: someDatetime, + datetimeString: '2022-01-01 00:00:00.123', + datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), + datetimeWTZString: '2022-01-01 00:00:00.123+00', + interval: '1 day', + }, + ]); + + await db.execute(sql`drop table if exists ${table}`); + }); + + test('all 
date and time columns with timezone second case mode date', async (ctx) => { + const { db } = ctx.pg; + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const insertedDate = new Date(); + + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as date and check that timezones are the same + // There is no way to check timezone in Date object, as it is always represented internally in UTC + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: insertedDate }]); + + // 3. Compare both dates + expect(insertedDate.getTime()).toBe(result[0]?.timestamp.getTime()); + + await db.execute(sql`drop table if exists ${table}`); + }); + + test('all date and time columns with timezone third case mode date', async (ctx) => { + const { db } = ctx.pg; + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC + const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones + + // 1. 
Insert date as new dates with different time zones + await db.insert(table).values([ + { timestamp: insertedDate }, + { timestamp: insertedDate2 }, + ]); + + // 2, Select and compare both dates + const result = await db.select().from(table); + + expect(result[0]?.timestamp.getTime()).toBe(result[1]?.timestamp.getTime()); + + await db.execute(sql`drop table if exists ${table}`); + }); + + test('orderBy with aliased column', (ctx) => { + const { db } = ctx.pg; + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); + }); + + test('select from sql', async (ctx) => { + const { db } = ctx.pg; + + const metricEntry = pgTable('metric_entry', { + id: pgUuid('id').notNull(), + createdAt: timestamp('created_at').notNull(), + }); + + await db.execute(sql`drop table if exists ${metricEntry}`); + await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); + + const metricId = uuidV4(); + + const intervals = db.$with('intervals').as( + db + .select({ + startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), + endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), + }) + .from(sql`generate_series(0, 29, 1) as t(x)`), + ); + + const func = () => + db + .with(intervals) + .select({ + startTime: intervals.startTime, + endTime: intervals.endTime, + count: sql`count(${metricEntry})`, + }) + .from(metricEntry) + .rightJoin( + intervals, + and( + eq(metricEntry.id, metricId), + gte(metricEntry.createdAt, intervals.startTime), + lt(metricEntry.createdAt, intervals.endTime), + ), + ) + .groupBy(intervals.startTime, intervals.endTime) + .orderBy(asc(intervals.startTime)); + + await expect((async () => { + func(); + })()).resolves.not.toThrowError(); + }); + + test('timestamp timezone', async (ctx) => { + const { db } = ctx.pg; + + const 
usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), + }); + + await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); + + await db.execute( + sql` + create table users_test_with_and_without_timezone ( + id serial not null primary key, + name text not null, + created_at timestamptz not null default now(), + updated_at timestamp not null default now() + ) + `, + ); + + const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); + + await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); + await db.insert(usersTableWithAndWithoutTimezone).values({ + name: 'Without default times', + createdAt: date, + updatedAt: date, + }); + const users = await db.select().from(usersTableWithAndWithoutTimezone); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.updatedAt.getTime() - Date.now())).toBeLessThan(2000); + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.updatedAt.getTime() - date.getTime())).toBeLessThan(2000); + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + }); + + test('transaction', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users_transactions', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + const products = pgTable('products_transactions', { + id: serial('id').primaryKey(), + price: integer('price').notNull(), + stock: integer('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists 
${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, + ); + + const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); + const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + }); + + test('transaction rollback', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); + }); + + test('nested transaction', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users_nested_transactions', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + 
sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + }); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 200 }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('nested transaction rollback', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users_nested_transactions_rollback', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await expect((async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 100 }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('join subquery with join', async (ctx) => { + const { db } = ctx.pg; + + const internalStaff = pgTable('internal_staff', { + userId: integer('user_id').notNull(), + }); + + const customUser = pgTable('custom_user', { + id: integer('id').notNull(), + }); + + const ticket = pgTable('ticket', { + staffId: integer('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table 
custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); + }); + + test('subquery with view', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await 
db.execute(sql`drop table ${users}`); + }); + + test('join view as subquery', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); + + test('table selection with single table', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not 
null, city_id integer not null)`, + ); + + await db.insert(users).values({ name: 'John', cityId: 1 }); + + const result = await db.select({ users }).from(users); + + expect(result).toEqual([{ users: { id: 1, name: 'John', cityId: 1 } }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('set null to jsonb field', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + jsonb: jsonb('jsonb'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, + ); + + const result = await db.insert(users).values({ jsonb: null }).returning(); + + expect(result).toEqual([{ id: 1, jsonb: null }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('insert undefined', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('update undefined', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + await expect((async () => { + db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('array operators', async (ctx) => { + const { db } = ctx.pg; 
+ + const posts = pgTable('posts', { + id: serial('id').primaryKey(), + tags: text('tags').array(), + }); + + await db.execute(sql`drop table if exists ${posts}`); + + await db.execute( + sql`create table ${posts} (id serial primary key, tags text[])`, + ); + + await db.insert(posts).values([{ + tags: ['ORM'], + }, { + tags: ['Typescript'], + }, { + tags: ['Typescript', 'ORM'], + }, { + tags: ['Typescript', 'Frontend', 'React'], + }, { + tags: ['Typescript', 'ORM', 'Database', 'Postgres'], + }, { + tags: ['Java', 'Spring', 'OOP'], + }]); + + const contains = await db.select({ id: posts.id }).from(posts) + .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); + const contained = await db.select({ id: posts.id }).from(posts) + .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); + const overlaps = await db.select({ id: posts.id }).from(posts) + .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); + const withSubQuery = await db.select({ id: posts.id }).from(posts) + .where(arrayContains( + posts.tags, + db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), + )); + + expect(contains).toEqual([{ id: 3 }, { id: 5 }]); + expect(contained).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + expect(overlaps).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); + expect(withSubQuery).toEqual([{ id: 1 }, { id: 3 }, { id: 5 }]); + }); + + test('set operations (union) from query builder with subquery', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const sq = db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).as('sq'); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).union( + db.select().from(sq), + ).orderBy(asc(sql`name`)).limit(2).offset(1); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 3, name: 'Jack' }, + { id: 2, name: 'Jane' }, + ]); + + await expect((async () => { + db + .select({ 
id: cities2Table.id, name: citiesTable.name, name2: users2Table.name }) + .from(cities2Table).union( + // @ts-expect-error + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + ).orderBy(asc(sql`name`)); + })()).rejects.toThrowError(); + }); + + test('set operations (union) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`name`)).limit(1).offset(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + union( + db + .select({ name: citiesTable.name, id: cities2Table.id }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`name`)); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) from query builder', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).unionAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + 
.select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).unionAll( + db + .select({ name: citiesTable.name, id: cities2Table.id }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await unionAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) from query builder', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).intersect( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`name`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ id: cities2Table.id, name: citiesTable.name }) + 
.from(cities2Table).intersect( + // @ts-expect-error + db + .select({ id: cities2Table.id, name: citiesTable.name, id2: cities2Table.id }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`name`)); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await intersect( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect all) from query builder', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).intersectAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).intersectAll( + db + .select({ name: users2Table.name, id: 
users2Table.id }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect all) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await intersectAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + intersectAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (except) from query builder', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(cities2Table).except( + db + .select() + .from(cities2Table).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(cities2Table).except( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (except) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await except( + db + .select({ id: cities2Table.id, name: 
citiesTable.name }) + .from(cities2Table), + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + except( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (except all) from query builder', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(cities2Table).exceptAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: cities2Table.name, id: cities2Table.id }) + .from(cities2Table).exceptAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (except all) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await exceptAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + db + .select({ id: users2Table.id, name: users2Table.name }) + 
.from(users2Table).where(gt(users2Table.id, 7)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)).limit(5).offset(2); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 7, name: 'Mary' }, + ]); + + await expect((async () => { + exceptAll( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gt(users2Table.id, 7)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (mixed) from query builder with subquery', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + const sq = db + .select() + .from(cities2Table).where(gt(citiesTable.id, 1)).as('sq'); + + const result = await db + .select() + .from(cities2Table).except( + ({ unionAll }) => + unionAll( + db.select().from(sq), + db.select().from(cities2Table).where(eq(citiesTable.id, 2)), + ), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(cities2Table).except( + ({ unionAll }) => + unionAll( + db + .select({ name: cities2Table.name, id: cities2Table.id }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + db.select().from(cities2Table).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (mixed all) as function', async (ctx) => { + const { db } = ctx.pg; + + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: 
users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(6); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 8, name: 'Sally' }, + ]); + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('aggregate function: count', async (ctx) => { + const { db } = ctx.pg; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test('aggregate function: avg', async (ctx) => { + const { db } = ctx.pg; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + 
expect(result1[0]?.value).toBe('33.3333333333333333'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('42.5000000000000000'); + }); + + test('aggregate function: sum', async (ctx) => { + const { db } = ctx.pg; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('170'); + }); + + test('aggregate function: max', async (ctx) => { + const { db } = ctx.pg; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBeNull(); + }); + + test('aggregate function: min', async (ctx) => { + const { db } = ctx.pg; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBeNull(); + }); + + test('array mapping and parsing', async (ctx) => { + const { db } = ctx.pg; + + const arrays = pgTable('arrays_tests', { + id: serial('id').primaryKey(), + tags: text('tags').array(), + nested: text('nested').array().array(), + numbers: integer('numbers').notNull().array(), + }); + + await db.execute(sql`drop table if exists ${arrays}`); + await db.execute(sql` + create table ${arrays} ( + id serial primary key, + tags text[], + nested text[][], + numbers integer[] + ) + `); + + await db.insert(arrays).values({ + tags: ['', 'b', 'c'], + 
nested: [['1', ''], ['3', '\\a']], + numbers: [1, 2, 3], + }); + + const result = await db.select().from(arrays); + + expect(result).toEqual([{ + id: 1, + tags: ['', 'b', 'c'], + nested: [['1', ''], ['3', '\\a']], + numbers: [1, 2, 3], + }]); + + await db.execute(sql`drop table ${arrays}`); + }); + + test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { + const { db } = ctx.pg; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial primary key, + name text not null, + update_counter integer default 1 not null, + updated_at timestamp(3), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 250; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { + const { db } = ctx.pg; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial primary key, + name text not null, + update_counter integer default 1, + updated_at timestamp(3), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will be null after 
updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 15000; + + expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test('test if method with sql operators', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + city: text('city').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute(sql` + create table ${users} ( + id serial primary key, + name text not null, + age integer not null, + city text not null + ) + `); + + await db.insert(users).values([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition1 = true; + + const [result1] = await 
db.select().from(users).where(eq(users.id, 1).if(condition1)); + + expect(result1).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); + + const condition2 = 1; + + const [result2] = await db.select().from(users).where(sql`${users.id} = 1`.if(condition2)); + + expect(result2).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); + + const condition3 = 'non-empty string'; + + const result3 = await db.select().from(users).where( + or(eq(users.id, 1).if(condition3), eq(users.id, 2).if(condition3)), + ); + + expect(result3).toEqual([{ id: 1, name: 'John', age: 20, city: 'New York' }, { + id: 2, + name: 'Alice', + age: 21, + city: 'New York', + }]); + + const condtition4 = false; + + const result4 = await db.select().from(users).where(eq(users.id, 1).if(condtition4)); + + expect(result4).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition5 = undefined; + + const result5 = await db.select().from(users).where(sql`${users.id} = 1`.if(condition5)); + + expect(result5).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition6 = null; + + const result6 = await db.select().from(users).where( + or(eq(users.id, 1).if(condition6), eq(users.id, 2).if(condition6)), + ); + + expect(result6).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition7 = { + term1: 0, + term2: 1, + }; + + const result7 = await db.select().from(users).where( + and(gt(users.age, 20).if(condition7.term1), eq(users.city, 
'New York').if(condition7.term2)), + ); + + expect(result7).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + ]); + + const condition8 = { + term1: '', + term2: 'non-empty string', + }; + + const result8 = await db.select().from(users).where( + or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2)), + ); + + expect(result8).toEqual([ + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition9 = { + term1: 1, + term2: true, + }; + + const result9 = await db.select().from(users).where( + and( + inArray(users.city, ['New York', 'London']).if(condition9.term1), + ilike(users.name, 'a%').if(condition9.term2), + ), + ); + + expect(result9).toEqual([ + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + ]); + + const condition10 = { + term1: 4, + term2: 19, + }; + + const result10 = await db.select().from(users).where( + and( + sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), + gt(users.age, condition10.term2).if(condition10.term2 > 20), + ), + ); + + expect(result10).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition11 = true; + + const result11 = await db.select().from(users).where( + or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11), + ); + + expect(result11).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition12 = false; + + const result12 = await db.select().from(users).where( + and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12), + ); + + expect(result12).toEqual([ + { id: 1, name: 'John', age: 20, 
city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition13 = true; + + const result13 = await db.select().from(users).where(sql`(city = 'New York' or age >= 22)`.if(condition13)); + + expect(result13).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition14 = false; + + const result14 = await db.select().from(users).where(sql`(city = 'London' and age >= 23)`.if(condition14)); + + expect(result14).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + await db.execute(sql`drop table ${users}`); + }); + + // MySchema tests + test('mySchema :: select all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: select sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select typed sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + 
const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select distinct', async (ctx) => { + const { db } = ctx.pg; + + const usersDistinctTable = pgTable('users_distinct', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( + usersDistinctTable.id, + ); + const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.name); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + + expect(users2).toHaveLength(2); + expect(users2[0]?.id).toBe(1); + expect(users2[1]?.id).toBe(2); + + expect(users3).toHaveLength(2); + expect(users3[0]?.name).toBe('Jane'); + expect(users3[1]?.name).toBe('John'); + }); + + test('mySchema :: insert returning sql', async (ctx) => { + const { db } = ctx.pg; + + const users = await db.insert(usersMySchemaTable).values({ name: 'John' }).returning({ + name: sql`upper(${usersMySchemaTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: delete returning sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await 
db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning({ + name: sql`upper(${usersMySchemaTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: update with returning partial', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where(eq(usersMySchemaTable.name, 'John')) + .returning({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('mySchema :: delete with returning all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + }); + + test('mySchema :: insert + select', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('mySchema :: insert with overridden default values', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 
'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: insert many', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('mySchema :: select with group by as field', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test('mySchema :: select with group by as column + sql', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + }); + + test('mySchema :: build query', async (ctx) => { + const { db } = ctx.pg; + + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name 
}).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "mySchema"."users" group by "users"."id", "users"."name"', + params: [], + }); + }); + + test('mySchema :: partial join with alias', async (ctx) => { + const { db } = ctx.pg; + const customerAlias = alias(usersMySchemaTable, 'customer'); + + await db.insert(usersMySchemaTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test('mySchema :: insert with spaces', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('mySchema :: prepared statement with placeholder in .limit', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }) + .from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('mySchema_stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); + }); + + test('mySchema :: build query insert with onConflict do update / multiple columns', async (ctx) => { + const { db } = ctx.pg; + + 
const query = db.insert(usersMySchemaTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [usersMySchemaTable.id, usersMySchemaTable.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); + }); + + test('mySchema :: build query insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.pg; + + const query = db.insert(usersMySchemaTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: usersMySchemaTable.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); + }); + + test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { + const { db } = ctx.pg; + + await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); + await db.insert(usersTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(customerAlias.id, 11)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.users.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); + }); + + test('mySchema :: view', async (ctx) => { + const { db } = ctx.pg; + + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => 
qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + + test('mySchema :: materialized view', async (ctx) => { + const { db } = ctx.pg; + + const newYorkers1 = mySchema.materializedView('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.materializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: 
integer('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.materializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([]); + } + + await db.refreshMaterializedView(newYorkers1); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop materialized view ${newYorkers1}`); + }); + }); +} diff --git a/integration-tests/tests/pg.custom.test.ts b/integration-tests/tests/pg/pg-custom.test.ts similarity index 63% rename from integration-tests/tests/pg.custom.test.ts rename to integration-tests/tests/pg/pg-custom.test.ts index 923eed9ad..0d21261a6 100644 --- a/integration-tests/tests/pg.custom.test.ts +++ b/integration-tests/tests/pg/pg-custom.test.ts @@ -1,19 +1,47 @@ -import 'dotenv/config'; - -import 
type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { asc, eq, name, placeholder, sql } from 'drizzle-orm'; +import retry from 'async-retry'; +import { asc, eq, sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { migrate } from 'drizzle-orm/node-postgres/migrator'; import { alias, customType, pgTable, pgTableCreator, serial, text } from 'drizzle-orm/pg-core'; -import getPort from 'get-port'; -import pg from 'pg'; -import { v4 as uuid } from 'uuid'; -import { randomString } from './utils'; +import { Client } from 'pg'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { randomString } from '~/utils'; +import { createDockerDB } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: NodePgDatabase; +let client: Client; + +beforeAll(async () => { + const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
await createDockerDB(); + client = await retry(async () => { + client = new Client(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); -const { Client } = pg; +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { @@ -69,82 +97,11 @@ const usersMigratorTable = pgTable('users12', { email: text('email').notNull(), }); -interface Context { - docker: Docker; - pgContainer: Docker.Container; - db: NodePgDatabase; - client: pg.Client; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - ctx.pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
await createDockerDB(ctx); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = new Client(connectionString); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: false }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema public cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( +beforeEach(async (ctx) => { + const { db } = ctx.pg; + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( sql` create table users ( id serial primary key, @@ -157,88 +114,88 @@ test.beforeEach(async (t) => { ); }); -test.serial('select all fields', async (t) => { - const { db } = t.context; +test('select all fields', async (ctx) => { + const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); + 
expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test.serial('select sql', async (t) => { - const { db } = t.context; +test('select sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('select typed sql', async (t) => { - const { db } = t.context; +test('select typed sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('insert returning sql', async (t) => { - const { db } = t.context; +test('insert returning sql', async (ctx) => { + const { db } = ctx.pg; const users = await db.insert(usersTable).values({ name: 'John' }).returning({ name: sql`upper(${usersTable.name})`, }); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('delete returning sql', async (t) => { - const { db } = t.context; +test('delete returning sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }); - t.deepEqual(users, [{ name: 'JOHN' }]); + expect(users).toEqual([{ name: 'JOHN' }]); }); -test.serial('update returning sql', async (t) => { - const { db } = t.context; +test('update returning sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ name: 
sql`upper(${usersTable.name})`, }); - t.deepEqual(users, [{ name: 'JANE' }]); + expect(users).toEqual([{ name: 'JANE' }]); }); -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; +test('update with returning all fields', async (ctx) => { + const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test.serial('update with returning partial', async (t) => { - const { db } = t.context; +test('update with returning partial', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ @@ -246,24 +203,24 @@ test.serial('update with returning partial', async (t) => { name: usersTable.name, }); - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); + expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; +test('delete with returning all fields', async (ctx) => { + const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line 
no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; +test('delete with returning partial', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ @@ -271,26 +228,26 @@ test.serial('delete with returning partial', async (t) => { name: usersTable.name, }); - t.deepEqual(users, [{ id: 1, name: 'John' }]); + expect(users).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert + select', async (t) => { - const { db } = t.context; +test('insert + select', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ + expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); -test.serial('json insert', async (t) => { - const { db } = t.context; +test('json insert', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 
'bar'] }); const result = await db.select({ @@ -299,20 +256,20 @@ test.serial('json insert', async (t) => { jsonb: usersTable.jsonb, }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; +test('insert with overridden default values', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test.serial('insert many', async (t) => { - const { db } = t.context; +test('insert many', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([ { name: 'John' }, @@ -327,7 +284,7 @@ test.serial('insert many', async (t) => { verified: usersTable.verified, }).from(usersTable); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, @@ -335,8 +292,8 @@ test.serial('insert many', async (t) => { ]); }); -test.serial('insert many with returning', async (t) => { - const { db } = t.context; +test('insert many with returning', async (ctx) => { + const { db } = ctx.pg; const result = await db.insert(usersTable).values([ { name: 'John' }, @@ -351,7 +308,7 @@ test.serial('insert many with returning', async (t) => { verified: usersTable.verified, }); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, 
verified: false }, @@ -359,52 +316,52 @@ test.serial('insert many with returning', async (t) => { ]); }); -test.serial('select with group by as field', async (t) => { - const { db } = t.context; +test('select with group by as field', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; +test('select with group by as sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; +test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; +test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name 
}).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; +test('select with group by complex query', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -413,32 +370,32 @@ test.serial('select with group by complex query', async (t) => { .orderBy(asc(usersTable.name)) .limit(1); - t.deepEqual(result, [{ name: 'Jane' }]); + expect(result).toEqual([{ name: 'Jane' }]); }); -test.serial('build query', async (t) => { - const { db } = t.context; +test('build query', async (ctx) => { + const { db } = ctx.pg; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], }); }); -test.serial('insert sql', async (t) => { - const { db } = t.context; +test('insert sql', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('partial join with alias', async (t) => { - const { db } = t.context; +test('partial join with alias', async (ctx) => { + const { db } = ctx.pg; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -456,14 +413,14 @@ test.serial('partial join with alias', async (t) => { .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 
10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); -test.serial('full join with alias', async (t) => { - const { db } = t.context; +test('full join with alias', async (ctx) => { + const { db } = ctx.pg; const pgTable = pgTableCreator((name) => `prefixed_${name}`); @@ -483,7 +440,7 @@ test.serial('full join with alias', async (t) => { .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); - t.deepEqual(result, [{ + expect(result).toEqual([{ users: { id: 10, name: 'Ivan', @@ -497,17 +454,17 @@ test.serial('full join with alias', async (t) => { await db.execute(sql`drop table ${users}`); }); -test.serial('insert with spaces', async (t) => { - const { db } = t.context; +test('insert with spaces', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); -test.serial('prepared statement', async (t) => { - const { db } = t.context; +test('prepared statement', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ @@ -517,15 +474,15 @@ test.serial('prepared statement', async (t) => { .prepare('statement1'); const result = await statement.execute(); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; +test('prepared statement reuse', async (ctx) => { + const { db } = ctx.pg; const stmt = db.insert(usersTable).values({ verified: true, - name: placeholder('name'), + name: sql.placeholder('name'), }).prepare('stmt2'); for (let i = 0; i < 10; i++) { @@ -538,7 +495,7 @@ test.serial('prepared statement reuse', async (t) => { 
verified: usersTable.verified, }).from(usersTable); - t.deepEqual(result, [ + expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, @@ -552,23 +509,23 @@ test.serial('prepared statement reuse', async (t) => { ]); }); -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; +test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) + .where(eq(usersTable.id, sql.placeholder('id'))) .prepare('stmt3'); const result = await stmt.execute({ id: 1 }); - t.deepEqual(result, [{ id: 1, name: 'John' }]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; +test('prepared statement with placeholder in .limit', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const stmt = db @@ -577,18 +534,18 @@ test.serial('prepared statement with placeholder in .limit', async (t) => { name: usersTable.name, }) .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) + .where(eq(usersTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) .prepare('stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); }); -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; +test('prepared statement with placeholder in .offset', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable).values([{ 
name: 'John' }, { name: 'John1' }]); const stmt = db @@ -597,17 +554,15 @@ test.serial('prepared statement with placeholder in .offset', async (t) => { name: usersTable.name, }) .from(usersTable) - .offset(placeholder('offset')) + .offset(sql.placeholder('offset')) .prepare('stmt_offset'); const result = await stmt.execute({ offset: 1 }); - t.deepEqual(result, [{ id: 2, name: 'John1' }]); + expect(result).toEqual([{ id: 2, name: 'John1' }]); }); -test.serial('migrator : default migration strategy', async (t) => { - const { db } = t.context; - +test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); @@ -618,15 +573,14 @@ test.serial('migrator : default migration strategy', async (t) => { const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); -test.serial('migrator : migrate with custom schema', async (t) => { - const { db } = t.context; +test('migrator : migrate with custom schema', async () => { const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); @@ -636,20 +590,19 @@ test.serial('migrator : migrate with custom schema', async (t) => { // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - t.true(rowCount > 0); + expect(rowCount! 
> 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); }); -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; +test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); @@ -659,20 +612,19 @@ test.serial('migrator : migrate with custom table', async (t) => { // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - t.true(rowCount > 0); + expect(rowCount! 
> 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); -test.serial('migrator : migrate with custom table and custom schema', async (t) => { - const { db } = t.context; +test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); @@ -689,109 +641,103 @@ test.serial('migrator : migrate with custom table and custom schema', async (t) const { rowCount } = await db.execute( sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, ); - t.true(rowCount > 0); + expect(rowCount! 
> 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`); +test('insert via db.execute + select via db.execute', async () => { + await db.execute(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); - t.deepEqual(result.rows, [{ id: 1, name: 'John' }]); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - +test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ - name(usersTable.name.name) + sql.identifier(usersTable.name.name) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - +test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); - t.deepEqual(inserted.rows, [{ id: 
1, name: 'John' }]); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; +test('build query insert with onConflict do update', async (ctx) => { + const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; +test('build query insert with onConflict do update / multiple columns', async (ctx) => { + const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; +test('build query insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', params: ['John', '["foo","bar"]'], }); }); -test.serial('build query 
insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; +test('build query insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) .toSQL(); - t.deepEqual(query, { + expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], }); }); -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; +test('insert with onConflict do update', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable) .values({ name: 'John' }); @@ -804,11 +750,11 @@ test.serial('insert with onConflict do update', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John1' }]); + expect(res).toEqual([{ id: 1, name: 'John1' }]); }); -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; +test('insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable) .values({ name: 'John' }); @@ -821,11 +767,11 @@ test.serial('insert with onConflict do nothing', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John' }]); + expect(res).toEqual([{ id: 1, name: 'John' }]); }); -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; +test('insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.pg; await db.insert(usersTable) .values({ name: 'John' }); @@ -838,5 +784,5 @@ test.serial('insert with onConflict do nothing + target', async (t) => { eq(usersTable.id, 1), ); - t.deepEqual(res, [{ id: 1, name: 'John' }]); + expect(res).toEqual([{ id: 1, name: 'John' }]); }); diff --git a/integration-tests/tests/pg/pg-proxy.test.ts 
b/integration-tests/tests/pg/pg-proxy.test.ts new file mode 100644 index 000000000..4fb473df6 --- /dev/null +++ b/integration-tests/tests/pg/pg-proxy.test.ts @@ -0,0 +1,488 @@ +import retry from 'async-retry'; +import { sql } from 'drizzle-orm'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import type { PgRemoteDatabase } from 'drizzle-orm/pg-proxy'; +import { drizzle as proxyDrizzle } from 'drizzle-orm/pg-proxy'; +import { migrate } from 'drizzle-orm/pg-proxy/migrator'; +import * as pg from 'pg'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: pg.Client) { + const { types } = pg; + + types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); + types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); + types.setTypeParser(types.builtins.DATE, (val) => val); + types.setTypeParser(types.builtins.INTERVAL, (val) => val); + } + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + text: sql, + values: params, + rowMode: 'array', + }); + + return { data: result.rows as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + text: sql, + values: params, + }); + + return { data: result.rows as any }; + } catch (e: any) { + return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('BEGIN'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +const ENABLE_LOGGING = 
false; + +let db: PgRemoteDatabase; +let client: pg.Client; +let serverSimulator: ServerSimulator; + +beforeAll(async () => { + const connectionString = process.env['PG_CONNECTION_STRING'] ?? await createDockerDB(); + client = await retry(async () => { + client = new pg.Client(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + serverSimulator = new ServerSimulator(client); + db = proxyDrizzle(async (sql, params, method) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from pg proxy server:', e.message); + throw e; + } + }, { + logger: ENABLE_LOGGING, + }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + // './drizzle2/pg-proxy/first' ?? 
+ await migrate(db, async (queries) => { + try { + await serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. 
Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'test mode string for timestamp with timezone in different timezone', + 'transaction', + 'transaction rollback', + 'nested transaction', + 'nested transaction rollback', + 'test $onUpdateFn and $onUpdate works updating', +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result).toEqual([{ id: 1, name: 'John' }]); 
+}); + +test('insert via db.execute + returning', async () => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pg/pglite.test.ts b/integration-tests/tests/pg/pglite.test.ts new file mode 100644 index 000000000..37cd3fe62 --- /dev/null +++ b/integration-tests/tests/pg/pglite.test.ts @@ -0,0 +1,106 @@ +import { PGlite } from '@electric-sql/pglite'; +import { Name, sql } from 'drizzle-orm'; +import { drizzle, type PgliteDatabase } from 'drizzle-orm/pglite'; +import { migrate } from 'drizzle-orm/pglite/migrator'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { tests, usersMigratorTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: PgliteDatabase; +let client: PGlite; + +beforeAll(async () => { + client = new PGlite(); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.close(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await 
db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute + returning', async () => { + const result = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${new Name( + usersTable.name.name, + )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const result = await db.execute>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'test mode string for timestamp with 
timezone in different timezone', + 'view', + 'materialized view', + 'subquery with view', + 'mySchema :: materialized view', + 'select count()', +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); diff --git a/integration-tests/tests/pg/postgres-js.test.ts b/integration-tests/tests/pg/postgres-js.test.ts new file mode 100644 index 000000000..7becec7eb --- /dev/null +++ b/integration-tests/tests/pg/postgres-js.test.ts @@ -0,0 +1,473 @@ +import retry from 'async-retry'; +import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'; +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres, { type Sql } from 'postgres'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; + +import { Name, sql } from 'drizzle-orm'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { migrate } from 'drizzle-orm/postgres-js/migrator'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: PostgresJsDatabase; +let client: Sql; + +beforeAll(async () => { + const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
await createDockerDB(); + client = await retry(async () => { + client = postgres(connectionString, { + max: 1, + onnotice: () => { + // disable notices + }, + }); + await client`select 1`; + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom schema', async () => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + + // test if the custom migrations table was created + const { count } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(count > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await 
db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { count } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(count > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); + + // test if the custom migrations table was created + const { count } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + 
expect(count > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect([...result]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. 
Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'test mode string for timestamp with timezone in different timezone', +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute + returning', async () => { + const result = await db.execute<{ id: number; name: string }>( + sql`insert 
into ${usersTable} (${new Name( + usersTable.name.name, + )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const result = await db.execute>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pg/vercel-pg.test.ts b/integration-tests/tests/pg/vercel-pg.test.ts new file mode 100644 index 000000000..5f3062eff --- /dev/null +++ b/integration-tests/tests/pg/vercel-pg.test.ts @@ -0,0 +1,486 @@ +import { createClient, type VercelClient } from '@vercel/postgres'; +import { sql } from 'drizzle-orm'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; +import { migrate } from 'drizzle-orm/vercel-postgres/migrator'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: VercelPgDatabase; +let client: VercelClient; + +beforeAll(async () => { + const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
(await createDockerDB()); + + const sleep = 250; + let timeLeft = 5000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + client = createClient({ connectionString }); + await client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.log(connectionString); + console.error('Cannot connect to Postgres'); + await client?.end().catch(console.error); + // await pgContainer?.stop().catch(console.error); + throw lastError; + } + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom schema', async () => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from 
${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + 
migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. 
Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. 
Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'test mode string for timestamp with timezone in different timezone', + 'build query insert with onConflict do nothing + target', // + 'select from tables with same name from different schema using alias', // +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); 
+}); + +test('insert via db.execute + returning', async () => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pg/xata-http.test.ts b/integration-tests/tests/pg/xata-http.test.ts new file mode 100644 index 000000000..80c97e765 --- /dev/null +++ b/integration-tests/tests/pg/xata-http.test.ts @@ -0,0 +1,425 @@ +import retry from 'async-retry'; +import { sql } from 'drizzle-orm'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { drizzle } from 'drizzle-orm/xata-http'; +import type { XataHttpClient, XataHttpDatabase } from 'drizzle-orm/xata-http'; +import { migrate } from 'drizzle-orm/xata-http/migrator'; +import { beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { getXataClient } from '../xata/xata.ts'; +import { tests, usersMigratorTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: XataHttpDatabase; +let client: XataHttpClient; + +beforeAll(async () => { + const apiKey = process.env['XATA_API_KEY']; + if (!apiKey) { + throw new Error('XATA_API_KEY is not defined'); + } + + client = await retry(async () => { + client = getXataClient(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + 
+test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { records } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(records && records.length > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 
}).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. 
Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. 
Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.records[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'test mode string for timestamp with timezone in different timezone', + 'view', + 'materialized view', + 'select from enum', + 'subquery with view', +]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.records).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute + returning', 
async () => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pglite.test.ts b/integration-tests/tests/pglite.test.ts deleted file mode 100644 index 92ab48f29..000000000 --- a/integration-tests/tests/pglite.test.ts +++ /dev/null @@ -1,4072 +0,0 @@ -import 'dotenv/config'; - -import { PGlite } from '@electric-sql/pglite'; -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - avg, - avgDistinct, - count, - countDistinct, - eq, - exists, - getTableColumns, - gt, - gte, - inArray, - lt, - max, - min, - name, - placeholder, - type SQL, - sql, - type SQLWrapper, - sum, - sumDistinct, - TransactionRollbackError, -} from 'drizzle-orm'; -import { - alias, - boolean, - char, - cidr, - date, - except, - exceptAll, - foreignKey, - getMaterializedViewConfig, - getTableConfig, - getViewConfig, - inet, - integer, - intersect, - intersectAll, - interval, - jsonb, - macaddr, - macaddr8, - numeric, - type PgColumn, - pgEnum, - pgMaterializedView, - pgTable, - pgTableCreator, - pgView, - primaryKey, - serial, - text, - time, - timestamp, - union, - unionAll, - unique, - uniqueKeyName, - uuid as pgUuid, - varchar, -} from 'drizzle-orm/pg-core'; -import { drizzle, type PgliteDatabase } from 'drizzle-orm/pglite'; -import { migrate } from 'drizzle-orm/pglite/migrator'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, 
randomString } from './utils.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id' as string).primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const usersOnUpdate = pgTable('users_on_update', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), - updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), - alwaysNull: text('always_null').$type().$onUpdate(() => null), - // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), looks like this is not supported in pg -}); - -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const cities2Table = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const network = pgTable('network_table', { - inet: inet('inet').notNull(), - cidr: 
cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), - macaddr8: macaddr8('macaddr8').notNull(), -}); - -const salEmp = pgTable('sal_emp', { - name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const _tictactoe = pgTable('tictactoe', { - squares: integer('squares').array(3).array(3), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -// To test aggregate functions -const aggregateTable = pgTable('aggregate_table', { - id: serial('id').notNull(), - name: text('name').notNull(), - a: integer('a'), - b: integer('b'), - c: integer('c'), - nullOnly: integer('null_only'), -}); - -interface Context { - db: PgliteDatabase; - client: PGlite; -} - -const test = anyTest as TestFn; - -test.before(async (t) => { - const ctx = t.context; - - ctx.client = new PGlite(); - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema public cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.db.execute( - sql` - create table cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - ); - await ctx.db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table course_categories ( - id serial primary key, - name text not null - ) - `, - ); - await ctx.db.execute( - sql` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - 
); - await ctx.db.execute( - sql` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - ); - await ctx.db.execute( - sql` - create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null - ) - `, - ); - await ctx.db.execute( - sql` - create table sal_emp ( - name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - ); - await ctx.db.execute( - sql` - create table tictactoe ( - squares integer[3][3] not null - ) - `, - ); -}); - -async function setupSetOperationTest(db: PgliteDatabase) { - await db.execute(sql`drop table if exists users2`); - await db.execute(sql`drop table if exists cities`); - await db.execute( - sql` - create table cities ( - id serial primary key, - name text not null - ) - `, - ); - await db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - - await db.insert(cities2Table).values([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 3 }, - { id: 4, name: 'Peter', cityId: 3 }, - { id: 5, name: 'Ben', cityId: 2 }, - { id: 6, name: 'Jill', cityId: 1 }, - { id: 7, name: 'Mary', cityId: 2 }, - { id: 8, name: 'Sally', cityId: 1 }, - ]); -} - -async function setupAggregateFunctionsTest(db: PgliteDatabase) { - await db.execute(sql`drop table if exists "aggregate_table"`); - await db.execute( - sql` - create table "aggregate_table" ( - "id" serial not null, - "name" text not null, - "a" integer, - "b" integer, - "c" integer, - "null_only" integer - ); - `, - ); - await db.insert(aggregateTable).values([ - { name: 'value 1', a: 5, b: 10, c: 20 }, - { name: 'value 
1', a: 5, b: 20, c: 30 }, - { name: 'value 2', a: 10, b: 50, c: 60 }, - { name: 'value 3', a: 20, b: 20, c: null }, - { name: 'value 4', a: null, b: 90, c: 120 }, - { name: 'value 5', a: 80, b: 10, c: null }, - { name: 'value 6', a: null, b: null, c: 150 }, - ]); -} - -test.serial('table configs: unique third param', async (t) => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), - f1: unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - t.assert(tableConfig.uniqueConstraints.length === 2); - - t.assert(tableConfig.uniqueConstraints[0]?.name === 'custom_name'); - t.assert(tableConfig.uniqueConstraints[0]?.nullsNotDistinct); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); - - t.assert(tableConfig.uniqueConstraints[1]?.name, 'custom_name1'); - t.assert(!tableConfig.uniqueConstraints[1]?.nullsNotDistinct); - t.deepEqual(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name), ['name', 'state']); -}); - -test.serial('table configs: unique in column', async (t) => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: char('state', { length: 2 }).unique('custom'), - field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - t.assert(columnName?.uniqueName === uniqueKeyName(cities1Table, [columnName!.name])); - t.assert(columnName?.isUnique); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - t.assert(columnState?.uniqueName === 'custom'); - t.assert(columnState?.isUnique); - - const columnField = 
tableConfig.columns.find((it) => it.name === 'field'); - t.assert(columnField?.uniqueName === 'custom_field'); - t.assert(columnField?.isUnique); - t.assert(columnField?.uniqueType === 'not distinct'); -}); - -test.serial('table config: foreign keys name', async (t) => { - const table = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.foreignKeys.length, 1); - t.is(tableConfig.foreignKeys[0]!.getName(), 'custom_fk'); -}); - -test.serial('table config: primary keys name', async (t) => { - const table = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - t.is(tableConfig.primaryKeys.length, 1); - t.is(tableConfig.primaryKeys[0]!.getName(), 'custom_pk'); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values({ name: 'John' }); - - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; - - const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) - .returning(); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(insertedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - age: integer('age').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John', age: 24 }, - { id: 1, name: 'John', age: 24 }, - { id: 2, name: 'John', age: 25 }, - { id: 1, name: 'Jane', age: 24 }, - { id: 1, name: 'Jane', age: 26 }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.id, usersDistinctTable.age); - - await 
db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [ - { id: 1, name: 'Jane', age: 24 }, - { id: 1, name: 'Jane', age: 26 }, - { id: 1, name: 'John', age: 24 }, - { id: 2, name: 'John', age: 25 }, - ]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); - - t.deepEqual(users4, [ - { id: 1, name: 'John', age: 24 }, - { id: 1, name: 'Jane', age: 26 }, - { id: 2, name: 'John', age: 25 }, - ]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - 
t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: 
result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('char insert', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Austin', state: 'TX' }]); -}); - -test.serial('char update', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Atlanta', state: 'GA' }]); -}); - -test.serial('char delete', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, []); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', verified: true, jsonb: null, createdAt: 
result[0]!.createdAt }, - ]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with exists', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const user = alias(usersTable, 'user'); 
- const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists(db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id)))), - ); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db - .select({ id: 
usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table 
${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db - .insert(usersTable) - .values({ - verified: true, - name: placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - 
.from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -// TODO change tests to new structure -test.serial('migrator : default 
migration strategy', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom schema', async (t) => { - const { db } = t.context; - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - - // test if the custom migrations table was created - const { rows } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - t.true(rows.length! 
> 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { rows } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - t.true(rows.length! 
> 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test.serial('migrator : migrate with custom table and custom schema', async (t) => { - const { db } = t.context; - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, - }); - - // test if the custom migrations table was created - const { rows } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - t.true(rows.length! 
> 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute( - sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - t.deepEqual(result.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('Query check: Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', - params: [], - }); 
-}); - -test.serial('Query check: Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', - params: [], - }); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - 
t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, 
name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const 
{ db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select() - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - state: null, - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db - .insert(courseCategoriesTable) - .values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db - .insert(coursesTable) - .values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { 
name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result1 = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - 
productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - const result2 = await db - .with(regionalSales, topRegions) - .selectDistinct({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - const result3 = await db - .with(regionalSales, topRegions) - .selectDistinctOn([orders.region], { - region: orders.region, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region) - .orderBy(orders.region); - - t.deepEqual(result1, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); - t.deepEqual(result2, result1); - t.deepEqual(result3, [ - { - region: 'Europe', - productUnits: 8, - productSales: 80, - }, - { - region: 'US', - productUnits: 16, - productSales: 160, - }, - ]); -}); - -test.serial('with ... 
update', async (t) => { - const { db } = t.context; - - const products = pgTable('products', { - id: serial('id').primaryKey(), - price: numeric('price').notNull(), - cheap: boolean('cheap').notNull().default(false), - }); - - await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql` - create table ${products} ( - id serial primary key, - price numeric not null, - cheap boolean not null default false - ) - `); - - await db.insert(products).values([ - { price: '10.99' }, - { price: '25.85' }, - { price: '32.99' }, - { price: '2.50' }, - { price: '4.59' }, - ]); - - const averagePrice = db - .$with('average_price') - .as( - db - .select({ - value: sql`avg(${products.price})`.as('value'), - }) - .from(products), - ); - - const result = await db - .with(averagePrice) - .update(products) - .set({ - cheap: true, - }) - .where(lt(products.price, sql`(select * from ${averagePrice})`)) - .returning({ - id: products.id, - }); - - t.deepEqual(result, [ - { id: 1 }, - { id: 4 }, - { id: 5 }, - ]); -}); - -test.serial('with ... insert', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - username: text('username').notNull(), - admin: boolean('admin').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); - - const userCount = db - .$with('user_count') - .as( - db - .select({ - value: sql`count(*)`.as('value'), - }) - .from(users), - ); - - const result = await db - .with(userCount) - .insert(users) - .values([ - { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, - ]) - .returning({ - admin: users.admin, - }); - - t.deepEqual(result, [{ admin: true }]); -}); - -test.serial('with ... 
delete', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const averageAmount = db - .$with('average_amount') - .as( - db - .select({ - value: sql`avg(${orders.amount})`.as('value'), - }) - .from(orders), - ); - - const result = await db - .with(averageAmount) - .delete(orders) - .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) - .returning({ - id: orders.id, - }); - - t.deepEqual(result, [ - { id: 6 }, - { id: 7 }, - { id: 8 }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare('query')); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('select count w/ custom mapper', async (t) => { - const { db } = t.context; - - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('network types', async (t) => { - const { db } = t.context; - - const value: typeof network.$inferSelect = { - inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - }; - - await db.insert(network).values(value); - - const res = await db.select().from(network); - - t.deepEqual(res, [value]); -}); - -test.serial('array types', async (t) => { - const { db } = t.context; - - const values: typeof salEmp.$inferSelect[] = [ - { - name: 'John', - payByQuarter: [10000, 10000, 10000, 10000], - schedule: [['meeting', 'lunch'], ['training', 'presentation']], - }, - { - name: 'Carol', - payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], - }, - ]; - - await db.insert(salEmp).values(values); - - const res = await db.select().from(salEmp); - - t.deepEqual(res, values); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db - .select() - .from(users2Table) - .for('update') - .toSQL(); - - t.regex( - query.sql, - / for update$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - 
.for('update', { of: [users2Table, coursesTable] }) - .toSQL(); - - t.regex( - query.sql, - / for update of "users2", "courses"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table }) - .toSQL(); - - t.regex( - query.sql, - /for no key update of "users2"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table, skipLocked: true }) - .toSQL(); - - t.regex( - query.sql, - / for no key update of "users2" skip locked$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('share', { of: users2Table, noWait: true }) - .toSQL(); - - t.regex( - query.sql, - // eslint-disable-next-line unicorn/better-regex - /for share of "users2" no wait$/, - ); - } -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: 
integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial.skip('materialized view', async (t) => { - // Disabled due to bug in PGlite: - // https://github.com/electric-sql/pglite/issues/63 - const { db } = t.context; - - const newYorkers1 = pgMaterializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: 
text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -// TODO: copy to SQLite and MySQL, add to docs -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 
'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await 
db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists 
${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.serial('all date and time columns', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 }).notNull(), - datetime: timestamp('datetime').notNull(), - datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), - datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - interval: 
interval('interval').notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null, - datetime_wtz_string timestamp with time zone not null, - interval interval not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456Z'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision, - datetimeWTZString: '2022-01-01T00:00:00.123Z', - interval: '1 day', - }); - - const result = await db.select().from(table); - - Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - }[], typeof result> - >; - - Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - id?: number | undefined; - }, typeof table.$inferInsert> - >; - - t.deepEqual(result, [ - { - id: 1, - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - datetimeWTZString: '2022-01-01 00:00:00.123+00', - interval: '1 day', - }, - ]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone second case mode date', 
async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date(); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as date and check that timezones are the same - // There is no way to check timezone in Date object, as it is always represented internally in UTC - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: insertedDate }]); - - // 3. Compare both dates - t.deepEqual(insertedDate.getTime(), result[0]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC - const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones - - // 1. 
Insert date as new dates with different time zones - await db.insert(table).values([ - { timestamp: insertedDate }, - { timestamp: insertedDate2 }, - ]); - - // 2, Select and compare both dates - const result = await db.select().from(table); - - t.deepEqual(result[0]?.timestamp.getTime(), result[1]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone first case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); - - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - - // 3. 
Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone second case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual(result.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. 
Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - t.deepEqual(new Date(result.rows[0]!.timestamp_string + 'Z').getTime(), insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode date for timestamp with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: timestampString }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial.skip('test mode string for timestamp with timezone in UTC timezone', async (t) => { - // Disabled due to bug in PGlite: - // https://github.com/electric-sql/pglite/issues/62 - const { db } = t.context; - - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial.skip('test mode string for timestamp with timezone in different timezone', async (t) => { - // Disabled due to bug in PGlite: - // https://github.com/electric-sql/pglite/issues/62 - const { db } = t.context; - - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone 'HST'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-1000'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual(result2.rows, [{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('select from sql', async (t) => { - const { db } = t.context; - - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - await t.notThrowsAsync(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), - lt(metricEntry.createdAt, intervals.endTime), - ), - ) - .groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const usersTableWithAndWithoutTimezone = 
pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 2000); - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 2000); - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id 
serial not null primary key, balance integer not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance 
integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id 
integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = pgTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async 
(t) => { - const { db } = t.context; - - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('table selection with single table', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - - await db.insert(users).values({ name: 
'John', cityId: 1 }); - - const result = await db.select({ users }).from(users); - - t.deepEqual(result, [{ users: { id: 1, name: 'John', cityId: 1 } }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('set null to jsonb field', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - jsonb: jsonb('jsonb'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, - ); - - const result = await db.insert(users).values({ jsonb: null }).returning(); - - t.deepEqual(result, [{ id: 1, jsonb: null }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('array operators', async (t) => { - const { db } = t.context; - - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - }); - - await db.execute(sql`drop 
table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, - db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - t.deepEqual(contains, [{ id: 3 }, { id: 5 }]); - t.deepEqual(contained, [{ id: 1 }, { id: 2 }, { id: 3 }]); - t.deepEqual(overlaps, [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - t.deepEqual(withSubQuery, [{ id: 1 }, { id: 3 }, { id: 5 }]); -}); - -test.serial('set operations (union) from query builder with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const sq = db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).as('sq'); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).union( - db.select().from(sq), - ).orderBy(asc(sql`name`)).limit(2).offset(1); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 3, name: 'Jack' }, - { id: 2, name: 'Jane' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name, name2: users2Table.name }) - .from(cities2Table).union( - // @ts-expect-error - db - .select({ id: 
users2Table.id, name: users2Table.name }) - .from(users2Table), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (union) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`name`)).limit(1).offset(1); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - union( - db - .select({ name: citiesTable.name, id: cities2Table.id }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (union all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).unionAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 4); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).unionAll( - db - .select({ name: citiesTable.name, id: cities2Table.id }) - .from(cities2Table).limit(2), - 
).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (union all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await unionAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 3); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - unionAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (intersect) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).intersect( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`name`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).intersect( - // @ts-expect-error - db - .select({ id: cities2Table.id, name: citiesTable.name, id2: cities2Table.id }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`name`)); - }); -}); - -test.serial('set operations (intersect) as 
function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersect( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 0); - - t.deepEqual(result, []); - - t.throws(() => { - intersect( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (intersect all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).intersectAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - t.throws(() => { - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).limit(2).intersectAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(cities2Table).limit(2), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (intersect all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await intersectAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - 
.from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); - - t.throws(() => { - intersectAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - }); -}); - -test.serial('set operations (except) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(cities2Table).except( - db - .select() - .from(cities2Table).where(gt(citiesTable.id, 1)), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - db - .select() - .from(cities2Table).except( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - ); - }); -}); - -test.serial('set operations (except) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await except( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table), - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - 
t.throws(() => { - except( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (except all) from query builder', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(cities2Table).exceptAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 2); - - t.deepEqual(result, [ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - t.throws(() => { - db - .select({ name: cities2Table.name, id: cities2Table.id }) - .from(cities2Table).exceptAll( - db - .select({ id: cities2Table.id, name: citiesTable.name }) - .from(cities2Table).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (except all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await exceptAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)).limit(5).offset(2); - - t.assert(result.length === 4); - - t.deepEqual(result, [ - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - { id: 7, name: 'Mary' }, - ]); - - t.throws(() => { - exceptAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table), - db - .select({ id: 
users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('set operations (mixed) from query builder with subquery', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - const sq = db - .select() - .from(cities2Table).where(gt(citiesTable.id, 1)).as('sq'); - - const result = await db - .select() - .from(cities2Table).except( - ({ unionAll }) => - unionAll( - db.select().from(sq), - db.select().from(cities2Table).where(eq(citiesTable.id, 2)), - ), - ); - - t.assert(result.length === 1); - - t.deepEqual(result, [ - { id: 1, name: 'New York' }, - ]); - - t.throws(() => { - db - .select() - .from(cities2Table).except( - ({ unionAll }) => - unionAll( - db - .select({ name: cities2Table.name, id: cities2Table.id }) - .from(cities2Table).where(gt(citiesTable.id, 1)), - db.select().from(cities2Table).where(eq(citiesTable.id, 2)), - ), - ); - }); -}); - -test.serial('set operations (mixed all) as function', async (t) => { - const { db } = t.context; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ), - db - .select().from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - - t.assert(result.length === 6); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - { id: 8, name: 'Sally' }, - ]); - - t.throws(() => { - union( - db - .select({ id: 
users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 7)), - ), - db - .select().from(cities2Table).where(gt(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - }); -}); - -test.serial('aggregate function: count', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: count() }).from(table); - const result2 = await db.select({ value: count(table.a) }).from(table); - const result3 = await db.select({ value: countDistinct(table.name) }).from(table); - - t.deepEqual(result1[0]?.value, 7); - t.deepEqual(result2[0]?.value, 5); - t.deepEqual(result3[0]?.value, 6); -}); - -test.serial('aggregate function: avg', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: avg(table.b) }).from(table); - const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); - const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '33.3333333333333333'); - t.deepEqual(result2[0]?.value, null); - t.deepEqual(result3[0]?.value, '42.5000000000000000'); -}); - -test.serial('aggregate function: sum', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: sum(table.b) }).from(table); - const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); - const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); - - t.deepEqual(result1[0]?.value, '200'); - t.deepEqual(result2[0]?.value, null); - 
t.deepEqual(result3[0]?.value, '170'); -}); - -test.serial('aggregate function: max', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: max(table.b) }).from(table); - const result2 = await db.select({ value: max(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 90); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('aggregate function: min', async (t) => { - const { db } = t.context; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: min(table.b) }).from(table); - const result2 = await db.select({ value: min(table.nullOnly) }).from(table); - - t.deepEqual(result1[0]?.value, 10); - t.deepEqual(result2[0]?.value, null); -}); - -test.serial('array mapping and parsing', async (t) => { - const { db } = t.context; - - const arrays = pgTable('arrays_tests', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - nested: text('nested').array().array(), - numbers: integer('numbers').notNull().array(), - }); - - await db.execute(sql`drop table if exists ${arrays}`); - await db.execute(sql` - create table ${arrays} ( - id serial primary key, - tags text[], - nested text[][], - numbers integer[] - ) - `); - - await db.insert(arrays).values({ - tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], - numbers: [1, 2, 3], - }); - - const result = await db.select().from(arrays); - - t.deepEqual(result, [{ - id: 1, - tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], - numbers: [1, 2, 3], - }]); - - await db.execute(sql`drop table ${arrays}`); -}); - -test.serial('test $onUpdateFn and $onUpdate works as $default', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial primary key, - name text not null, - update_counter integer 
default 1 not null, - updated_at timestamp(3), - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - t.deepEqual(response, [ - { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 250; - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); - -test.serial('test $onUpdateFn and $onUpdate works updating', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial primary key, - name text not null, - update_counter integer default 1, - updated_at timestamp(3), - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John', alwaysNull: 'this will be null after updating' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - // const initial = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); - await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - const response = await db.select({ ...rest 
}).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - t.deepEqual(response, [ - { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 250; - - // t.assert(initial[0]?.updatedAt?.valueOf() !== justDates[0]?.updatedAt?.valueOf()); - - for (const eachUser of justDates) { - t.assert(eachUser.updatedAt!.valueOf() > Date.now() - msDelay); - } -}); diff --git a/integration-tests/tests/planetscale-serverless/mysql.test.ts b/integration-tests/tests/planetscale-serverless/mysql.test.ts deleted file mode 100644 index 8a9fdca08..000000000 --- a/integration-tests/tests/planetscale-serverless/mysql.test.ts +++ /dev/null @@ -1,1115 +0,0 @@ -import 'dotenv/config'; - -import { Client } from '@planetscale/database'; -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import { and, asc, eq, name, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; -import { - alias, - boolean, - date, - datetime, - getTableConfig, - int, - json, - mysqlEnum, - mysqlTableCreator, - mysqlView, - serial, - text, - time, - timestamp, - uniqueIndex, - varchar, - year, -} from 'drizzle-orm/mysql-core'; -import type { PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; -import { drizzle } from 'drizzle-orm/planetscale-serverless'; -import { migrate } from 'drizzle-orm/planetscale-serverless/migrator'; - -const ENABLE_LOGGING = false; - -const tablePrefix = 'drizzle_tests_'; - -const mysqlTable = mysqlTableCreator((name) => `${tablePrefix}${name}`); - -const usersTable = mysqlTable('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const datesTable 
= mysqlTable('datestable', { - date: date('date'), - dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time', { fsp: 1 }), - datetime: datetime('datetime', { fsp: 2 }), - datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), - year: year('year'), -}); - -const usersMigratorTable = mysqlTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => { - return { - name: uniqueIndex('').on(table.name).using('btree'), - }; -}); - -interface Context { - db: PlanetScaleDatabase; -} - -const test = anyTest as TestFn; - -test.before(async (t) => { - const ctx = t.context; - - ctx.db = drizzle( - new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING']! }), - { logger: ENABLE_LOGGING }, - ); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists ${usersTable}`); - await ctx.db.execute(sql`drop table if exists ${datesTable}`); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop table if exists ${usersTable}`); - await ctx.db.execute(sql`drop table if exists ${datesTable}`); - // await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql` - create table ${usersTable} ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await ctx.db.execute( - sql` - create table ${datesTable} ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`year\` year - ) - `, - ); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line 
no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = mysqlTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values({ name: 'John' }); - - t.deepEqual(result.insertId, '1'); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 
'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(users.rowsAffected, 1); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - t.is(users.rowsAffected, 1); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - - t.is(updatedUsers.rowsAffected, 1); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(updatedUsers.rowsAffected, 1); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser.rowsAffected, 1); -}); - -test.serial('delete with returning partial', async (t) => { - const { db 
} = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - t.is(deletedUser.rowsAffected, 1); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 
'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - - t.is(result.rowsAffected, 4); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, 
sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - const tableName = getTableConfig(usersTable).name; - - t.deepEqual(query, { - sql: `select \`id\`, \`name\` from \`${tableName}\` group by \`${tableName}\`.\`id\`, \`${tableName}\`.\`name\``, - params: [], - }); -}); - -test.serial('build query insert with onDuplicate', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: `insert into \`${ - getTableConfig(usersTable).name - }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('insert with onDuplicate', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert sql', async (t) 
=> { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const sqliteTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await 
db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare(); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 
7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('migrator', async (t) => { - const { db } = t.context; - - const migrationsTable = '__drizzle_tests_migrations'; - - await db.execute(sql`drop table if exists ${sql.raw(tablePrefix)}cities_migration`); - await db.execute(sql`drop table if exists ${sql.raw(tablePrefix)}users_migration`); - await db.execute(sql`drop table if exists ${sql.raw(tablePrefix)}users12`); - await db.execute(sql`drop table if exists ${sql.raw(migrationsTable)}`); - - await migrate(db, { - migrationsFolder: './drizzle2/planetscale', - migrationsTable: migrationsTable, - }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table ${sql.raw(tablePrefix)}cities_migration`); - await db.execute(sql`drop table ${sql.raw(tablePrefix)}users_migration`); - await db.execute(sql`drop table ${sql.raw(tablePrefix)}users12`); - await db.execute(sql`drop table ${sql.raw(migrationsTable)}`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - t.deepEqual(result.rows, [{ id: '1', name: 
'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute( - db.insert(usersTable).values({ name: 'John' }), - ); - t.is(inserted.rowsAffected, 1); -}); - -test.serial('insert + select all possible dates', async (t) => { - const { db } = t.context; - - const date = new Date('2022-11-11'); - - await db.insert(datesTable).values({ - date: date, - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: date, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', - }); - - const res = await db.select().from(datesTable); - - t.assert(res[0]?.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(typeof res[0]?.dateAsString === 'string'); - t.assert(typeof res[0]?.datetimeAsString === 'string'); - - t.deepEqual(res[0]!.date, new Date('2022-11-11')); - t.is(res[0]!.dateAsString, '2022-11-11'); - t.is(res[0]!.time, '12:12:12'); - t.is(res[0]!.year, 2022); - t.is(res[0]!.datetimeAsString, '2022-11-11 12:12:12'); -}); - -const tableWithEnums = mysqlTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), - enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), -}); - -test.serial('Mysql enum test case #1', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists ${tableWithEnums}`); - - await db.execute(sql` - create table ${tableWithEnums} ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithEnums).values([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum3: 'c' }, - { id: 3, enum1: 'a' }, - ]); - - const res = 
await db.select().from(tableWithEnums); - - await db.execute(sql`drop table ${tableWithEnums}`); - - t.deepEqual(res, [ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - const products = mysqlTable('products_transactions', { - id: serial('id').primaryKey(), - price: int('price').notNull(), - stock: int('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table ${users} (id serial not null primary key, balance int not null)`); - await db.execute( - sql`create table ${products} (id serial not null primary key, price int not null, stock int not null)`, - ); - - const { insertId: userId } = await db.insert(users).values({ balance: 100 }); - const user = await db.select().from(users).where(eq(users.id, +userId)).then((rows) => rows[0]!); - const { insertId: productId } = await db.insert(products).values({ price: 10, stock: 10 }); - const product = await db.select().from(products).where(eq(products.id, +productId)).then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_transactions_rollback', { - id: 
serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, balance int not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, balance int not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, balance int not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const 
result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = mysqlTable('internal_staff', { - userId: int('user_id').notNull(), - }); - - const customUser = mysqlTable('custom_user', { - id: int('id').notNull(), - }); - - const ticket = mysqlTable('ticket', { - staffId: int('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table ${internalStaff} (user_id integer not null)`); - await db.execute(sql`create table ${customUser} (id integer not null)`); - await db.execute(sql`create table ${ticket} (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial.skip('subquery with view', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => 
qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 
}, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join', async (t) => { - const { db } = t.context; - - const usersTable = mysqlTable( - 'users', - { - id: varchar('id', { length: 191 }).primaryKey().notNull(), - createdAt: datetime('created_at', { fsp: 3 }).notNull(), - name: varchar('name', { length: 191 }), - email: varchar('email', { length: 191 }).notNull(), - emailVerified: datetime('email_verified', { fsp: 3 }), - image: text('image'), - }, - (table) => ({ - emailIdx: 
uniqueIndex('email_idx').on(table.email), - }), - ); - - const accountsTable = mysqlTable( - 'accounts', - { - id: varchar('id', { length: 191 }).primaryKey().notNull(), - userId: varchar('user_id', { length: 191 }).notNull(), - type: varchar('type', { length: 191 }).notNull(), - provider: varchar('provider', { length: 191 }).notNull(), - providerAccountId: varchar('provider_account_id', { - length: 191, - }).notNull(), - refreshToken: text('refresh_token'), - accessToken: text('access_token'), - expiresAt: int('expires_at'), - tokenType: varchar('token_type', { length: 191 }), - scope: varchar('scope', { length: 191 }), - idToken: text('id_token'), - sessionState: varchar('session_state', { length: 191 }), - }, - (table) => ({ - providerProviderAccountIdIdx: uniqueIndex( - 'provider_provider_account_id_idx', - ).on(table.provider, table.providerAccountId), - }), - ); - - await db.execute(sql`drop table if exists ${usersTable}`); - await db.execute(sql`drop table if exists ${accountsTable}`); - await db.execute(sql` - create table ${usersTable} ( - id varchar(191) not null primary key, - created_at datetime(3) not null, - name varchar(191), - email varchar(191) not null, - email_verified datetime(3), - image text, - unique key email_idx (email) - ) - `); - await db.execute(sql` - create table ${accountsTable} ( - id varchar(191) not null primary key, - user_id varchar(191) not null, - type varchar(191) not null, - provider varchar(191) not null, - provider_account_id varchar(191) not null, - refresh_token text, - access_token text, - expires_at int, - token_type varchar(191), - scope varchar(191), - id_token text, - session_state varchar(191), - unique key provider_provider_account_id_idx (provider, provider_account_id) - ) - `); - - const result = await db - .select({ user: usersTable, account: accountsTable }) - .from(accountsTable) - .leftJoin(usersTable, eq(accountsTable.userId, usersTable.id)) - .where( - and( - eq(accountsTable.provider, 'provider'), - 
eq(accountsTable.providerAccountId, 'providerAccountId'), - ), - ) - .limit(1); - - t.deepEqual(result, []); -}); diff --git a/integration-tests/tests/postgres.js.test.ts b/integration-tests/tests/postgres.js.test.ts deleted file mode 100644 index 60ba0e541..000000000 --- a/integration-tests/tests/postgres.js.test.ts +++ /dev/null @@ -1,2791 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - eq, - exists, - gt, - gte, - inArray, - lt, - Name, - name, - placeholder, - type SQL, - sql, - type SQLWrapper, - TransactionRollbackError, -} from 'drizzle-orm'; -import { - alias, - boolean, - date, - getMaterializedViewConfig, - getViewConfig, - integer, - interval, - jsonb, - type PgColumn, - pgEnum, - pgMaterializedView, - pgTable, - pgTableCreator, - pgView, - serial, - text, - time, - timestamp, - uuid as pgUuid, - varchar, -} from 'drizzle-orm/pg-core'; -import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'; -import { drizzle } from 'drizzle-orm/postgres-js'; -import { migrate } from 'drizzle-orm/postgres-js/migrator'; -import getPort from 'get-port'; -import postgres, { type Sql } from 'postgres'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, randomString } from './utils.ts'; - -const QUERY_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const 
coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -interface Context { - docker: Docker; - pgContainer: Docker.Container; - db: PostgresJsDatabase; - client: Sql; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - const pgContainer = (ctx.pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - })); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['PG_CONNECTION_STRING'] ?? 
await createDockerDB(ctx); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = postgres(connectionString, { - max: 1, - onnotice: () => { - // disable notices - }, - }); - await ctx.client`select 1`; - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: QUERY_LOGGING }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema public cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.db.execute( - sql` - create table cities ( - id serial primary key, - name text not null - ) - `, - ); - await ctx.db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table course_categories ( - id serial primary key, - name text not null - ) - `, - ); - await ctx.db.execute( - sql` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - ); -}); - -test.serial('select all fields', async (t) => { - 
const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - - await 
db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: 
usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await 
db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with exists', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const user = alias(usersTable, 'user'); - const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists(db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id)))), - ); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - 
const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name 
text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - 
-test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - 
await db.insert(usersTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -// TODO change tests to new structure -test.serial('migrator : default migration strategy', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom schema', async (t) => { - const { db } = t.context; - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - - // test if the custom migrations table was created - const { count } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - t.true(count > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await 
db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { count } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - t.true(count > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test.serial('migrator : migrate with custom table and custom schema', async (t) => { - const { db } = t.context; - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, - }); - - // test if the custom migrations table was created - const { count } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - t.true(count > 0); - - // test if the migrated table are working as 
expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); - t.deepEqual(Array.prototype.slice.call(result), [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const result = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(Array.prototype.slice.call(result), [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const result = await db.execute>( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(Array.prototype.slice.call(result), [{ id: 1, name: 'John' }]); -}); - -test.serial('Query check: Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', - params: [], - }); -}); - 
-test.serial('Query check: Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', - params: [], - }); -}); - -test.serial('$default function', async (t) => { - const { db } = t.context; - - const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) - .returning(); - const selectedOrder = await db.select().from(orders); - - t.deepEqual(insertedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); - - t.deepEqual(selectedOrder, [{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); -}); - -test.serial('Insert all defaults in 1 row', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }]); -}); - -test.serial('Insert all defaults in multiple rows', async (t) => { - const { db } = t.context; - - const users = pgTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', 
state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - 
.onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing(); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId 
}, { name: 'Jane' }]); - - const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - 
cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... 
select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db.select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ 
name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare('query')); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: '2' }]); -}); - -test.serial('select count w/ custom mapper', async (t) => { - const { db } = t.context; - - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db - .select() - .from(users2Table) - .for('update') - .toSQL(); - - t.regex( - query.sql, - / for update$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('update', { of: [users2Table, coursesTable] }) - 
.toSQL(); - - t.regex( - query.sql, - / for update of "users2", "courses"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table }) - .toSQL(); - - t.regex( - query.sql, - /for no key update of "users2"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table, skipLocked: true }) - .toSQL(); - - t.regex( - query.sql, - / for no key update of "users2" skip locked$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('share', { of: users2Table, noWait: true }) - .toSQL(); - - t.regex( - query.sql, - // eslint-disable-next-line unicorn/better-regex - /for share of "users2" no wait$/, - ); - } -}); - -test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from 
${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('materialized view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgMaterializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create materialized view ${newYorkers1} as 
${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on 
aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await 
db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists 
${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('select from sql', async (t) => { - const { db } = t.context; - - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = 
uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - await t.notThrowsAsync(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), - lt(metricEntry.createdAt, intervals.endTime), - ), - ) - .groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ); - // beta -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - 
t.assert(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 2000); - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 2000); - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('all date and time columns', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 }).notNull(), - datetime: timestamp('datetime').notNull(), - datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), - datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - interval: interval('interval').notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null, - datetime_wtz_string timestamp with time zone not null, - interval interval not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456Z'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision, - datetimeWTZString: '2022-01-01T00:00:00.123Z', - 
interval: '1 day', - }); - - const result = await db.select().from(table); - - Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - }[], typeof result> - >; - - Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - id?: number | undefined; - }, typeof table.$inferInsert> - >; - - t.deepEqual(result, [ - { - id: 1, - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - datetimeWTZString: '2022-01-01 00:00:00.123+00', - interval: '1 day', - }, - ]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone second case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date(); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as date and check that timezones are the same - // There is no way to check timezone in Date object, as it is always represented internally in UTC - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: insertedDate }]); - - // 3. 
Compare both dates - t.deepEqual(insertedDate.getTime(), result[0]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns with timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC - const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones - - // 1. Insert date as new dates with different time zones - await db.insert(table).values([ - { timestamp: insertedDate }, - { timestamp: insertedDate2 }, - ]); - - // 2, Select and compare both dates - const result = await db.select().from(table); - - t.deepEqual(result[0]?.timestamp.getTime(), result[1]?.timestamp.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone first case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. 
Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); - - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual([...result2], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone second case mode string', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. 
Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual([...result], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('all date and time columns without timezone third case mode date', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3. 
Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - t.deepEqual(new Date(result[0]!.timestamp_string + 'Z').getTime(), insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual([...result2], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode date for timestamp with timezone', async (t) => { - const { db } = t.context; - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); - - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: timestampString }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual([...result2], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone in UTC timezone', async (t) => { - const { db } = t.context; - - // get current timezone from db - const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - t.deepEqual([...result2], [{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('test mode string for timestamp with timezone in different timezone', async (t) => { - const { db } = t.context; - - // get current timezone from db - const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone 'HST'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-1000'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - t.deepEqual(result, [{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - - // 3. 
Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - t.deepEqual([...result2], [{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), 
- balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - await tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - await tx.rollback(); 
- }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = 
pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if 
exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('array operators', async (t) => { - const { db } = t.context; - - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - }); - - await db.execute(sql`drop table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, - db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - t.deepEqual(contains, [{ id: 3 }, { id: 5 }]); - t.deepEqual(contained, [{ id: 1 }, { id: 2 }, { id: 3 }]); - t.deepEqual(overlaps, [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - t.deepEqual(withSubQuery, [{ id: 1 }, { id: 3 }, { id: 5 }]); -}); - -test.serial('array mapping and parsing', async (t) => { - const { db } = t.context; - - const arrays = pgTable('arrays_tests', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - 
nested: text('nested').array().array(), - numbers: integer('numbers').notNull().array(), - }); - - await db.execute(sql`drop table if exists ${arrays}`); - await db.execute(sql` - create table ${arrays} ( - id serial primary key, - tags text[], - nested text[][], - numbers integer[] - ) - `); - - await db.insert(arrays).values({ - tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], - numbers: [1, 2, 3], - }); - - const result = await db.select().from(arrays); - - t.deepEqual(result, [{ - id: 1, - tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], - numbers: [1, 2, 3], - }]); - - await db.execute(sql`drop table ${arrays}`); -}); diff --git a/integration-tests/tests/prisma/.gitignore b/integration-tests/tests/prisma/.gitignore new file mode 100644 index 000000000..794cddf53 --- /dev/null +++ b/integration-tests/tests/prisma/.gitignore @@ -0,0 +1,2 @@ +*/client +*/drizzle diff --git a/integration-tests/tests/prisma/mysql/prisma.test.ts b/integration-tests/tests/prisma/mysql/prisma.test.ts new file mode 100644 index 000000000..ee5511a25 --- /dev/null +++ b/integration-tests/tests/prisma/mysql/prisma.test.ts @@ -0,0 +1,30 @@ +import 'dotenv/config'; +import 'zx/globals'; + +import type { PrismaMySqlDatabase } from 'drizzle-orm/prisma/mysql'; +import { drizzle } from 'drizzle-orm/prisma/mysql'; +import { beforeAll, expect, expectTypeOf, test } from 'vitest'; + +import { PrismaClient } from './client'; +import { User } from './drizzle/schema.ts'; + +const ENABLE_LOGGING = false; + +let db: PrismaMySqlDatabase; + +beforeAll(async () => { + await $`prisma generate --schema tests/prisma/mysql/schema.prisma`.quiet(); + await $`prisma db push --force-reset --schema tests/prisma/mysql/schema.prisma`.quiet(); + const prisma = new PrismaClient().$extends(drizzle({ logger: ENABLE_LOGGING })); + db = prisma.$drizzle; +}); + +test('extension works', async () => { + const insert = await db.insert(User).values({ email: 'test@test.com' }); + 
expectTypeOf(insert).toEqualTypeOf<[]>(); + expect(insert).toEqual([]); + + const result = await db.select().from(User); + expectTypeOf(result).toEqualTypeOf(); + expect(result).toEqual([{ id: 1, email: 'test@test.com', name: null }]); +}); diff --git a/integration-tests/tests/prisma/mysql/schema.prisma b/integration-tests/tests/prisma/mysql/schema.prisma new file mode 100644 index 000000000..5bb496dcb --- /dev/null +++ b/integration-tests/tests/prisma/mysql/schema.prisma @@ -0,0 +1,20 @@ +generator client { + provider = "prisma-client-js" + output = "./client" +} + +generator drizzle { + provider = "drizzle-prisma-generator" + output = "./drizzle" +} + +datasource db { + provider = "mysql" + url = env("MYSQL_CONNECTION_STRING") +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + name String? +} diff --git a/integration-tests/tests/prisma/pg/prisma.test.ts b/integration-tests/tests/prisma/pg/prisma.test.ts new file mode 100644 index 000000000..16c5ce106 --- /dev/null +++ b/integration-tests/tests/prisma/pg/prisma.test.ts @@ -0,0 +1,29 @@ +import 'dotenv/config'; +import 'zx/globals'; + +import { drizzle } from 'drizzle-orm/prisma/pg'; +import type { PrismaPgDatabase } from 'drizzle-orm/prisma/pg'; +import { beforeAll, expect, expectTypeOf, test } from 'vitest'; + +import { PrismaClient } from './client'; +import { User } from './drizzle/schema.ts'; + +const ENABLE_LOGGING = false; + +let db: PrismaPgDatabase; + +beforeAll(async () => { + await $`prisma db push --force-reset --schema tests/prisma/pg/schema.prisma`.quiet(); + const prisma = new PrismaClient().$extends(drizzle({ logger: ENABLE_LOGGING })); + db = prisma.$drizzle; +}); + +test('extension works', async () => { + const insert = await db.insert(User).values({ email: 'test@test.com' }); + expectTypeOf(insert).toEqualTypeOf<[]>(); + expect(insert).toEqual([]); + + const result = await db.select().from(User); + expectTypeOf(result).toEqualTypeOf(); + expect(result).toEqual([{ 
id: 1, email: 'test@test.com', name: null }]); +}); diff --git a/integration-tests/tests/prisma/pg/schema.prisma b/integration-tests/tests/prisma/pg/schema.prisma new file mode 100644 index 000000000..a5345d047 --- /dev/null +++ b/integration-tests/tests/prisma/pg/schema.prisma @@ -0,0 +1,20 @@ +generator client { + provider = "prisma-client-js" + output = "./client" +} + +generator drizzle { + provider = "drizzle-prisma-generator" + output = "./drizzle" +} + +datasource db { + provider = "postgresql" + url = env("PG_CONNECTION_STRING") +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + name String? +} diff --git a/integration-tests/tests/prisma/sqlite/.gitignore b/integration-tests/tests/prisma/sqlite/.gitignore new file mode 100644 index 000000000..2fa69c243 --- /dev/null +++ b/integration-tests/tests/prisma/sqlite/.gitignore @@ -0,0 +1 @@ +db.sqlite diff --git a/integration-tests/tests/prisma/sqlite/prisma.test.ts b/integration-tests/tests/prisma/sqlite/prisma.test.ts new file mode 100644 index 000000000..4e8979cb8 --- /dev/null +++ b/integration-tests/tests/prisma/sqlite/prisma.test.ts @@ -0,0 +1,41 @@ +import 'dotenv/config'; +import 'zx/globals'; + +import { drizzle } from 'drizzle-orm/prisma/sqlite'; +import type { PrismaSQLiteDatabase } from 'drizzle-orm/prisma/sqlite'; +import { beforeAll, expect, expectTypeOf, test } from 'vitest'; + +import { PrismaClient } from './client'; +import { User } from './drizzle/schema.ts'; + +const ENABLE_LOGGING = false; + +let db: PrismaSQLiteDatabase; + +beforeAll(async () => { + await $`prisma db push --force-reset --schema tests/prisma/sqlite/schema.prisma`.quiet(); + const prisma = new PrismaClient().$extends(drizzle({ logger: ENABLE_LOGGING })); + db = prisma.$drizzle; +}); + +test('extension works', async () => { + const insert = await db.insert(User).values({ email: 'test@test.com' }); + expectTypeOf(insert).toEqualTypeOf<[]>(); + expect(insert).toEqual([]); + + const result = await 
db.select().from(User); + expectTypeOf(result).toEqualTypeOf(); + expect(result).toEqual([{ id: 1, email: 'test@test.com', name: null }]); + + const all = await db.select().from(User).all(); + expectTypeOf(all).toEqualTypeOf(); + expect(all).toEqual([{ id: 1, email: 'test@test.com', name: null }]); + + const get = await db.select().from(User).get(); + expectTypeOf(get).toEqualTypeOf(); + expect(get).toEqual({ id: 1, email: 'test@test.com', name: null }); + + const run = await db.insert(User).values({ email: 'test2@test.com' }).run(); + expectTypeOf(run).toEqualTypeOf<[]>(); + expect(run).toEqual([]); +}); diff --git a/integration-tests/tests/prisma/sqlite/schema.prisma b/integration-tests/tests/prisma/sqlite/schema.prisma new file mode 100644 index 000000000..6dbf2643e --- /dev/null +++ b/integration-tests/tests/prisma/sqlite/schema.prisma @@ -0,0 +1,20 @@ +generator client { + provider = "prisma-client-js" + output = "./client" +} + +generator drizzle { + provider = "drizzle-prisma-generator" + output = "./drizzle" +} + +datasource db { + provider = "sqlite" + url = "file:./db.sqlite" +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + name String? 
+} diff --git a/integration-tests/tests/sql.js.test.ts b/integration-tests/tests/sql.js.test.ts deleted file mode 100644 index 1b6b758ba..000000000 --- a/integration-tests/tests/sql.js.test.ts +++ /dev/null @@ -1,1838 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import { asc, eq, type Equal, gt, inArray, Name, name, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; -import type { SQLJsDatabase } from 'drizzle-orm/sql-js'; -import { drizzle } from 'drizzle-orm/sql-js'; -import { migrate } from 'drizzle-orm/sql-js/migrator'; -import { - alias, - blob, - getViewConfig, - integer, - primaryKey, - sqliteTable, - sqliteTableCreator, - sqliteView, - text, -} from 'drizzle-orm/sqlite-core'; -import type { Database } from 'sql.js'; -import initSqlJs from 'sql.js'; -import { Expect, randomString } from './utils.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified', { mode: 'boolean' }).notNull().default(false), - json: blob('json', { mode: 'json' }).$type(), - createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), -}); - -const users2Table = sqliteTable('users2', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const citiesTable = sqliteTable('cities', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const coursesTable = sqliteTable('courses', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = sqliteTable('course_categories', { - id: integer('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = sqliteTable('orders', { - id: integer('id').primaryKey(), - region: 
text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const usersMigratorTable = sqliteTable('users12', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const anotherUsersMigratorTable = sqliteTable('another_users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const pkExampleTable = sqliteTable('pk_example', { - id: integer('id').notNull(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey(table.id, table.name), -})); - -const bigIntExample = sqliteTable('big_int_example', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - bigInt: blob('big_int', { mode: 'bigint' }).notNull(), -}); - -interface Context { - db: SQLJsDatabase; - client: Database; -} - -const test = anyTest as TestFn; - -test.before(async (t) => { - const ctx = t.context; - - const SQL = await initSqlJs(); - ctx.client = new SQL.Database(); - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always((t) => { - const ctx = t.context; - ctx.client?.close(); -}); - -test.beforeEach((t) => { - const ctx = t.context; - - ctx.db.run(sql`drop table if exists ${usersTable}`); - ctx.db.run(sql`drop table if exists ${users2Table}`); - ctx.db.run(sql`drop table if exists ${citiesTable}`); - ctx.db.run(sql`drop table if exists ${coursesTable}`); - ctx.db.run(sql`drop table if exists ${courseCategoriesTable}`); - ctx.db.run(sql`drop table if exists ${orders}`); - ctx.db.run(sql`drop table if exists ${bigIntExample}`); - ctx.db.run(sql`drop table if exists ${pkExampleTable}`); - - ctx.db.run(sql` - create table ${usersTable} ( - id integer primary key, - name text not null, - verified integer not null default 0, - json blob, - created_at integer not null default (strftime('%s', 'now')) - ) 
- `); - ctx.db.run(sql` - create table ${users2Table} ( - id integer primary key, - name text not null, - city_id integer references ${citiesTable}(${name(citiesTable.id.name)}) - ) - `); - ctx.db.run(sql` - create table ${citiesTable} ( - id integer primary key, - name text not null - ) - `); - ctx.db.run(sql` - create table ${courseCategoriesTable} ( - id integer primary key, - name text not null - ) - `); - ctx.db.run(sql` - create table ${coursesTable} ( - id integer primary key, - name text not null, - category_id integer references ${courseCategoriesTable}(${name(courseCategoriesTable.id.name)}) - ) - `); - ctx.db.run(sql` - create table ${orders} ( - id integer primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `); - ctx.db.run(sql` - create table ${pkExampleTable} ( - id integer not null, - name text not null, - email text not null, - primary key (id, name) - ) - `); - ctx.db.run(sql` - create table ${bigIntExample} ( - id integer primary key, - name text not null, - big_int blob not null - ) - `); -}); - -test.serial('insert bigint values', (t) => { - const { db } = t.context; - - db.insert(bigIntExample).values({ name: 'one', bigInt: BigInt('0') }).run(); - db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); - db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); - db.insert(bigIntExample).values({ name: 'four', bigInt: BigInt('1234567890') }).run(); - db.insert(bigIntExample).values({ name: 'five', bigInt: BigInt('12345678900987654321') }).run(); - - const result = db.select().from(bigIntExample).all(); - t.deepEqual(result, [ - { id: 1, name: 'one', bigInt: BigInt('0') }, - { id: 2, name: 'two', bigInt: BigInt('127') }, - { id: 3, name: 'three', bigInt: BigInt('32767') }, - { id: 4, name: 'four', bigInt: BigInt('1234567890') }, - { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, - ]); -}); - -test.serial('select 
all fields', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select().from(usersTable).all(); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select partial', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select({ name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', (t) => { - const { db } = t.context; - - const usersDistinctTable = sqliteTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${usersDistinctTable}`); - db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); - - db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]).run(); - const users = db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ).all(); - - db.run(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, 
name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', (t) => { - const { db } = t.context; - - const users = db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('insert with auto increment', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'George' }, - { name: 'Austin' }, - ]).run(); - const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'George' }, - { id: 4, name: 'Austin' }, - ]); -}); - -test.serial('insert with default values', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert with overridden default values', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John', verified: true }).run(); - const result = db.select().from(usersTable).all(); - - 
t.deepEqual(result, [{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('update with returning all fields', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning partial', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', (t) => { - const { db } = t.context; - - const now = Date.now(); - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning partial', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', (t) => { - const { db } = 
t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - db.insert(usersTable).values({ name: 'Jane' }).run(); - const result2 = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); -}); - -test.serial('json insert', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); - const result = db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); -}); - -test.serial('insert many', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]).run(); - const result = db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', (t) => { - const { db } = t.context; - - const result = db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }) - .all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 
'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('partial join with alias', (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)) - .all(); - - t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)) - .all(); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('select from alias', (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const user = 
alias(users, 'user'); - const customers = alias(users, 'customer'); - - db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)) - .all(); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); - const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); - const result = statement.all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - stmt.run({ name: `John ${i}` }); - } - - const result = db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement 
with placeholder in .where', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ name: 'John' }).run(); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = stmt.all({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('select with group by as field', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', (t) => { - const { db } = 
t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', (t) => { - const { db } = t.context; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('migrator', (t) => { - const { db } = t.context; - - db.run(sql`drop table if exists another_users`); - db.run(sql`drop table if exists users12`); - db.run(sql`drop table if exists __drizzle_migrations`); - - migrate(db, { migrationsFolder: './drizzle2/sqlite' }); - - db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result = db.select().from(usersMigratorTable).all(); - - db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result2 = db.select().from(usersMigratorTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - t.deepEqual(result2, [{ id: 1, name: 'John', email: 'email' }]); - - db.run(sql`drop table another_users`); - db.run(sql`drop table users12`); - db.run(sql`drop table __drizzle_migrations`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - db.run(sql`drop table if exists another_users`); - db.run(sql`drop table if exists users12`); - db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - - migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); - - // test if the custom migrations table was created - const res = db.all(sql`select * 
from ${sql.identifier(customTable)};`); - t.true(res.length > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - db.run(sql`drop table another_users`); - db.run(sql`drop table users12`); - db.run(sql`drop table ${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.run + select via db.all', (t) => { - const { db } = t.context; - - db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = db.all<{ id: number; name: string }>(sql`select id, name from "users"`); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.get', (t) => { - const { db } = t.context; - - const inserted = db.get<{ id: number; name: string }>( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.run + select via db.get', (t) => { - const { db } = t.context; - - db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = db.get<{ id: number; name: string }>( - sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, - ); - t.deepEqual(result, { id: 1, name: 'John' }); -}); - -test.serial('insert via db.get w/ query builder', (t) => { - const { db } = t.context; - - const inserted = db.get>( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted, { id: 1, name: 'John' }); -}); - -test.serial('left join (flat object fields)', (t) => { - const { db } = t.context; - - const { id: cityId } = db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - 
.returning({ id: citiesTable.id }).all()[0]!; - - db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', (t) => { - const { db } = t.context; - - const { id: cityId } = db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all()[0]!; - - db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .all(); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', (t) => { - const { db } = t.context; - - const { id: cityId } = db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }).all()[0]!; - - db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]).run(); - - const res = db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)).all(); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - }, - }, - { - 
users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', (t) => { - const { db } = t.context; - - db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]).run(); - - db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]).run(); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name) - .all(); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... 
select', (t) => { - const { db } = t.context; - - db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]).run(); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as int)`, - productSales: sql`cast(sum(${orders.amount}) as int)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product) - .all(); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', (t) => { - const { db } = t.context; - - db.insert(users2Table).values([{ 
name: 'John' }, { name: 'Jane' }]).run(); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = db.select({ name: sq.name }).from(sq).all(); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare()); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare()); -}); - -test.serial('select count()', (t) => { - const { db } = t.context; - - db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); - - const res = db.select({ count: sql`count(*)` }).from(usersTable).all(); - - t.deepEqual(res, [{ count: 2 }]); -}); - -test.serial('having', (t) => { - const { db } = t.context; - - db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); - - db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]).run(); - - const result = db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name) - .all(); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', (t) => { - const { db } = t.context; - - const newYorkers1 = 
sqliteView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = sqliteView('new_yorkers', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = sqliteView('new_yorkers', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - db.run(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]).run(); - - db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]).run(); - - { - const result = db.select().from(newYorkers1).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = db.select().from(newYorkers2).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = db.select().from(newYorkers3).all(); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = db.select({ name: newYorkers1.name }).from(newYorkers1).all(); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - db.run(sql`drop view ${newYorkers1}`); -}); - -test.serial('insert null timestamp', (t) => { - const { db } = t.context; - - const test = sqliteTable('test', { - t: integer('t', { mode: 'timestamp' }), - }); - - db.run(sql`create table ${test} (t timestamp)`); - - db.insert(test).values({ t: null }).run(); - const res = db.select().from(test).all(); - t.deepEqual(res, [{ t: null }]); - - db.run(sql`drop table ${test}`); -}); - -test.serial('select from raw sql', (t) => { - const { db } = 
t.context; - - const result = db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`).all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', (t) => { - const { db } = t.context; - - const result = db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) - .all(); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', (t) => { - const { db } = t.context; - - const result = db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', (t) => { - const { db } = t.context; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: 
cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) - .all(); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); - - const users = sqliteTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - db.insert(users).values({ id: 1, name: 'John' }).run(); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('transaction', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = sqliteTable('products_transactions', { - id: integer('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`drop table if exists ${products}`); - - db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); - db.run( - sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, - ); - - const user = 
db.insert(users).values({ balance: 100 }).returning().get(); - const product = db.insert(products).values({ price: 10, stock: 10 }).returning().get(); - - db.transaction((tx) => { - tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)).run(); - tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)).run(); - }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - db.run(sql`drop table ${users}`); - db.run(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - t.throws(() => - db.transaction((tx) => { - tx.insert(users).values({ balance: 100 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, - ); - - db.transaction((tx) => { - tx.insert(users).values({ balance: 100 }).run(); - - tx.transaction((tx) => { - tx.update(users).set({ balance: 200 }).run(); - }); - }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', (t) 
=> { - const { db } = t.context; - - const users = sqliteTable('users_nested_transactions_rollback', { - id: integer('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not null)`, - ); - - db.transaction((tx) => { - tx.insert(users).values({ balance: 100 }).run(); - - t.throws(() => - tx.transaction((tx) => { - tx.update(users).set({ balance: 200 }).run(); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = db.select().from(users).all(); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - db.run(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', (t) => { - const { db } = t.context; - - const internalStaff = sqliteTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = sqliteTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = sqliteTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - db.run(sql`drop table if exists ${internalStaff}`); - db.run(sql`drop table if exists ${customUser}`); - db.run(sql`drop table if exists ${ticket}`); - - db.run(sql`create table internal_staff (user_id integer not null)`); - db.run(sql`create table custom_user (id integer not null)`); - db.run(sql`create table ticket (staff_id integer not null)`); - - db.insert(internalStaff).values({ userId: 1 }).run(); - db.insert(customUser).values({ id: 1 }).run(); - db.insert(ticket).values({ staffId: 1 }).run(); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) - .all(); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { 
userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - db.run(sql`drop table ${internalStaff}`); - db.run(sql`drop table ${customUser}`); - db.run(sql`drop table ${ticket}`); -}); - -test.serial('join view as subquery', (t) => { - const { db } = t.context; - - const users = sqliteTable('users_join_view', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - db.run(sql`drop table if exists ${users}`); - db.run(sql`drop view if exists ${newYorkers}`); - - db.run( - sql`create table ${users} (id integer not null primary key, name text not null, city_id integer not null)`, - ); - db.run(sql`create view ${newYorkers} as ${getViewConfig(newYorkers).query}`); - - db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]).run(); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - db.run(sql`drop view ${newYorkers}`); - db.run(sql`drop table ${users}`); -}); - -test.serial('insert with onConflict do nothing', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing() - .run(); - - const res = db - .select({ id: usersTable.id, name: 
usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk', (t) => { - const { db } = t.context; - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing() - .run(); - - const res = db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do nothing using target', (t) => { - const { db } = t.context; - - db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }) - .run(); - - const res = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk as target', (t) => { - const { db } = t.context; - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) - .run(); - - const res = db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do update', (t) => { - const { db } = t.context; - - 
db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .run(); - - const res = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do update using composite pk', (t) => { - const { db } = t.context; - - db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); - - db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) - .run(); - - const res = db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john1@example.com' }]); -}); - -test.serial('insert undefined', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - t.notThrows(() => db.insert(users).values({ name: undefined }).run()); - - db.run(sql`drop table ${users}`); -}); - -test.serial('update undefined', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - t.throws(() => db.update(users).set({ name: undefined }).run()); - t.notThrows(() => db.update(users).set({ id: 1, name: undefined }).run()); - - db.run(sql`drop table ${users}`); 
-}); - -test.serial('async api - CRUD', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); - - const res1 = await db.select().from(users); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - await db.delete(users).where(eq(users.id, 1)); - - const res2 = await db.select().from(users); - - t.deepEqual(res2, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + async execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + sync execute', (t) => { - const { db } = t.context; - - const users = 
sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - insertStmt.execute().sync(); - - const selectStmt = db.select().from(users).prepare(); - const res = selectStmt.execute().sync(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - updateStmt.execute().sync(); - - const res1 = selectStmt.execute().sync(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - deleteStmt.execute().sync(); - - const res2 = selectStmt.execute().sync(); - - t.deepEqual(res2, []); - - db.run(sql`drop table ${users}`); -}); - -test.serial('select + .get() for empty result', (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const res = db.select().from(users).where(eq(users.id, 1)).get(); - - t.is(res, undefined); - - db.run(sql`drop table ${users}`); -}); diff --git a/integration-tests/tests/sqlite-proxy.test.ts b/integration-tests/tests/sqlite-proxy.test.ts deleted file mode 100644 index 55a5a3e36..000000000 --- a/integration-tests/tests/sqlite-proxy.test.ts +++ /dev/null @@ -1,1114 +0,0 @@ -import 'dotenv/config'; - -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import type BetterSqlite3 from 'better-sqlite3'; -import Database from 'better-sqlite3'; -import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; -import { alias, blob, integer, primaryKey, sqliteTable, sqliteTableCreator, text } from 'drizzle-orm/sqlite-core'; -import 
type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/sqlite-proxy'; -import { migrate } from 'drizzle-orm/sqlite-proxy/migrator'; - -// eslint-disable-next-line drizzle/require-entity-kind -class ServerSimulator { - constructor(private db: BetterSqlite3.Database) {} - - async query(sql: string, params: any[], method: string) { - if (method === 'run') { - try { - const result = this.db.prepare(sql).run(params); - return { data: result as any }; - } catch (e: any) { - return { error: e.message }; - } - } else if (method === 'all' || method === 'values') { - try { - const rows = this.db.prepare(sql).raw().all(params); - return { data: rows }; - } catch (e: any) { - return { error: e.message }; - } - } else if (method === 'get') { - try { - const row = this.db.prepare(sql).raw().get(params); - return { data: row }; - } catch (e: any) { - return { error: e.message }; - } - } else { - return { error: 'Unknown method value' }; - } - } - - migrations(queries: string[]) { - this.db.exec('BEGIN'); - try { - for (const query of queries) { - this.db.exec(query); - } - this.db.exec('COMMIT'); - } catch { - this.db.exec('ROLLBACK'); - } - - return {}; - } -} - -const usersTable = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified', { mode: 'boolean' }).notNull().default(false), - json: blob('json', { mode: 'json' }).$type(), - createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), -}); - -const usersMigratorTable = sqliteTable('users12', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const anotherUsersMigratorTable = sqliteTable('another_users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const pkExampleTable = sqliteTable('pk_example', { - id: integer('id').notNull(), - 
name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey(table.id, table.name), -})); - -const bigIntExample = sqliteTable('big_int_example', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - bigInt: blob('big_int', { mode: 'bigint' }).notNull(), -}); - -interface Context { - db: SqliteRemoteDatabase; - client: Database.Database; - serverSimulator: ServerSimulator; -} - -const test = anyTest as TestFn; - -test.before((t) => { - const ctx = t.context; - const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; - - ctx.client = new Database(dbPath); - - ctx.serverSimulator = new ServerSimulator(ctx.client); - - ctx.db = proxyDrizzle(async (sql, params, method) => { - try { - const rows = await ctx.serverSimulator.query(sql, params, method); - - if (rows.error !== undefined) { - throw new Error(rows.error); - } - - return { rows: rows.data }; - } catch (e: any) { - console.error('Error from sqlite proxy server:', e.response.data); - throw e; - } - }); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.run(sql`drop table if exists ${usersTable}`); - await ctx.db.run(sql`drop table if exists ${pkExampleTable}`); - await ctx.db.run(sql`drop table if exists ${bigIntExample}`); - - await ctx.db.run(sql` - create table ${usersTable} ( - id integer primary key, - name text not null, - verified integer not null default 0, - json blob, - created_at integer not null default (strftime('%s', 'now')) - ) - `); - await ctx.db.run(sql` - create table ${pkExampleTable} ( - id integer not null, - name text not null, - email text not null, - primary key (id, name) - ) - `); - await ctx.db.run(sql` - create table ${bigIntExample} ( - id integer primary key, - name text not null, - big_int blob not null - ) - `); -}); - -test.serial('insert bigint values', async (t) => { - const { db } = t.context; - - await db.insert(bigIntExample).values({ name: 'one', bigInt: BigInt('0') }).run(); - 
await db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); - await db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); - await db.insert(bigIntExample).values({ name: 'four', bigInt: BigInt('1234567890') }).run(); - await db.insert(bigIntExample).values({ name: 'five', bigInt: BigInt('12345678900987654321') }).run(); - - const result = await db.select().from(bigIntExample).all(); - t.deepEqual(result, [ - { id: 1, name: 'one', bigInt: BigInt('0') }, - { id: 2, name: 'two', bigInt: BigInt('127') }, - { id: 3, name: 'three', bigInt: BigInt('32767') }, - { id: 4, name: 'four', bigInt: BigInt('1234567890') }, - { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, - ]); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select().from(usersTable).all(); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('select partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select({ name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ name: 'John' }]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await 
db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = sqliteTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${usersDistinctTable}`); - await db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]).run(); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ).all(); - - await db.run(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('insert returning sql + get()', async (t) => { - const { db } = t.context; - - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: sql`upper(${usersTable.name})`, - }).get(); - - t.deepEqual(users, { name: 'JOHN' }); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = 
await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).all(); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update returning sql + get()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }).get(); - - t.deepEqual(users, { name: 'JANE' }); -}); - -test.serial('insert with auto increment', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'George' }, - { name: 'Austin' }, - ]).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'George' }, - { id: 4, name: 'Austin' }, - ]); -}); - -test.serial('insert with default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }).run(); - const result = await db.select().from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' 
}).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('update with returning all fields + get()', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().get(); - - t.assert(users.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users.createdAt.getTime() - now) < 5000); - t.deepEqual(users, { id: 1, name: 'Jane', verified: false, json: null, createdAt: users.createdAt }); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, [{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); -}); - -test.serial('delete with returning all fields + get()', async (t) => { - const { db } = t.context; - - const now = 
Date.now(); - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().get(); - - t.assert(users!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users!.createdAt.getTime() - now) < 5000); - t.deepEqual(users, { id: 1, name: 'John', verified: false, json: null, createdAt: users!.createdAt }); -}); - -test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).all(); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('delete with returning partial + get()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }).get(); - - t.deepEqual(users, { id: 1, name: 'John' }); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.insert(usersTable).values({ name: 'Jane' }).run(); - const result2 = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - 
}).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]).run(); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', json: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - json: usersTable.json, - verified: usersTable.verified, - }) - .all(); - - t.deepEqual(result, [ - { id: 1, name: 'John', json: null, verified: false }, - { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', json: null, verified: false }, - { id: 4, name: 'Austin', json: null, verified: true }, - ]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)) - .all(); - 
- t.deepEqual(result, [{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)) - .all(); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.run(sql`drop table if exists ${users}`); - await db.run(sql`create table ${users} (id integer primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)) - .all(); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` 
}).run(); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); - const result = await statement.all(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async (t) => { - const { db } = t.context; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.run({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable).all(); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }).run(); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.all({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 
'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .all(); - - t.deepEqual(result, [{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1) - .all(); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = 
db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('migrator', async (t) => { - const { db, serverSimulator } = t.context; - - await db.run(sql`drop table if exists another_users`); - await db.run(sql`drop table if exists users12`); - await db.run(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, async (queries) => { - try { - serverSimulator.migrations(queries); - } catch (e) { - console.error(e); - throw new Error('Proxy server cannot run migrations'); - } - }, { migrationsFolder: 'drizzle2/sqlite' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result = await db.select().from(usersMigratorTable).all(); - - await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); - const result2 = await db.select().from(usersMigratorTable).all(); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - t.deepEqual(result2, [{ id: 1, name: 'John', email: 'email' }]); - - await db.run(sql`drop table another_users`); - await db.run(sql`drop table users12`); - await db.run(sql`drop table __drizzle_migrations`); -}); - -test.serial('insert via db.run + select via db.all', async (t) => { - const { db } = t.context; - - await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.all(sql`select id, name from "users"`); - t.deepEqual(result, [[1, 'John']]); -}); - -test.serial('insert via db.get', async (t) => { - const { db } = t.context; - - const inserted = await db.get( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted, [1, 'John']); -}); - -test.serial('insert 
via db.run + select via db.get', async (t) => { - const { db } = t.context; - - await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.get( - sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, - ); - t.deepEqual(result, [1, 'John']); -}); - -test.serial('insert via db.get w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.get( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted, [1, 'John']); -}); - -test.after.always((t) => { - const ctx = t.context; - ctx.client?.close(); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing() - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk', async (t) => { - const { db } = t.context; - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing() - .run(); - - const res = await db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do nothing using target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - 
.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing using composite pk as target', async (t) => { - const { db } = t.context; - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .run(); - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john1@example.com' }) - .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) - .run(); - - const res = await db - .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john@example.com' }]); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .run(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do update using composite pk', async (t) => { - const { db } = t.context; - - await db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); - - await db - .insert(pkExampleTable) - .values({ id: 1, name: 'John', email: 'john@example.com' }) - .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) - .run(); - - const res = await db - .select({ id: 
pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) - .from(pkExampleTable) - .where(eq(pkExampleTable.id, 1)) - .all(); - - t.deepEqual(res, [{ id: 1, name: 'John', email: 'john1@example.com' }]); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined }).run()); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - await db.run(sql`drop table if exists ${users}`); - - await db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined }).run()); - await t.notThrowsAsync(async () => await db.update(users).set({ id: 1, name: undefined }).run()); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - CRUD', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const res = await db.select().from(users); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); - - const res1 = await db.select().from(users); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - await db.delete(users).where(eq(users.id, 1)); - - const res2 = await 
db.select().from(users); - - t.deepEqual(res2, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + async execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('async api - insert + select w/ prepare + sync execute', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); - await insertStmt.execute(); - - const selectStmt = db.select().from(users).prepare(); - const res = await selectStmt.execute(); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); - - const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); - await updateStmt.execute(); - - const res1 = await selectStmt.execute(); - - t.deepEqual(res1, [{ id: 1, name: 'John1' }]); - - 
const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); - await deleteStmt.execute(); - - const res2 = await selectStmt.execute(); - - t.deepEqual(res2, []); - - await db.run(sql`drop table ${users}`); -}); - -test.serial('select + .get() for empty result', async (t) => { - const { db } = t.context; - - const users = sqliteTable('users', { - id: integer('id').primaryKey(), - name: text('name'), - }); - - db.run(sql`drop table if exists ${users}`); - - db.run( - sql`create table ${users} (id integer primary key, name text)`, - ); - - const res = await db.select().from(users).where(eq(users.id, 1)).get(); - - t.is(res, undefined); - - await db.run(sql`drop table ${users}`); -}); diff --git a/integration-tests/tests/sqlite/better-sqlite.test.ts b/integration-tests/tests/sqlite/better-sqlite.test.ts new file mode 100644 index 000000000..53feee15f --- /dev/null +++ b/integration-tests/tests/sqlite/better-sqlite.test.ts @@ -0,0 +1,60 @@ +import Database from 'better-sqlite3'; +import { sql } from 'drizzle-orm'; +import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; +import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; + +const ENABLE_LOGGING = false; + +let db: BetterSQLite3Database; +let client: Database.Database; + +beforeAll(async () => { + const dbPath = process.env['SQLITE_DB_PATH'] ?? 
':memory:'; + client = new Database(dbPath); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + client?.close(); +}); + +beforeEach((ctx) => { + ctx.sqlite = { + db, + }; +}); + +test('migrator', async () => { + db.run(sql`drop table if exists another_users`); + db.run(sql`drop table if exists users12`); + db.run(sql`drop table if exists __drizzle_migrations`); + + migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + + db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result = db.select().from(usersMigratorTable).all(); + + db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result2 = db.select().from(anotherUsersMigratorTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + db.run(sql`drop table another_users`); + db.run(sql`drop table users12`); + db.run(sql`drop table __drizzle_migrations`); +}); + +skipTests([ + /** + * doesn't work properly: + * Expect: should rollback transaction and don't insert/ update data + * Received: data inserted/ updated + */ + 'transaction rollback', + 'nested transaction rollback', +]); +tests(); diff --git a/integration-tests/tests/d1-batch.test.ts b/integration-tests/tests/sqlite/d1-batch.test.ts similarity index 97% rename from integration-tests/tests/d1-batch.test.ts rename to integration-tests/tests/sqlite/d1-batch.test.ts index 7abebbb51..7ca1dff0f 100644 --- a/integration-tests/tests/d1-batch.test.ts +++ b/integration-tests/tests/sqlite/d1-batch.test.ts @@ -45,7 +45,7 @@ export const usersToGroupsTable = sqliteTable( ), }, (t) => ({ - pk: primaryKey(t.userId, t.groupId), + pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ @@ -138,7 +138,8 @@ let db: DrizzleD1Database; beforeAll(async () => { const sqliteDb = await 
createSQLiteDB(':memory:'); - db = drizzle(new D1Database(new D1DatabaseAPI(sqliteDb)) as any, { schema, logger: ENABLE_LOGGING }); + const d1db = new D1Database(new D1DatabaseAPI(sqliteDb)); + db = drizzle(d1db as any, { logger: ENABLE_LOGGING, schema }); }); beforeEach(async () => { @@ -544,7 +545,3 @@ test('insert + delete + select + select partial', async () => { { id: 2, invitedBy: null }, ); }); - -// * additionally -// batch for all libsql cases, just replace simple calls with batch calls -// batch for all rqb cases, just replace simple calls with batch calls diff --git a/integration-tests/tests/sqlite/d1.test.ts b/integration-tests/tests/sqlite/d1.test.ts new file mode 100644 index 000000000..20e9e9d14 --- /dev/null +++ b/integration-tests/tests/sqlite/d1.test.ts @@ -0,0 +1,89 @@ +import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; +import { createSQLiteDB } from '@miniflare/shared'; +import { sql } from 'drizzle-orm'; +import type { DrizzleD1Database } from 'drizzle-orm/d1'; +import { drizzle } from 'drizzle-orm/d1'; +import { migrate } from 'drizzle-orm/d1/migrator'; +import { beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; + +const ENABLE_LOGGING = false; + +let db: DrizzleD1Database; + +beforeAll(async () => { + const sqliteDb = await createSQLiteDB(':memory:'); + const d1db = new D1Database(new D1DatabaseAPI(sqliteDb)); + db = drizzle(d1db as any, { logger: ENABLE_LOGGING }); +}); + +beforeEach((ctx) => { + ctx.sqlite = { + db, + }; +}); + +test('migrator', async () => { + await db.run(sql`drop table if exists another_users`); + await db.run(sql`drop table if exists users12`); + await db.run(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 
'email' }).run(); + const result = await db.select().from(usersMigratorTable).all(); + + await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result2 = await db.select().from(anotherUsersMigratorTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.run(sql`drop table another_users`); + await db.run(sql`drop table users12`); + await db.run(sql`drop table __drizzle_migrations`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.run(sql`drop table if exists another_users`); + await db.run(sql`drop table if exists users12`); + await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); + + await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + + // test if the custom migrations table was created + const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); + expect(res.length > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.run(sql`drop table another_users`); + await db.run(sql`drop table users12`); + await db.run(sql`drop table ${sql.identifier(customTable)}`); +}); + +skipTests([ + // Cannot convert 49,50,55 to a BigInt + 'insert bigint values', + // SyntaxError: Unexpected token , in JSON at position 2 + 'json insert', + 'insert many', + 'insert many with returning', + /** + * TODO: Fix Bug! The objects should be equal + * + * See #528 for more details. 
+ * Tldr the D1 driver does not execute joins successfully + */ + 'partial join with alias', + 'full join with alias', + 'select from alias', + 'join view as subquery', +]); +tests(); diff --git a/integration-tests/tests/libsql-batch.test.ts b/integration-tests/tests/sqlite/libsql-batch.test.ts similarity index 94% rename from integration-tests/tests/libsql-batch.test.ts rename to integration-tests/tests/sqlite/libsql-batch.test.ts index 4b1883804..693845f30 100644 --- a/integration-tests/tests/libsql-batch.test.ts +++ b/integration-tests/tests/sqlite/libsql-batch.test.ts @@ -1,9 +1,10 @@ -import 'dotenv/config'; -import type { Client, ResultSet } from '@libsql/client'; import { createClient } from '@libsql/client'; +import type { Client, ResultSet } from '@libsql/client'; +import retry from 'async-retry'; import { eq, relations, sql } from 'drizzle-orm'; import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; -import { type AnySQLiteColumn, integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import type { AnySQLiteColumn } from 'drizzle-orm/sqlite-core'; +import { integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const ENABLE_LOGGING = false; @@ -44,7 +45,7 @@ export const usersToGroupsTable = sqliteTable( ), }, (t) => ({ - pk: primaryKey(t.userId, t.groupId), + pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ @@ -142,29 +143,26 @@ beforeAll(async () => { if (!url) { throw new Error('LIBSQL_URL is not set'); } - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = createClient({ url, authToken }); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while 
(timeLeft > 0); - if (!connected) { - console.error('Cannot connect to libsql'); - throw lastError; - } - + client = await retry(async () => { + client = createClient({ url, authToken }); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.close(); + }, + }); db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); +afterAll(async () => { + // client?.close(); +}); + beforeEach(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); @@ -563,7 +561,3 @@ test('insert + delete + select + select partial', async () => { { id: 2, invitedBy: null }, ); }); - -// * additionally -// batch for all libsql cases, just replace simple calls with batch calls -// batch for all rqb cases, just replace simple calls with batch calls diff --git a/integration-tests/tests/sqlite/libsql.test.ts b/integration-tests/tests/sqlite/libsql.test.ts new file mode 100644 index 000000000..71d3b289e --- /dev/null +++ b/integration-tests/tests/sqlite/libsql.test.ts @@ -0,0 +1,90 @@ +import { type Client, createClient } from '@libsql/client'; +import retry from 'async-retry'; +import { sql } from 'drizzle-orm'; +import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; +import { migrate } from 'drizzle-orm/libsql/migrator'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { randomString } from '~/utils'; +import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; + +const ENABLE_LOGGING = false; + +let db: LibSQLDatabase; +let client: Client; + +beforeAll(async () => { + const url = process.env['LIBSQL_URL']; + const authToken = process.env['LIBSQL_AUTH_TOKEN']; + if (!url) { + throw new Error('LIBSQL_URL is not set'); + } + client = await retry(async () => { + client = createClient({ url, authToken }); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 
250, + randomize: false, + onRetry() { + client?.close(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + client?.close(); +}); + +beforeEach((ctx) => { + ctx.sqlite = { + db, + }; +}); + +test('migrator', async () => { + await db.run(sql`drop table if exists another_users`); + await db.run(sql`drop table if exists users12`); + await db.run(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result = await db.select().from(usersMigratorTable).all(); + + await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result2 = await db.select().from(anotherUsersMigratorTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.run(sql`drop table another_users`); + await db.run(sql`drop table users12`); + await db.run(sql`drop table __drizzle_migrations`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.run(sql`drop table if exists another_users`); + await db.run(sql`drop table if exists users12`); + await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); + + await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + + // test if the custom migrations table was created + const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); + expect(res.length > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.run(sql`drop table another_users`); + await 
db.run(sql`drop table users12`); + await db.run(sql`drop table ${sql.identifier(customTable)}`); +}); + +tests(); diff --git a/integration-tests/tests/sqlite/sql-js.test.ts b/integration-tests/tests/sqlite/sql-js.test.ts new file mode 100644 index 000000000..ec3d7b583 --- /dev/null +++ b/integration-tests/tests/sqlite/sql-js.test.ts @@ -0,0 +1,62 @@ +import { sql } from 'drizzle-orm'; +import type { SQLJsDatabase } from 'drizzle-orm/sql-js'; +import { drizzle } from 'drizzle-orm/sql-js'; +import { migrate } from 'drizzle-orm/sql-js/migrator'; +import type { Database } from 'sql.js'; +import initSqlJs from 'sql.js'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; + +const ENABLE_LOGGING = false; + +let db: SQLJsDatabase; +let client: Database; + +beforeAll(async () => { + const SQL = await initSqlJs(); + client = new SQL.Database(); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +beforeEach((ctx) => { + ctx.sqlite = { + db, + }; +}); + +afterAll(async () => { + client?.close(); +}); + +test('migrator', async () => { + db.run(sql`drop table if exists another_users`); + db.run(sql`drop table if exists users12`); + db.run(sql`drop table if exists __drizzle_migrations`); + + migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + + db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result = db.select().from(usersMigratorTable).all(); + + db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result2 = db.select().from(anotherUsersMigratorTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + db.run(sql`drop table another_users`); + db.run(sql`drop table users12`); + db.run(sql`drop table __drizzle_migrations`); +}); + +skipTests([ + 
/** + * doesn't work properly: + * Expect: should rollback transaction and don't insert/ update data + * Received: data inserted/ updated + */ + 'transaction rollback', + 'nested transaction rollback', +]); +tests(); diff --git a/integration-tests/tests/sqlite/sqlite-common.ts b/integration-tests/tests/sqlite/sqlite-common.ts new file mode 100644 index 000000000..a816d8cca --- /dev/null +++ b/integration-tests/tests/sqlite/sqlite-common.ts @@ -0,0 +1,2684 @@ +import { + and, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + exists, + getTableColumns, + gt, + gte, + inArray, + lt, + max, + min, + Name, + sql, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import { + alias, + type BaseSQLiteDatabase, + blob, + except, + foreignKey, + getTableConfig, + getViewConfig, + int, + integer, + intersect, + numeric, + primaryKey, + sqliteTable, + sqliteTableCreator, + sqliteView, + text, + union, + unionAll, + unique, + uniqueKeyName, +} from 'drizzle-orm/sqlite-core'; +import { beforeEach, describe, expect, test } from 'vitest'; +import type { Equal } from '~/utils'; +import { Expect } from '~/utils'; + +declare module 'vitest' { + interface TestContext { + sqlite: { + db: BaseSQLiteDatabase<'async' | 'sync', any, Record>; + }; + } +} + +export const usersTable = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + verified: integer('verified', { mode: 'boolean' }).notNull().default(false), + json: blob('json', { mode: 'json' }).$type(), + createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), +}); + +const usersOnUpdate = sqliteTable('users_on_update', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: integer('updated_at', { mode: 'timestamp_ms' }).$onUpdate(() => new Date()), + alwaysNull: 
text('always_null').$type().$onUpdate(() => null), + // uppercaseName: text('uppercase_name').$onUpdateFn(() => + // sql`upper(s.name)` + // ), This doesn't seem to be supported in sqlite +}); + +export const users2Table = sqliteTable('users2', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), +}); + +export const citiesTable = sqliteTable('cities', { + id: integer('id').primaryKey(), + name: text('name').notNull(), +}); + +const coursesTable = sqliteTable('courses', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + categoryId: integer('category_id').references(() => courseCategoriesTable.id), +}); + +const courseCategoriesTable = sqliteTable('course_categories', { + id: integer('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = sqliteTable('orders', { + id: integer('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), +}); + +export const usersMigratorTable = sqliteTable('users12', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +export const anotherUsersMigratorTable = sqliteTable('another_users', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +const pkExampleTable = sqliteTable('pk_example', { + id: integer('id').notNull(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => ({ + compositePk: primaryKey({ columns: [table.id, table.name] }), +})); + +const bigIntExample = sqliteTable('big_int_example', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + bigInt: blob('big_int', { mode: 'bigint' }).notNull(), +}); + +// To test aggregate functions +const aggregateTable = sqliteTable('aggregate_table', { + id: 
integer('id').primaryKey({ autoIncrement: true }).notNull(), + name: text('name').notNull(), + a: integer('a'), + b: integer('b'), + c: integer('c'), + nullOnly: integer('null_only'), +}); + +export function tests() { + describe('common', () => { + beforeEach(async (ctx) => { + const { db } = ctx.sqlite; + + await db.run(sql`drop table if exists ${usersTable}`); + await db.run(sql`drop table if exists ${users2Table}`); + await db.run(sql`drop table if exists ${citiesTable}`); + await db.run(sql`drop table if exists ${coursesTable}`); + await db.run(sql`drop table if exists ${courseCategoriesTable}`); + await db.run(sql`drop table if exists ${orders}`); + await db.run(sql`drop table if exists ${bigIntExample}`); + await db.run(sql`drop table if exists ${pkExampleTable}`); + + await db.run(sql` + create table ${usersTable} ( + id integer primary key, + name text not null, + verified integer not null default 0, + json blob, + created_at integer not null default (strftime('%s', 'now')) + ) + `); + + await db.run(sql` + create table ${citiesTable} ( + id integer primary key, + name text not null + ) + `); + await db.run(sql` + create table ${courseCategoriesTable} ( + id integer primary key, + name text not null + ) + `); + + await db.run(sql` + create table ${users2Table} ( + id integer primary key, + name text not null, + city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) + ) + `); + await db.run(sql` + create table ${coursesTable} ( + id integer primary key, + name text not null, + category_id integer references ${courseCategoriesTable}(${sql.identifier(courseCategoriesTable.id.name)}) + ) + `); + await db.run(sql` + create table ${orders} ( + id integer primary key, + region text not null, + product text not null, + amount integer not null, + quantity integer not null + ) + `); + await db.run(sql` + create table ${pkExampleTable} ( + id integer not null, + name text not null, + email text not null, + primary key (id, name) + ) + `); + 
await db.run(sql` + create table ${bigIntExample} ( + id integer primary key, + name text not null, + big_int blob not null + ) + `); + }); + + async function setupSetOperationTest(db: BaseSQLiteDatabase) { + await db.run(sql`drop table if exists users2`); + await db.run(sql`drop table if exists cities`); + await db.run(sql` + create table \`cities\` ( + id integer primary key, + name text not null + ) + `); + + await db.run(sql` + create table \`users2\` ( + id integer primary key, + name text not null, + city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) + ) + `); + + await db.insert(citiesTable).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + } + + async function setupAggregateFunctionsTest(db: BaseSQLiteDatabase) { + await db.run(sql`drop table if exists "aggregate_table"`); + await db.run( + sql` + create table "aggregate_table" ( + "id" integer primary key autoincrement not null, + "name" text not null, + "a" integer, + "b" integer, + "c" integer, + "null_only" integer + ); + `, + ); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + } + + test('table config: foreign keys name', async () => { + const table = sqliteTable('cities', { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: 
text('state'), + }, (t) => ({ + f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + f1: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk_deprecated' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(2); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); + expect(tableConfig.foreignKeys[1]!.getName()).toBe('custom_fk_deprecated'); + }); + + test('table config: primary keys name', async () => { + const table = sqliteTable('cities', { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); + }); + + test('insert bigint values', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(bigIntExample).values({ name: 'one', bigInt: BigInt('0') }).run(); + await db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); + await db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); + await db.insert(bigIntExample).values({ name: 'four', bigInt: BigInt('1234567890') }).run(); + await db.insert(bigIntExample).values({ name: 'five', bigInt: BigInt('12345678900987654321') }).run(); + + const result = await db.select().from(bigIntExample).all(); + expect(result).toEqual([ + { id: 1, name: 'one', bigInt: BigInt('0') }, + { id: 2, name: 'two', bigInt: BigInt('127') }, + { id: 3, name: 'three', bigInt: BigInt('32767') }, + { id: 4, name: 'four', bigInt: BigInt('1234567890') }, + { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, + ]); + }); + + test('select all fields', async (ctx) => { + const { db } = ctx.sqlite; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }).run(); + 
const result = await db.select().from(usersTable).all(); + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(5000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); + }); + + test('select partial', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const result = await db.select({ name: usersTable.name }).from(usersTable).all(); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test('select sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable).all(); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select typed sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable).all(); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select distinct', async (ctx) => { + const { db } = ctx.sqlite; + + const usersDistinctTable = sqliteTable('users_distinct', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.run(sql`drop table if exists ${usersDistinctTable}`); + await db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]).run(); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ).all(); + + await db.run(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + 
test('insert returning sql', async (ctx) => { + const { db } = ctx.sqlite; + + const users = await db.insert(usersTable).values({ name: 'John' }).returning({ + name: sql`upper(${usersTable.name})`, + }).all(); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('$default function', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('delete returning sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ + name: sql`upper(${usersTable.name})`, + }).all(); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('query check: insert single empty row', (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into "users" ("id", "name", "state") values (null, ?, null)', + params: ['Dan'], + }); + }); + + test('query check: insert multiple empty rows', (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into "users" ("id", "name", "state") values (null, ?, null), (null, ?, null)', + params: ['Dan', 'Dan'], + }); + }); + + test('Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('empty_insert_single', { + 
id: integer('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values({}).run(); + + const res = await db.select().from(users).all(); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test('Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('empty_insert_multiple', { + id: integer('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values([{}, {}]).run(); + + const res = await db.select().from(users).all(); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test('update returning sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ + name: sql`upper(${usersTable.name})`, + }).all(); + + expect(users).toEqual([{ name: 'JANE' }]); + }); + + test('insert with auto increment', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'George' }, + { name: 'Austin' }, + ]).run(); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'George' }, + { id: 4, name: 'Austin' }, + ]); + }); + + test('insert with default values', async (ctx) => { + const { db } = ctx.sqlite; + + 
await db.insert(usersTable).values({ name: 'John' }).run(); + const result = await db.select().from(usersTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); + }); + + test('insert with overridden default values', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John', verified: true }).run(); + const result = await db.select().from(usersTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); + }); + + test('update with returning all fields', async (ctx) => { + const { db } = ctx.sqlite; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning() + .all(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(5000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); + }); + + test('update with returning partial', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ + id: usersTable.id, + name: usersTable.name, + }).all(); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('delete with returning all fields', async (ctx) => { + const { db } = ctx.sqlite; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(5000); + expect(users).toEqual([{ id: 1, name: 'John', 
verified: false, json: null, createdAt: users[0]!.createdAt }]); + }); + + test('delete with returning partial', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ + id: usersTable.id, + name: usersTable.name, + }).all(); + + expect(users).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert + select', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.insert(usersTable).values({ name: 'Jane' }).run(); + const result2 = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); + + expect(result2).toEqual([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + }); + + test('json insert', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + json: usersTable.json, + }).from(usersTable).all(); + + expect(result).toEqual([{ id: 1, name: 'John', json: ['foo', 'bar'] }]); + }); + + test('insert many', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', json: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]).run(); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + json: usersTable.json, + verified: usersTable.verified, + }).from(usersTable).all(); + + expect(result).toEqual([ + { id: 1, name: 'John', json: null, verified: false }, + { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', json: null, verified: false }, + { id: 4, name: 
'Austin', json: null, verified: true }, + ]); + }); + + test('insert many with returning', async (ctx) => { + const { db } = ctx.sqlite; + + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', json: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]) + .returning({ + id: usersTable.id, + name: usersTable.name, + json: usersTable.json, + verified: usersTable.verified, + }) + .all(); + + expect(result).toEqual([ + { id: 1, name: 'John', json: null, verified: false }, + { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', json: null, verified: false }, + { id: 4, name: 'Austin', json: null, verified: true }, + ]); + }); + + test('partial join with alias', async (ctx) => { + const { db } = ctx.sqlite; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test('full join with alias', async (ctx) => { + const { db } = ctx.sqlite; + + const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + await db.run(sql`create table ${users} (id integer primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 
11)) + .where(eq(users.id, 10)) + .all(); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.run(sql`drop table ${users}`); + }); + + test('select from alias', async (ctx) => { + const { db } = ctx.sqlite; + + const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + await db.run(sql`create table ${users} (id integer primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .all(); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.run(sql`drop table ${users}`); + }); + + test('insert with spaces', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('prepared statement', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); + const result = await statement.all(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('prepared statement reuse', async (ctx) => { + const { db } = ctx.sqlite; + + const stmt = db.insert(usersTable).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) 
{ + await stmt.run({ name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable).all(); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ name: 'John' }).run(); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.all({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('select with group by as field', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name) + .all(); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test('select with exists', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); + + const user = alias(usersTable, 'user'); + const result = await db.select({ name: usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ).all(); + + expect(result).toEqual([{ name: 'John' }]); + }); + + 
test('select with group by as sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`) + .all(); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id) + .all(); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .all(); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by complex query', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1) + .all(); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test('build query', async (ctx) => { + const { db } = ctx.sqlite; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', + 
params: [], + }); + }); + + test('insert via db.run + select via db.all', async (ctx) => { + const { db } = ctx.sqlite; + + await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert via db.get', async (ctx) => { + const { db } = ctx.sqlite; + + const inserted = await db.get<{ id: number; name: string }>( + sql`insert into ${usersTable} (${new Name( + usersTable.name.name, + )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted).toEqual({ id: 1, name: 'John' }); + }); + + test('insert via db.run + select via db.get', async (ctx) => { + const { db } = ctx.sqlite; + + await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.get<{ id: number; name: string }>( + sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, + ); + expect(result).toEqual({ id: 1, name: 'John' }); + }); + + test('insert via db.get w/ query builder', async (ctx) => { + const { db } = ctx.sqlite; + + const inserted = await db.get>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted).toEqual({ id: 1, name: 'John' }); + }); + + test('join subquery', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(courseCategoriesTable).values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]).run(); + + await db.insert(coursesTable).values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]).run(); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + 
total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name) + .all(); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + }); + + test('with ... select', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]).run(); + + const regionalSales = await db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = await db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as int)`, + productSales: sql`cast(sum(${orders.amount}) as int)`, + }) + 
.from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product) + .all(); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + }); + + test('with ... update', async (ctx) => { + const { db } = ctx.sqlite; + + const products = sqliteTable('products', { + id: integer('id').primaryKey(), + price: numeric('price').notNull(), + cheap: integer('cheap', { mode: 'boolean' }).notNull().default(false), + }); + + await db.run(sql`drop table if exists ${products}`); + await db.run(sql` + create table ${products} ( + id integer primary key, + price numeric not null, + cheap integer not null default 0 + ) + `); + + await db.insert(products).values([ + { price: '10.99' }, + { price: '25.85' }, + { price: '32.99' }, + { price: '2.50' }, + { price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + const result = await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)) + .returning({ + id: products.id, + }); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('with ... 
insert', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + username: text('username').notNull(), + admin: integer('admin', { mode: 'boolean' }).notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + await db.run(sql`create table ${users} (username text not null, admin integer not null default 0)`); + + const userCount = db + .$with('user_count') + .as( + db + .select({ + value: sql`count(*)`.as('value'), + }) + .from(users), + ); + + const result = await db + .with(userCount) + .insert(users) + .values([ + { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, + ]) + .returning({ + admin: users.admin, + }); + + expect(result).toEqual([{ admin: true }]); + }); + + test('with ... delete', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + const result = await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) + .returning({ + id: orders.id, + }); + + expect(result).toEqual([ + { id: 6 }, + { id: 7 }, + { id: 8 }, + ]); + }); + + test('select from subquery sql', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]).run(); + + const sq = db + .select({ name: sql`${users2Table.name} || ' 
modified'`.as('name') }) + .from(users2Table) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq).all(); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test('select a field without joining its table', (ctx) => { + const { db } = ctx.sqlite; + + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); + }); + + test('select all fields from subquery without alias', (ctx) => { + const { db } = ctx.sqlite; + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); + }); + + test('select count()', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable).all(); + + expect(res).toEqual([{ count: 2 }]); + }); + + test('having', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]).run(); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name) + .all(); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + + test('view', async (ctx) => { + const { db } = ctx.sqlite; + + const newYorkers1 = sqliteView('new_yorkers') 
+ .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = sqliteView('new_yorkers', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = sqliteView('new_yorkers', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.run(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]).run(); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]).run(); + + { + const result = await db.select().from(newYorkers1).all(); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).all(); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).all(); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).all(); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.run(sql`drop view ${newYorkers1}`); + }); + + test('insert null timestamp', async (ctx) => { + const { db } = ctx.sqlite; + + const test = sqliteTable('test', { + t: integer('t', { mode: 'timestamp' }), + }); + + await db.run(sql`create table ${test} (t timestamp)`); + + await db.insert(test).values({ t: null }).run(); + const res = await db.select().from(test).all(); + expect(res).toEqual([{ t: null }]); + + await db.run(sql`drop table ${test}`); + }); + + 
test('select from raw sql', async (ctx) => { + const { db } = ctx.sqlite; + + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`).all(); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test('select from raw sql with joins', async (ctx) => { + const { db } = ctx.sqlite; + + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`.as('userName'), + userCity: sql`users.city`, + cityName: sql`cities.name`.as('cityName'), + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) + .all(); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from select', async (ctx) => { + const { db } = ctx.sqlite; + + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`.as('userName'), + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`.as('cityName'), + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)) + .all(); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from with clause', async (ctx) => { + const { db } = ctx.sqlite; + + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + 
db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) + .all(); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('prefixed table', async (ctx) => { + const { db } = ctx.sqlite; + + const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); + + const users = sqliteTable('test_prefixed_table_with_unique_name', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }).run(); + + const result = await db.select().from(users).all(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.run(sql`drop table ${users}`); + }); + + test('orderBy with aliased column', (ctx) => { + const { db } = ctx.sqlite; + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); + }); + + test('transaction', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users_transactions', { + id: integer('id').primaryKey(), + balance: integer('balance').notNull(), + }); + const products = sqliteTable('products_transactions', { + id: integer('id').primaryKey(), + price: 
integer('price').notNull(), + stock: integer('stock').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + await db.run(sql`drop table if exists ${products}`); + + await db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); + await db.run( + sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, + ); + + const user = await db.insert(users).values({ balance: 100 }).returning().get(); + const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().get(); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)).run(); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)).run(); + }); + + const result = await db.select().from(users).all(); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.run(sql`drop table ${users}`); + await db.run(sql`drop table ${products}`); + }); + + test('transaction rollback', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users_transactions_rollback', { + id: integer('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, + ); + await expect(async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }).run(); + tx.rollback(); + }); + }).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users).all(); + + expect(result).toEqual([]); + + await db.run(sql`drop table ${users}`); + }); + + test('nested transaction', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users_nested_transactions', { + id: 
integer('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }).run(); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }).run(); + }); + }); + + const result = await db.select().from(users).all(); + + expect(result).toEqual([{ id: 1, balance: 200 }]); + + await db.run(sql`drop table ${users}`); + }); + + test('nested transaction rollback', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users_nested_transactions_rollback', { + id: integer('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }).run(); + + await expect(async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }).run(); + tx.rollback(); + }); + }).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users).all(); + + expect(result).toEqual([{ id: 1, balance: 100 }]); + + await db.run(sql`drop table ${users}`); + }); + + test('join subquery with join', async (ctx) => { + const { db } = ctx.sqlite; + + const internalStaff = sqliteTable('internal_staff', { + userId: integer('user_id').notNull(), + }); + + const customUser = sqliteTable('custom_user', { + id: integer('id').notNull(), + }); + + const ticket = sqliteTable('ticket', { + staffId: integer('staff_id').notNull(), + }); + + await db.run(sql`drop table if exists ${internalStaff}`); + await db.run(sql`drop table if exists 
${customUser}`); + await db.run(sql`drop table if exists ${ticket}`); + + await db.run(sql`create table internal_staff (user_id integer not null)`); + await db.run(sql`create table custom_user (id integer not null)`); + await db.run(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }).run(); + await db.insert(customUser).values({ id: 1 }).run(); + await db.insert(ticket).values({ staffId: 1 }).run(); + + const subq = await db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) + .all(); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.run(sql`drop table ${internalStaff}`); + await db.run(sql`drop table ${customUser}`); + await db.run(sql`drop table ${ticket}`); + }); + + test('join view as subquery', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users_join_view', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.run(sql`drop table if exists ${users}`); + await db.run(sql`drop view if exists ${newYorkers}`); + + await db.run( + sql`create table ${users} (id integer not null primary key, name text not null, city_id integer not null)`, + ); + await db.run(sql`create view ${newYorkers} as ${getViewConfig(newYorkers).query}`); + + db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]).run(); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await 
db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.run(sql`drop view ${newYorkers}`); + await db.run(sql`drop table ${users}`); + }); + + test('insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing() + .run(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert with onConflict do nothing using composite pk', async (ctx) => { + const { db } = ctx.sqlite; + + await db + .insert(pkExampleTable) + .values({ id: 1, name: 'John', email: 'john@example.com' }) + .run(); + + await db + .insert(pkExampleTable) + .values({ id: 1, name: 'John', email: 'john1@example.com' }) + .onConflictDoNothing() + .run(); + + const res = await db + .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) + .from(pkExampleTable) + .where(eq(pkExampleTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }]); + }); + + test('insert with onConflict do nothing using target', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing({ target: usersTable.id 
}) + .run(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert with onConflict do nothing using composite pk as target', async (ctx) => { + const { db } = ctx.sqlite; + + await db + .insert(pkExampleTable) + .values({ id: 1, name: 'John', email: 'john@example.com' }) + .run(); + + await db + .insert(pkExampleTable) + .values({ id: 1, name: 'John', email: 'john1@example.com' }) + .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) + .run(); + + const res = await db + .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) + .from(pkExampleTable) + .where(eq(pkExampleTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }]); + }); + + test('insert with onConflict do update', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) + .run(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test('insert with onConflict do update where', async (ctx) => { + const { db } = ctx.sqlite; + + await db + .insert(usersTable) + .values([{ id: 1, name: 'John', verified: false }]) + .run(); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John1', verified: true }) + .onConflictDoUpdate({ + target: usersTable.id, + set: { name: 'John1', verified: true }, + where: eq(usersTable.verified, false), + }) + .run(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified }) + .from(usersTable) + 
.where(eq(usersTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John1', verified: true }]); + }); + + test('insert with onConflict do update using composite pk', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); + + await db + .insert(pkExampleTable) + .values({ id: 1, name: 'John', email: 'john@example.com' }) + .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) + .run(); + + const res = await db + .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) + .from(pkExampleTable) + .where(eq(pkExampleTable.id, 1)) + .all(); + + expect(res).toEqual([{ id: 1, name: 'John', email: 'john1@example.com' }]); + }); + + test('insert undefined', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name'), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table ${users} (id integer primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }).run(); + })()).resolves.not.toThrowError(); + + await db.run(sql`drop table ${users}`); + }); + + test('update undefined', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name'), + }); + + await db.run(sql`drop table if exists ${users}`); + + await db.run( + sql`create table ${users} (id integer primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }).run(); + })()).rejects.toThrowError(); + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }).run(); + })()).resolves.not.toThrowError(); + + await db.run(sql`drop table ${users}`); + }); + + test('async api - CRUD', async (ctx) => { + const 
{ db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name'), + }); + + db.run(sql`drop table if exists ${users}`); + + db.run( + sql`create table ${users} (id integer primary key, name text)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + + await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); + + const res1 = await db.select().from(users); + + expect(res1).toEqual([{ id: 1, name: 'John1' }]); + + await db.delete(users).where(eq(users.id, 1)); + + const res2 = await db.select().from(users); + + expect(res2).toEqual([]); + + await db.run(sql`drop table ${users}`); + }); + + test('async api - insert + select w/ prepare + async execute', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name'), + }); + + db.run(sql`drop table if exists ${users}`); + + db.run( + sql`create table ${users} (id integer primary key, name text)`, + ); + + const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); + await insertStmt.execute(); + + const selectStmt = db.select().from(users).prepare(); + const res = await selectStmt.execute(); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + + const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); + await updateStmt.execute(); + + const res1 = await selectStmt.execute(); + + expect(res1).toEqual([{ id: 1, name: 'John1' }]); + + const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); + await deleteStmt.execute(); + + const res2 = await selectStmt.execute(); + + expect(res2).toEqual([]); + + await db.run(sql`drop table ${users}`); + }); + + test('async api - insert + select w/ prepare + sync execute', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: 
integer('id').primaryKey(), + name: text('name'), + }); + + db.run(sql`drop table if exists ${users}`); + + db.run( + sql`create table ${users} (id integer primary key, name text)`, + ); + + const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); + await insertStmt.execute(); + + const selectStmt = db.select().from(users).prepare(); + const res = await selectStmt.execute(); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + + const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); + await updateStmt.execute(); + + const res1 = await selectStmt.execute(); + + expect(res1).toEqual([{ id: 1, name: 'John1' }]); + + const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); + await deleteStmt.execute(); + + const res2 = await selectStmt.execute(); + + expect(res2).toEqual([]); + + await db.run(sql`drop table ${users}`); + }); + + test('select + .get() for empty result', async (ctx) => { + const { db } = ctx.sqlite; + + const users = sqliteTable('users', { + id: integer('id').primaryKey(), + name: text('name'), + }); + + db.run(sql`drop table if exists ${users}`); + + db.run( + sql`create table ${users} (id integer primary key, name text)`, + ); + + const res = await db.select().from(users).where(eq(users.id, 1)).get(); + + expect(res).toBeUndefined(); + + await db.run(sql`drop table ${users}`); + }); + + test('set operations (union) from query builder with subquery', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + ).orderBy(asc(sql`name`)).as('sq'); + + const result = await db.select().from(sq).limit(5).offset(5); + + expect(result).toHaveLength(5); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 7, name: 'Mary' }, + { id: 1, name: 'New York' }, + { id: 4, 
name: 'Peter' }, + { id: 8, name: 'Sally' }, + ]); + + await expect(async () => { + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + ).orderBy(asc(sql`name`)); + }).rejects.toThrowError(); + }); + + test('set operations (union) as function', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`name`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 1, name: 'New York' }, + ]); + + await expect(async () => { + union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`name`)); + }).rejects.toThrowError(); + }); + + test('set operations (union all) from query builder', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + ).orderBy(asc(citiesTable.id)).limit(5).offset(1); + + expect(result).toHaveLength(5); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 2, name: 'London' }, + { id: 3, name: 
'Tampa' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect(async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + ).orderBy(asc(citiesTable.id)).limit(5).offset(1); + }).rejects.toThrowError(); + }); + + test('set operations (union all) as function', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + ]); + + await expect(async () => { + unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + }).rejects.toThrowError(); + }); + + test('set operations (intersect) from query builder', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`name`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect(async () => 
{ + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`name`)); + }).rejects.toThrowError(); + }); + + test('set operations (intersect) as function', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect(async () => { + intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + }).rejects.toThrowError(); + }); + + test('set operations (except) from query builder', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect(async () => { + db + .select() + .from(citiesTable).except( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + }).rejects.toThrowError(); + }); + + test('set operations (except) as function', async (ctx) => { + const { db } = ctx.sqlite; + + 
await setupSetOperationTest(db); + + const result = await except( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect(async () => { + except( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + }).rejects.toThrowError(); + }); + + test('set operations (mixed) from query builder', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect(async () => { + db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + }).rejects.toThrowError(); + }); + + test('set operations (mixed all) as function with subquery', async (ctx) => { + const { db } = ctx.sqlite; + + await setupSetOperationTest(db); + + const sq = union( + db + .select({ id: users2Table.id, 
name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)).as('sq'); + + const result = await db.select().from(sq).limit(4).offset(1); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + ]); + + await expect(async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + }).rejects.toThrowError(); + }); + + test('aggregate function: count', async (ctx) => { + const { db } = ctx.sqlite; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test('aggregate function: avg', async (ctx) => { + const { db } = ctx.sqlite; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.a) }).from(table); + const result2 = await 
db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('24'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('42.5'); + }); + + test('aggregate function: sum', async (ctx) => { + const { db } = ctx.sqlite; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('170'); + }); + + test('aggregate function: max', async (ctx) => { + const { db } = ctx.sqlite; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBeNull(); + }); + + test('aggregate function: min', async (ctx) => { + const { db } = ctx.sqlite; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBeNull(); + }); + + test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { + const { db } = ctx.sqlite; + + await db.run(sql`drop table if exists ${usersOnUpdate}`); + + await db.run( + sql` + create table ${usersOnUpdate} ( + id integer primary key autoincrement, + name text not null, + update_counter integer default 1 not null, + updated_at integer, + always_null text + ) + `, + ); + + await 
db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 250; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { + const { db } = ctx.sqlite; + + await db.run(sql`drop table if exists ${usersOnUpdate}`); + + await db.run( + sql` + create table ${usersOnUpdate} ( + id integer primary key autoincrement, + name text not null, + update_counter integer default 1, + updated_at integer, + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will be null after updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: null, alwaysNull: 
null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 250; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + }); + + test('table configs: unique third param', () => { + const cities1Table = sqliteTable('cities1', { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: unique().on(t.name, t.state), + f1: unique('custom').on(t.name, t.state), + })); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + expect( + tableConfig.uniqueConstraints[0]?.name, + ).toEqual( + uniqueKeyName(cities1Table, tableConfig.uniqueConstraints[0]?.columns?.map((column) => column.name) ?? []), + ); + + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom'); + }); + + test('table configs: unique in column', () => { + const cities1Table = sqliteTable('cities1', { + id: int('id').primaryKey(), + name: text('name').notNull().unique(), + state: text('state').unique('custom'), + field: text('field').unique(), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.isUnique).toBeTruthy(); + expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.isUnique).toBeTruthy(); + expect(columnState?.uniqueName).toBe('custom'); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.isUnique).toBeTruthy(); + 
expect(columnField?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnField!.name])); + }); +} diff --git a/integration-tests/tests/sqlite-proxy-batch.test.ts b/integration-tests/tests/sqlite/sqlite-proxy-batch.test.ts similarity index 98% rename from integration-tests/tests/sqlite-proxy-batch.test.ts rename to integration-tests/tests/sqlite/sqlite-proxy-batch.test.ts index ffc22a5f5..1137faaf4 100644 --- a/integration-tests/tests/sqlite-proxy-batch.test.ts +++ b/integration-tests/tests/sqlite/sqlite-proxy-batch.test.ts @@ -1,8 +1,9 @@ -import 'dotenv/config'; -import Database from 'better-sqlite3'; +/* eslint-disable drizzle-internal/require-entity-kind */ import type BetterSqlite3 from 'better-sqlite3'; +import Database from 'better-sqlite3'; import { eq, relations, sql } from 'drizzle-orm'; -import { type AnySQLiteColumn, integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import type { AnySQLiteColumn } from 'drizzle-orm/sqlite-core'; +import { integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import type { SqliteRemoteDatabase, SqliteRemoteResult } from 'drizzle-orm/sqlite-proxy'; import { drizzle as proxyDrizzle } from 'drizzle-orm/sqlite-proxy'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; @@ -43,7 +44,7 @@ export const usersToGroupsTable = sqliteTable( ), }, (t) => ({ - pk: primaryKey(t.userId, t.groupId), + pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ @@ -132,7 +133,6 @@ const schema = { usersConfig, }; -// eslint-disable-next-line drizzle/require-entity-kind class ServerSimulator { constructor(private db: BetterSqlite3.Database) {} @@ -217,9 +217,7 @@ let serverSimulator: ServerSimulator; beforeAll(async () => { const dbPath = process.env['SQLITE_DB_PATH'] ?? 
':memory:'; - client = new Database(dbPath); - serverSimulator = new ServerSimulator(client); db = proxyDrizzle(async (sql, params, method) => { diff --git a/integration-tests/tests/sqlite/sqlite-proxy.test.ts b/integration-tests/tests/sqlite/sqlite-proxy.test.ts new file mode 100644 index 000000000..9066b2bec --- /dev/null +++ b/integration-tests/tests/sqlite/sqlite-proxy.test.ts @@ -0,0 +1,146 @@ +/* eslint-disable drizzle-internal/require-entity-kind */ +import type BetterSqlite3 from 'better-sqlite3'; +import Database from 'better-sqlite3'; +import { Name, sql } from 'drizzle-orm'; +import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy'; +import { drizzle as proxyDrizzle } from 'drizzle-orm/sqlite-proxy'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; +import { tests, usersTable } from './sqlite-common'; + +class ServerSimulator { + constructor(private db: BetterSqlite3.Database) {} + + async query(sql: string, params: any[], method: string) { + if (method === 'run') { + try { + const result = this.db.prepare(sql).run(params); + return { data: result as any }; + } catch (e: any) { + return { error: e.message }; + } + } else if (method === 'all' || method === 'values') { + try { + const rows = this.db.prepare(sql).raw().all(params); + return { data: rows }; + } catch (e: any) { + return { error: e.message }; + } + } else if (method === 'get') { + try { + const row = this.db.prepare(sql).raw().get(params); + return { data: row }; + } catch (e: any) { + return { error: e.message }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + migrations(queries: string[]) { + this.db.exec('BEGIN'); + try { + for (const query of queries) { + this.db.exec(query); + } + this.db.exec('COMMIT'); + } catch { + this.db.exec('ROLLBACK'); + } + + return {}; + } +} + +let db: SqliteRemoteDatabase; +let client: Database.Database; +let serverSimulator: ServerSimulator; + +beforeAll(async 
() => { + const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; + client = new Database(dbPath); + serverSimulator = new ServerSimulator(client); + + db = proxyDrizzle(async (sql, params, method) => { + try { + const rows = await serverSimulator.query(sql, params, method); + + if (rows.error !== undefined) { + throw new Error(rows.error); + } + + return { rows: rows.data }; + } catch (e: any) { + console.error('Error from sqlite proxy server:', e.response.data); + throw e; + } + }); +}); + +beforeEach((ctx) => { + ctx.sqlite = { + db, + }; +}); + +afterAll(async () => { + client?.close(); +}); + +skipTests([ + // Different driver respond + 'insert via db.get w/ query builder', + 'insert via db.run + select via db.get', + 'insert via db.get', + 'insert via db.run + select via db.all', +]); +tests(); + +beforeEach(async () => { + await db.run(sql`drop table if exists ${usersTable}`); + + await db.run(sql` + create table ${usersTable} ( + id integer primary key, + name text not null, + verified integer not null default 0, + json blob, + created_at integer not null default (strftime('%s', 'now')) + ) + `); +}); + +test('insert via db.get w/ query builder', async () => { + const inserted = await db.get>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted).toEqual([1, 'John']); +}); + +test('insert via db.run + select via db.get', async () => { + await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.get<{ id: number; name: string }>( + sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, + ); + expect(result).toEqual([1, 'John']); +}); + +test('insert via db.get', async () => { + const inserted = await db.get<{ id: number; name: string }>( + sql`insert into ${usersTable} (${new Name( + usersTable.name.name, + )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + 
expect(inserted).toEqual([1, 'John']); +}); + +test('insert via db.run + select via db.all', async (ctx) => { + const { db } = ctx.sqlite; + + await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(result).toEqual([[1, 'John']]); +}); diff --git a/integration-tests/tests/vercel-pg.test.ts b/integration-tests/tests/vercel-pg.test.ts deleted file mode 100644 index 16a9a9ab4..000000000 --- a/integration-tests/tests/vercel-pg.test.ts +++ /dev/null @@ -1,2470 +0,0 @@ -import 'dotenv/config'; - -import { createClient, type VercelClient } from '@vercel/postgres'; -import type { TestFn } from 'ava'; -import anyTest from 'ava'; -import Docker from 'dockerode'; -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - eq, - gt, - gte, - inArray, - lt, - name, - placeholder, - type SQL, - sql, - type SQLWrapper, - TransactionRollbackError, -} from 'drizzle-orm'; -import { - alias, - boolean, - char, - cidr, - getMaterializedViewConfig, - getViewConfig, - inet, - integer, - jsonb, - macaddr, - macaddr8, - type PgColumn, - pgEnum, - pgMaterializedView, - pgTable, - pgTableCreator, - pgView, - serial, - text, - timestamp, - uuid as pgUuid, - varchar, -} from 'drizzle-orm/pg-core'; -import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; -import { migrate } from 'drizzle-orm/vercel-postgres/migrator'; -import getPort from 'get-port'; -import { v4 as uuid } from 'uuid'; -import { type Equal, Expect, randomString } from './utils.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const citiesTable = pgTable('cities', { 
- id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const network = pgTable('network_table', { - inet: inet('inet').notNull(), - cidr: cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), - macaddr8: macaddr8('macaddr8').notNull(), -}); - -const salEmp = pgTable('sal_emp', { - name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const _tictactoe = pgTable('tictactoe', { - squares: integer('squares').array(3).array(3), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -interface Context { - docker: Docker; - pgContainer: Docker.Container; - db: VercelPgDatabase; - client: VercelClient; -} - -const test = anyTest as TestFn; - -async function createDockerDB(ctx: Context): Promise { - const docker = (ctx.docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) - ); - - ctx.pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await ctx.pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -test.before(async (t) => { - const ctx = t.context; - const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB(ctx)); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - ctx.client = createClient({ connectionString }); - await ctx.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); - throw lastError; - } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING }); -}); - -test.after.always(async (t) => { - const ctx = t.context; - await ctx.client?.end().catch(console.error); - await ctx.pgContainer?.stop().catch(console.error); -}); - -test.beforeEach(async (t) => { - const ctx = t.context; - await ctx.db.execute(sql`drop schema public cascade`); - await ctx.db.execute(sql`create schema public`); - await ctx.db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - await ctx.db.execute( - sql` - create table cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - ); - await ctx.db.execute( - sql` - create table users2 ( - id serial 
primary key, - name text not null, - city_id integer references cities(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table course_categories ( - id serial primary key, - name text not null - ) - `, - ); - await ctx.db.execute( - sql` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - ); - await ctx.db.execute( - sql` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - ); - await ctx.db.execute( - sql` - create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null - ) - `, - ); - await ctx.db.execute( - sql` - create table sal_emp ( - name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - ); - await ctx.db.execute( - sql` - create table tictactoe ( - squares integer[3][3] not null - ) - `, - ); -}); - -test.serial('select all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - t.assert(result[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('select sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select typed sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - const users = await db.select({ - 
name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('select distinct', async (t) => { - const { db } = t.context; - - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - t.deepEqual(users1, [{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - - t.deepEqual(users2.length, 2); - t.deepEqual(users2[0]?.id, 1); - t.deepEqual(users2[1]?.id, 2); - - t.deepEqual(users3.length, 2); - t.deepEqual(users3[0]?.name, 'Jane'); - t.deepEqual(users3[1]?.name, 'John'); -}); - -test.serial('insert returning sql', async (t) => { - const { db } = t.context; - - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('delete returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: 
sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JOHN' }]); -}); - -test.serial('update returning sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - t.deepEqual(users, [{ name: 'JANE' }]); -}); - -test.serial('update with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test.serial('update with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'Jane' }]); -}); - -test.serial('delete with returning all fields', async (t) => { - const { db } = t.context; - - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - t.assert(users[0]!.createdAt instanceof Date); // eslint-disable-line no-instanceof/no-instanceof - t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - t.deepEqual(users, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - 
-test.serial('delete with returning partial', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - t.deepEqual(users, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert + select', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - t.deepEqual(result, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - t.deepEqual(result2, [ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test.serial('json insert', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test.serial('char insert', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Austin', state: 'TX' }]); -}); - -test.serial('char update', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); - const result = await db - .select({ id: 
citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, [{ id: 1, name: 'Atlanta', state: 'GA' }]); -}); - -test.serial('char delete', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - t.deepEqual(result, []); -}); - -test.serial('insert with overridden default values', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test.serial('insert many', async (t) => { - const { db } = t.context; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('insert many with returning', async (t) => { - const { db } = t.context; - - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - 
t.deepEqual(result, [ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test.serial('select with group by as field', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.name); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as sql + column', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by as column + sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - t.deepEqual(result, [{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test.serial('select with group by complex query', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 
'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - t.deepEqual(result, [{ name: 'Jane' }]); -}); - -test.serial('build query', async (t) => { - const { db } = t.context; - - const query = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - t.deepEqual(query, { - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test.serial('insert sql', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('partial join with alias', async (t) => { - const { db } = t.context; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - t.deepEqual(result, [ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test.serial('full join with alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 
'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - t.deepEqual(result, [{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from alias', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - t.deepEqual(result, [{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert with spaces', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); -}); - -test.serial('prepared statement', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement reuse', async 
(t) => { - const { db } = t.context; - - const stmt = db - .insert(usersTable) - .values({ - verified: true, - name: placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - .from(usersTable); - - t.deepEqual(result, [ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test.serial('prepared statement with placeholder in .where', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); -}); - -test.serial('prepared statement with placeholder in .limit', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - t.is(result.length, 1); -}); - -test.serial('prepared statement with placeholder in .offset', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, 
{ name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - t.deepEqual(result, [{ id: 2, name: 'John1' }]); -}); - -// TODO change tests to new structure -test.serial('migrator : default migration strategy', async (t) => { - const { db } = t.context; - - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom schema', async (t) => { - const { db } = t.context; - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table 
all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); - -test.serial('migrator : migrate with custom table', async (t) => { - const { db } = t.context; - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test.serial('migrator : migrate with custom table and custom schema', async (t) => { - const { db } = t.context; - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, - }); - - // test if the custom migrations table was created - const { rowCount } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - t.true(rowCount > 0); - - // test if the migrated table are working as expected - 
await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - t.deepEqual(result, [{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); - -test.serial('insert via db.execute + select via db.execute', async (t) => { - const { db } = t.context; - - await db.execute( - sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - t.deepEqual(result.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute + returning', async (t) => { - const { db } = t.context; - - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert via db.execute w/ query builder', async (t) => { - const { db } = t.context; - - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - t.deepEqual(inserted.rows, [{ id: 1, name: 'John' }]); -}); - -test.serial('build query insert with onConflict do update', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("name", "jsonb") values ($1, $2) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query 
insert with onConflict do update / multiple columns', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("name", "jsonb") values ($1, $2) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test.serial('build query insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("name", "jsonb") values ($1, $2) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('build query insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - t.deepEqual(query, { - sql: 'insert into "users" ("name", "jsonb") values ($1, $2) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test.serial('insert with onConflict do update', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John1' }]); -}); - -test.serial('insert with onConflict do nothing', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await 
db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('insert with onConflict do nothing + target', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - t.deepEqual(res, [{ id: 1, name: 'John' }]); -}); - -test.serial('left join (flat object fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test.serial('left join (grouped fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - 
id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test.serial('left join (all fields)', async (t) => { - const { db } = t.context; - - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select() - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - t.deepEqual(res, [ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - state: null, - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test.serial('join subquery', async (t) => { - const { db } = t.context; - - await db - .insert(courseCategoriesTable) - .values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db - .insert(coursesTable) - .values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - 
.leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - t.deepEqual(res, [ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test.serial('with ... select', async (t) => { - const { db } = t.context; - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - t.deepEqual(result, [ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - 
productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test.serial('select from subquery sql', async (t) => { - const { db } = t.context; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - t.deepEqual(res, [{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test.serial('select a field without joining its table', (t) => { - const { db } = t.context; - - t.throws(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')); -}); - -test.serial('select all fields from subquery without alias', (t) => { - const { db } = t.context; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - t.throws(() => db.select().from(sq).prepare('query')); -}); - -test.serial('select count()', async (t) => { - const { db } = t.context; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - t.deepEqual(res, [{ count: '2' }]); -}); - -test.serial('select count w/ custom mapper', async (t) => { - const { db } = t.context; - - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - t.deepEqual(res, [{ count: 
2 }]); -}); - -test.serial('network types', async (t) => { - const { db } = t.context; - - const value: typeof network.$inferSelect = { - inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - }; - - await db.insert(network).values(value); - - const res = await db.select().from(network); - - t.deepEqual(res, [value]); -}); - -test.serial('array types', async (t) => { - const { db } = t.context; - - const values: typeof salEmp.$inferSelect[] = [ - { - name: 'John', - payByQuarter: [10000, 10000, 10000, 10000], - schedule: [['meeting', 'lunch'], ['training', 'presentation']], - }, - { - name: 'Carol', - payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], - }, - ]; - - await db.insert(salEmp).values(values); - - const res = await db.select().from(salEmp); - - t.deepEqual(res, values); -}); - -test.serial('select for ...', (t) => { - const { db } = t.context; - - { - const query = db - .select() - .from(users2Table) - .for('update') - .toSQL(); - - t.regex( - query.sql, - / for update$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('update', { of: [users2Table, coursesTable] }) - .toSQL(); - - t.regex( - query.sql, - / for update of "users2", "courses"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table }) - .toSQL(); - - t.regex( - query.sql, - /for no key update of "users2"$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('no key update', { of: users2Table, skipLocked: true }) - .toSQL(); - - t.regex( - query.sql, - / for no key update of "users2" skip locked$/, - ); - } - - { - const query = db - .select() - .from(users2Table) - .for('share', { of: users2Table, noWait: true }) - .toSQL(); - - t.regex( - query.sql, - // eslint-disable-next-line unicorn/better-regex - /for share of "users2" no wait$/, - ); - } -}); - 
-test.serial('having', async (t) => { - const { db } = t.context; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - t.deepEqual(result, [ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -test.serial('view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = 
await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); -}); - -test.serial('materialized view', async (t) => { - const { db } = t.context; - - const newYorkers1 = pgMaterializedView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }).existing(); - - await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, []); - } - - await db.refreshMaterializedView(newYorkers1); - - { - const result = await db.select().from(newYorkers1); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); 
- } - - { - const result = await db.select().from(newYorkers3); - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - t.deepEqual(result, [ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop materialized view ${newYorkers1}`); -}); - -// TODO: copy to SQLite and MySQL, add to docs -test.serial('select from raw sql', async (t) => { - const { db } = t.context; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John' }, - ]); -}); - -test.serial('select from raw sql with joins', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - t.deepEqual(result, [ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from select', async (t) => { - const { db } = t.context; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('join on aliased sql from with clause', async (t) => { - const { db } = t.context; - - const users = 
db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - t.deepEqual(result, [ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test.serial('prefixed table', async (t) => { - const { db } = t.context; - - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('select from enum', async (t) => { - const { db } = t.context; - - const muscleEnum = pgEnum('muscle', [ - 'abdominals', - 'hamstrings', - 'adductors', - 'quadriceps', - 'biceps', - 'shoulders', - 'chest', - 'middle_back', - 'calves', - 'glutes', - 'lower_back', - 'lats', - 'triceps', - 'traps', - 'forearms', - 'neck', - 'abductors', - ]); - - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - - const levelEnum = pgEnum('level', ['beginner', 
'intermediate', 'advanced']); - - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - - const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), - name: varchar('name').notNull(), - force: forceEnum('force'), - level: levelEnum('level'), - mechanic: mechanicEnum('mechanic'), - equipment: equipmentEnum('equipment'), - instructions: text('instructions'), - category: categoryEnum('category'), - primaryMuscles: muscleEnum('primary_muscles').array(), - secondaryMuscles: muscleEnum('secondary_muscles').array(), - createdAt: timestamp('created_at').notNull().default(sql`now()`), - updatedAt: timestamp('updated_at').notNull().default(sql`now()`), - }); - - await db.execute(sql`drop table if exists ${exercises}`); - await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - - await db.execute( - sql`create type ${ - name(muscleEnum.enumName) - } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, - ); - await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); - await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); - await db.execute(sql`create type ${name(mechanicEnum.enumName)} as 
enum ('compound', 'isolation')`); - await db.execute( - sql`create type ${ - name(equipmentEnum.enumName) - } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, - ); - await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); - await db.execute(sql` - create table ${exercises} ( - id serial primary key, - name varchar not null, - force force, - level level, - mechanic mechanic, - equipment equipment, - instructions text, - category category, - primary_muscles muscle[], - secondary_muscles muscle[], - created_at timestamp not null default now(), - updated_at timestamp not null default now() - ) - `); - - await db.insert(exercises).values({ - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - }); - - const result = await db.select().from(exercises); - - t.deepEqual(result, [ - { - id: 1, - name: 'Bench Press', - force: 'isotonic', - level: 'beginner', - mechanic: 'compound', - equipment: 'barbell', - instructions: - 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', - category: 'upper_body', - primaryMuscles: ['chest', 'triceps'], - secondaryMuscles: ['shoulders', 'traps'], - createdAt: result[0]!.createdAt, - updatedAt: result[0]!.updatedAt, - }, - ]); - - await db.execute(sql`drop table ${exercises}`); - await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); - await db.execute(sql`drop type ${name(forceEnum.enumName)}`); - await db.execute(sql`drop type ${name(levelEnum.enumName)}`); - await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); - await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); - await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -}); - -test.serial('orderBy with aliased column', (t) => { - const { db } = t.context; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - t.deepEqual(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test.serial('select from sql', async (t) => { - const { db } = t.context; - - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - await t.notThrowsAsync(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), 
- lt(metricEntry.createdAt, intervals.endTime), - ), - ) - .groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ); -}); - -test.serial('timestamp timezone', async (t) => { - const { db } = t.context; - - const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - t.assert(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 2000); - t.assert(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 2000); - - // check that the timestamps are set correctly for non default times - t.assert(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 2000); - t.assert(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 2000); -}); - -test.serial('transaction', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: 
serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test.serial('transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await t.throwsAsync(async () => - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - - const result = await db.select().from(users); - - t.deepEqual(result, []); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction', async (t) => { - const { db } = t.context; - - const users = 
pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('nested transaction rollback', async (t) => { - const { db } = t.context; - - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await t.throwsAsync(async () => - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }), { instanceOf: TransactionRollbackError }); - }); - - const result = await db.select().from(users); - - t.deepEqual(result, [{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join subquery with join', async (t) => { - const { db } = t.context; - - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if 
exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - t.deepEqual(mainQuery, [{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -test.serial('subquery with view', async (t) => { - const { db } = t.context; - - const users = pgTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await 
db.with(sq).select().from(sq); - - t.deepEqual(result, [ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('join view as subquery', async (t) => { - const { db } = t.context; - - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - t.deepEqual(result, [ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); -}); - -test.serial('table selection with single table', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: 
text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - - await db.insert(users).values({ name: 'John', cityId: 1 }); - - const result = await db.select({ users }).from(users); - - t.deepEqual(result, [{ users: { id: 1, name: 'John', cityId: 1 } }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('set null to jsonb field', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - jsonb: jsonb('jsonb'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, - ); - - const result = await db.insert(users).values({ jsonb: null }).returning(); - - t.deepEqual(result, [{ id: 1, jsonb: null }]); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('insert undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.notThrowsAsync(async () => await db.insert(users).values({ name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('update undefined', async (t) => { - const { db } = t.context; - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await t.throwsAsync(async () => await db.update(users).set({ name: undefined })); - await t.notThrowsAsync(async () => await 
db.update(users).set({ id: 1, name: undefined })); - - await db.execute(sql`drop table ${users}`); -}); - -test.serial('array operators', async (t) => { - const { db } = t.context; - - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: text('tags').array(), - }); - - await db.execute(sql`drop table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, - db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - t.deepEqual(contains, [{ id: 3 }, { id: 5 }]); - t.deepEqual(contained, [{ id: 1 }, { id: 2 }, { id: 3 }]); - t.deepEqual(overlaps, [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - t.deepEqual(withSubQuery, [{ id: 1 }, { id: 3 }, { id: 5 }]); -}); diff --git a/integration-tests/tests/version.test.ts b/integration-tests/tests/version.test.ts index b97d6202c..f12ae9e71 100644 --- a/integration-tests/tests/version.test.ts +++ b/integration-tests/tests/version.test.ts @@ -1,13 +1,13 @@ import 'dotenv/config'; -import test from 'ava'; import * as version from 'drizzle-orm/version'; +import { expect, test } from 'vitest'; import { z } from 'zod'; -test('shape', (t) => { +test('shape', () => { 
const shape = z.object({ compatibilityVersion: z.number(), npmVersion: z.string(), }); - t.notThrows(() => shape.parse(version)); + expect(() => shape.parse(version)).not.toThrowError(); }); diff --git a/integration-tests/tests/xata-http.test.ts b/integration-tests/tests/xata-http.test.ts deleted file mode 100644 index 8a70aca6c..000000000 --- a/integration-tests/tests/xata-http.test.ts +++ /dev/null @@ -1,2385 +0,0 @@ -import 'dotenv/config'; - -import { - and, - arrayContained, - arrayContains, - arrayOverlaps, - asc, - eq, - gt, - gte, - inArray, - lt, - name, - placeholder, - type SQL, - sql, - type SQLWrapper, -} from 'drizzle-orm'; -import { - alias, - boolean, - char, - cidr, - date, - inet, - integer, - interval, - jsonb, - macaddr, - macaddr8, - type PgColumn, - pgTable, - pgTableCreator, - serial, - text, - time, - timestamp, - uuid as pgUuid, -} from 'drizzle-orm/pg-core'; -import { drizzle } from 'drizzle-orm/xata-http'; -import type { XataHttpClient, XataHttpDatabase } from 'drizzle-orm/xata-http'; -import { migrate } from 'drizzle-orm/xata-http/migrator'; -import { v4 as uuid } from 'uuid'; -import { beforeAll, beforeEach, expect, test } from 'vitest'; -import { type Equal, Expect, randomString } from './utils.ts'; -import { getXataClient } from './xata/xata.ts'; - -const ENABLE_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), -}); - -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), -}); - -const coursesTable = pgTable('courses', { - id: 
serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), -}); - -const network = pgTable('network_table', { - inet: inet('inet').notNull(), - cidr: cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), - macaddr8: macaddr8('macaddr8').notNull(), -}); - -const salEmp = pgTable('sal_emp', { - name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const _tictactoe = pgTable('tictactoe', { - squares: integer('squares').array(3).array(3), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -let db: XataHttpDatabase; -let client: XataHttpClient; - -beforeAll(async () => { - const apiKey = process.env['XATA_API_KEY']; - if (!apiKey) { - throw new Error('XATA_API_KEY is not defined'); - } - - client = getXataClient(); - db = drizzle(client, { logger: ENABLE_LOGGING }); -}); - -beforeEach(async () => { - await db.execute(sql`drop table if exists users cascade`); - await db.execute(sql`drop table if exists cities cascade`); - await db.execute(sql`drop table if exists users2 cascade`); - await db.execute(sql`drop table if exists course_categories cascade`); - await db.execute(sql`drop table if exists courses cascade`); - await db.execute(sql`drop table if exists orders cascade`); - await db.execute(sql`drop table if exists network_table cascade`); - await db.execute(sql`drop table if exists sal_emp cascade`); - await db.execute(sql`drop table if 
exists tictactoe cascade`); - - await client.sql({ - statement: ` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - }); - await client.sql({ - statement: ` - create table cities ( - id serial primary key, - name text not null, - state char(2) - ) - `, - }); - await client.sql({ - statement: ` - create table users2 ( - id serial primary key, - name text not null, - city_id integer references cities(id) - ) - `, - }); - await client.sql({ - statement: ` - create table course_categories ( - id serial primary key, - name text not null - ) - `, - }); - await client.sql({ - statement: ` - create table courses ( - id serial primary key, - name text not null, - category_id integer references course_categories(id) - ) - `, - }); - await client.sql({ - statement: ` - create table orders ( - id serial primary key, - region text not null, - product text not null, - amount integer not null, - quantity integer not null - ) - `, - }); - await client.sql({ - statement: ` - create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null - ) - `, - }); - await client.sql({ - statement: ` - create table sal_emp ( - name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - }); - await client.sql({ - statement: ` - create table tictactoe ( - squares integer[3][3] not null - ) - `, - }); -}); - -test('select all fields', async () => { - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - expect(result[0]!.createdAt instanceof Date).toBeTruthy(); // eslint-disable-line no-instanceof/no-instanceof - expect(Math.abs(result[0]!.createdAt.getTime() - now) < 1000).toBeTruthy(); - expect(result).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: 
result[0]!.createdAt }, - ]); -}); - -test('select sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('select typed sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('select distinct', async () => { - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - - expect(users2.length).toEqual(2); - expect(users2[0]?.id).toEqual(1); - expect(users2[1]?.id).toEqual(2); - - expect(users3.length).toEqual(2); - expect(users3[0]?.name, 'Jane'); - expect(users3[1]?.name, 'John'); -}); - -test('insert returning sql', async () => { - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: 
sql`upper(${usersTable.name})`, - }); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('delete returning sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('update returning sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - expect(users).toEqual([{ name: 'JANE' }]); -}); - -test('update with returning all fields', async () => { - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - expect(users[0]!.createdAt instanceof Date).toBeTruthy(); // eslint-disable-line no-instanceof/no-instanceof - expect(Math.abs(users[0]!.createdAt.getTime() - now) < 1000).toBeTruthy(); - expect(users).toEqual([ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test('update with returning partial', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - expect(users).toEqual([{ id: 1, name: 'Jane' }]); -}); - -test('delete with returning all fields', async () => { - const now = Date.now(); - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - - expect(users[0]!.createdAt instanceof Date).toBeTruthy(); // eslint-disable-line no-instanceof/no-instanceof - 
expect(Math.abs(users[0]!.createdAt.getTime() - now) < 1000).toBeTruthy(); - expect(users).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); - -test('delete with returning partial', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); - - expect(users).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert + select', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - expect(result).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - expect(result2).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); -}); - -test('json insert', async () => { - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test('char insert', async () => { - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - expect(result).toEqual([{ id: 1, name: 'Austin', state: 'TX' }]); -}); - -test('char update', async () => { - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); - const result = 
await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - expect(result).toEqual([{ id: 1, name: 'Atlanta', state: 'GA' }]); -}); - -test('char delete', async () => { - await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); - await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) - .from(citiesTable); - - expect(result).toEqual([]); -}); - -test('insert with overridden default values', async () => { - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, - ]); -}); - -test('insert many', async () => { - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test('insert many with returning', async () => { - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: 
['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test('select with group by as field', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.name); - - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); -}); - -test('select with group by as sql', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); -}); - -test('select with group by as sql + column', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test('select with group by as column + sql', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); -}); - -test('select with group by complex query', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - expect(result).toEqual([{ name: 'Jane' }]); -}); - 
-test('build query', async () => { - const query = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - expect(query).toEqual({ - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - }); -}); - -test('insert sql', async () => { - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('partial join with alias', async () => { - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - expect(result).toEqual([ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test('full join with alias', async () => { - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - expect(result).toEqual([{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test('select 
from alias', async () => { - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - expect(result).toEqual([{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); -}); - -test('insert with spaces', async () => { - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); -}); - -test('prepared statement', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('prepared statement reuse', async () => { - const stmt = db - .insert(usersTable) - .values({ - verified: true, - name: placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - .from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { 
id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test('prepared statement with placeholder in .where', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('prepared statement with placeholder in .limit', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .limit(placeholder('limit')) - .prepare('stmt_limit'); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - expect(result.length).toEqual(1); -}); - -test('prepared statement with placeholder in .offset', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .offset(placeholder('offset')) - .prepare('stmt_offset'); - - const result = await stmt.execute({ offset: 1 }); - - expect(result).toEqual([{ id: 2, name: 'John1' }]); -}); - -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ 
name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table __drizzle_migrations`); -}); - -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { records } = await db.execute(sql`select * from ${sql.identifier(customTable)};`); - expect(records.length > 0).toBeTruthy(); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customTable)}`); -}); - -test('insert via db.execute + select via db.execute', async () => { - await db.execute( - sql`insert into ${usersTable} (${name(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - - expect(result.records).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert via db.execute + returning', async () => { - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - name( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(inserted.records).toEqual([{ 
id: 1, name: 'John' }]); -}); - -test('insert via db.execute w/ query builder', async () => { - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); -}); - -test('build query insert with onConflict do update', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test('build query insert with onConflict do update / multiple columns', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test('build query insert with onConflict do nothing', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test('build query insert with onConflict do nothing + target', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: 
usersTable.id }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - }); -}); - -test('insert with onConflict do update', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - expect(res).toEqual([{ id: 1, name: 'John1' }]); -}); - -test('insert with onConflict do nothing', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - expect(res).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert with onConflict do nothing + target', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - expect(res).toEqual([{ id: 1, name: 'John' }]); -}); - -test('left join (flat object fields)', async () => { - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: 
citiesTable.name, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - expect(res).toEqual([ - { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); -}); - -test('left join (grouped fields)', async () => { - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }) - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - expect(res).toEqual([ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); -}); - -test('left join (all fields)', async () => { - const { id: cityId } = await db - .insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]) - .returning({ id: citiesTable.id }) - .then((rows) => rows[0]!); - - await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); - - const res = await db - .select() - .from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - expect(res).toEqual([ - { - users2: { - id: 1, - name: 'John', - cityId, - }, - cities: { - id: cityId, - name: 'Paris', - state: null, - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); -}); - -test('join subquery', async () => { - await db - .insert(courseCategoriesTable) - .values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { 
name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db - .insert(coursesTable) - .values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - expect(res).toEqual([ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); -}); - -test('with ... 
select', async () => { - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - expect(result).toEqual([ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); -}); - -test('select from subquery sql', async () => { - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ 
name: sql`${users2Table.name} || ' modified'`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); -}); - -test('select a field without joining its table', () => { - expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); -}); - -test('select all fields from subquery without alias', () => { - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - expect(() => db.select().from(sq).prepare('query')).toThrowError; -}); - -test('select count()', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - expect(res).toEqual([{ count: 2 }]); -}); - -test('select count w/ custom mapper', async () => { - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { - const result = sql`count(${value})`.mapWith(Number); - if (!alias) { - return result; - } - return result.as(alias); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: count(sql`*`) }).from(usersTable); - - expect(res).toEqual([{ count: 2 }]); -}); - -test.skip('network types', async () => { - const value: typeof network.$inferSelect = { - inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - }; - - await db.insert(network).values(value); - - const res = await db.select().from(network); - - expect(res).toEqual([value]); -}); - -test.skip('array types', async () => { - const values: typeof salEmp.$inferSelect[] = [ - { - name: 'John', - payByQuarter: [10000, 10000, 10000, 10000], - 
schedule: [['meeting', 'lunch'], ['training', 'presentation']], - }, - { - name: 'Carol', - payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], - }, - ]; - - await db.insert(salEmp).values(values); - - const res = await db.select().from(salEmp); - - expect(res).toEqual(values); -}); - -// test('select for ...', (t) => { -// { -// const query = db -// .select() -// .from(users2Table) -// .for('update') -// .toSQL(); - -// t.regex( -// query.sql, -// / for update$/, -// ); -// } - -// { -// const query = db -// .select() -// .from(users2Table) -// .for('update', { of: [users2Table, coursesTable] }) -// .toSQL(); - -// t.regex( -// query.sql, -// / for update of "users2", "courses"$/, -// ); -// } - -// { -// const query = db -// .select() -// .from(users2Table) -// .for('no key update', { of: users2Table }) -// .toSQL(); - -// t.regex( -// query.sql, -// /for no key update of "users2"$/, -// ); -// } - -// { -// const query = db -// .select() -// .from(users2Table) -// .for('no key update', { of: users2Table, skipLocked: true }) -// .toSQL(); - -// t.regex( -// query.sql, -// / for no key update of "users2" skip locked$/, -// ); -// } - -// { -// const query = db -// .select() -// .from(users2Table) -// .for('share', { of: users2Table, noWait: true }) -// .toSQL(); - -// t.regex( -// query.sql, -// // eslint-disable-next-line unicorn/better-regex -// /for share of "users2" no wait$/, -// ); -// } -// }); - -test('having', async () => { - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, 
eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - expect(result).toEqual([ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); -}); - -// Not supported in Xata HTTP -// test('view', async () => { -// - -// const newYorkers1 = pgView('new_yorkers') -// .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - -// const newYorkers2 = pgView('new_yorkers', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// cityId: integer('city_id').notNull(), -// }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - -// const newYorkers3 = pgView('new_yorkers', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// cityId: integer('city_id').notNull(), -// }).existing(); - -// await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - -// await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - -// await db.insert(users2Table).values([ -// { name: 'John', cityId: 1 }, -// { name: 'Jane', cityId: 1 }, -// { name: 'Jack', cityId: 2 }, -// ]); - -// { -// const result = await db.select().from(newYorkers1); -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 2, name: 'Jane', cityId: 1 }, -// ]); -// } - -// { -// const result = await db.select().from(newYorkers2); -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 2, name: 'Jane', cityId: 1 }, -// ]); -// } - -// { -// const result = await db.select().from(newYorkers3); -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 2, name: 'Jane', cityId: 1 }, -// ]); -// } - -// { -// const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); -// expect(result, [ -// { name: 'John' }, -// { name: 'Jane' }, -// ]); 
-// } - -// await db.execute(sql`drop view ${newYorkers1}`); -// }); - -// test('materialized view', async () => { -// - -// const newYorkers1 = pgMaterializedView('new_yorkers') -// .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - -// const newYorkers2 = pgMaterializedView('new_yorkers', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// cityId: integer('city_id').notNull(), -// }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - -// const newYorkers3 = pgMaterializedView('new_yorkers', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// cityId: integer('city_id').notNull(), -// }).existing(); - -// await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); - -// await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - -// await db.insert(users2Table).values([ -// { name: 'John', cityId: 1 }, -// { name: 'Jane', cityId: 1 }, -// { name: 'Jack', cityId: 2 }, -// ]); - -// { -// const result = await db.select().from(newYorkers1); -// expect(result, []); -// } - -// await db.refreshMaterializedView(newYorkers1); - -// { -// const result = await db.select().from(newYorkers1); -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 2, name: 'Jane', cityId: 1 }, -// ]); -// } - -// { -// const result = await db.select().from(newYorkers2); -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 2, name: 'Jane', cityId: 1 }, -// ]); -// } - -// { -// const result = await db.select().from(newYorkers3); -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 2, name: 'Jane', cityId: 1 }, -// ]); -// } - -// { -// const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); -// expect(result, [ -// { name: 'John' }, -// { name: 'Jane' }, -// ]); -// } - -// await db.execute(sql`drop materialized view ${newYorkers1}`); -// }); - -// 
TODO: copy to SQLite and MySQL, add to docs -test('select from raw sql', async () => { - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - expect(result).toEqual([ - { id: 1, name: 'John' }, - ]); -}); - -test('select from raw sql with joins', async () => { - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - expect(result).toEqual([ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); -}); - -test('join on aliased sql from select', async () => { - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test('join on aliased sql from with clause', async () => { - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - 
cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect>; - - expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); -}); - -test('prefixed table', async () => { - const pgTable = pgTableCreator((name) => `myprefix_${name}`); - - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); -}); - -// Not supported in Xata -// test('select from enum', async () => { -// - -// const muscleEnum = pgEnum('muscle', [ -// 'abdominals', -// 'hamstrings', -// 'adductors', -// 'quadriceps', -// 'biceps', -// 'shoulders', -// 'chest', -// 'middle_back', -// 'calves', -// 'glutes', -// 'lower_back', -// 'lats', -// 'triceps', -// 'traps', -// 'forearms', -// 'neck', -// 'abductors', -// ]); - -// const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); - -// const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); - -// const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); - -// const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); - -// const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); - -// const exercises = pgTable('exercises', { -// id: serial('id').primaryKey(), -// name: varchar('name').notNull(), -// force: forceEnum('force'), -// level: levelEnum('level'), -// mechanic: mechanicEnum('mechanic'), -// equipment: 
equipmentEnum('equipment'), -// instructions: text('instructions'), -// category: categoryEnum('category'), -// primaryMuscles: muscleEnum('primary_muscles').array(), -// secondaryMuscles: muscleEnum('secondary_muscles').array(), -// createdAt: timestamp('created_at').notNull().default(sql`now()`), -// updatedAt: timestamp('updated_at').notNull().default(sql`now()`), -// }); - -// await db.execute(sql`drop table if exists ${exercises}`); -// await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); -// await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); -// await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); -// await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); -// await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); -// await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); - -// await db.execute( -// sql`create type ${ -// name(muscleEnum.enumName) -// } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, -// ); -// await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); -// await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); -// await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); -// await db.execute( -// sql`create type ${ -// name(equipmentEnum.enumName) -// } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, -// ); -// await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); -// await db.execute(sql` -// create table ${exercises} ( -// id serial primary key, -// name varchar not null, -// force force, -// level level, -// mechanic 
mechanic, -// equipment equipment, -// instructions text, -// category category, -// primary_muscles muscle[], -// secondary_muscles muscle[], -// created_at timestamp not null default now(), -// updated_at timestamp not null default now() -// ) -// `); - -// await db.insert(exercises).values({ -// name: 'Bench Press', -// force: 'isotonic', -// level: 'beginner', -// mechanic: 'compound', -// equipment: 'barbell', -// instructions: -// 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', -// category: 'upper_body', -// primaryMuscles: ['chest', 'triceps'], -// secondaryMuscles: ['shoulders', 'traps'], -// }); - -// const result = await db.select().from(exercises); - -// expect(result, [ -// { -// id: 1, -// name: 'Bench Press', -// force: 'isotonic', -// level: 'beginner', -// mechanic: 'compound', -// equipment: 'barbell', -// instructions: -// 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', -// category: 'upper_body', -// primaryMuscles: ['chest', 'triceps'], -// secondaryMuscles: ['shoulders', 'traps'], -// createdAt: result[0]!.createdAt, -// updatedAt: result[0]!.updatedAt, -// }, -// ]); - -// await db.execute(sql`drop table ${exercises}`); -// await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); -// await db.execute(sql`drop type ${name(forceEnum.enumName)}`); -// await db.execute(sql`drop type ${name(levelEnum.enumName)}`); -// await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); -// await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); -// await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); -// }); - -test('orderBy with aliased column', () => { - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - expect(query.sql, 'select something as "test" from "users2" order by "test"'); -}); - -test('select from sql', async () => { - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), - createdAt: timestamp('created_at').notNull(), - }); - - await db.execute(sql`drop table if exists ${metricEntry}`); - await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); - - const metricId = uuid(); - - const intervals = db.$with('intervals').as( - db - .select({ - startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), - endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), - }) - .from(sql`generate_series(0, 29, 1) as t(x)`), - ); - - expect(() => - db - .with(intervals) - .select({ - startTime: intervals.startTime, - endTime: intervals.endTime, - count: sql`count(${metricEntry})`, - }) - .from(metricEntry) - .rightJoin( - intervals, - and( - eq(metricEntry.id, metricId), - gte(metricEntry.createdAt, intervals.startTime), - lt(metricEntry.createdAt, intervals.endTime), - ), - ) - 
.groupBy(intervals.startTime, intervals.endTime) - .orderBy(asc(intervals.startTime)) - ).not.toThrowError(); -}); - -test.skip('timestamp timezone', async () => { - const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), - updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), - }); - - await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); - - await db.execute( - sql` - create table users_test_with_and_without_timezone ( - id serial not null primary key, - name text not null, - created_at timestamptz not null default now(), - updated_at timestamp not null default now() - ) - `, - ); - - const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); - - await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); - await db.insert(usersTableWithAndWithoutTimezone).values({ - name: 'Without default times', - createdAt: date, - updatedAt: date, - }); - const users = await db.select().from(usersTableWithAndWithoutTimezone); - - // check that the timestamps are set correctly for default times - expect(Math.abs(users[0]!.updatedAt.getTime() - Date.now()) < 3000).toBeTruthy(); - expect(Math.abs(users[0]!.createdAt.getTime() - Date.now()) < 3000).toBeTruthy(); - - // check that the timestamps are set correctly for non default times - expect(Math.abs(users[1]!.updatedAt.getTime() - date.getTime()) < 3000).toBeTruthy(); - expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime()) < 3000).toBeTruthy(); -}); - -test.skip('all date and time columns', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 }).notNull(), - datetime: timestamp('datetime').notNull(), - 
datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), - datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - interval: interval('interval').notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null, - datetime_wtz_string timestamp with time zone not null, - interval interval not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - datetimeWTZString: '2022-01-01T00:00:00.123Z', - interval: '1 day', - }); - - const result = await db.select().from(table); - - Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - }[], typeof result> - >; - - Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - datetimeWTZString: string; - interval: string; - id?: number | undefined; - }, typeof table.$inferInsert> - >; - - expect(result).toEqual([ - { - id: 1, - dateString: '2022-01-01', - time: 
someTime, - datetime: someDatetime, - datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' '), - datetimeWTZString: '2022-01-01 00:00:00.123+00', - interval: '1 day', - }, - ]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.skip('all date and time columns with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - timestampAsDate: timestamp('timestamp_date', { withTimezone: true, precision: 3 }).notNull(), - timestampTimeZones: timestamp('timestamp_date_2', { withTimezone: true, precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null, - timestamp_date timestamp(3) with time zone not null, - timestamp_date_2 timestamp(3) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - const timestampDate = new Date(); - const timestampDateWTZ = new Date('2022-01-01 00:00:00.123 +0500'); - - const timestampString2 = '2022-01-01 00:00:00.123456-0400'; - const timestampDate2 = new Date(); - const timestampDateWTZ2 = new Date('2022-01-01 00:00:00.123 +0200'); - - await db.insert(table).values([ - { timestamp: timestampString, timestampAsDate: timestampDate, timestampTimeZones: timestampDateWTZ }, - { timestamp: timestampString2, timestampAsDate: timestampDate2, timestampTimeZones: timestampDateWTZ2 }, - ]); - - const result = await db.select().from(table); - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - timestamp_date: string; - timestamp_date_2: string; - }>(sql`select * from ${table}`); - - // Whatever you put in, you get back when you're using the date mode - // But when using the string 
mode, postgres returns a string transformed into UTC - expect(result).toEqual([ - { - id: 1, - timestamp: '2022-01-01T02:00:00.123456Z', - timestampAsDate: timestampDate, - timestampTimeZones: timestampDateWTZ, - }, - { - id: 2, - timestamp: '2022-01-01T04:00:00.123456Z', - timestampAsDate: timestampDate2, - timestampTimeZones: timestampDateWTZ2, - }, - ]); - - expect(result2.records).toEqual([ - { - id: 1, - timestamp_string: '2022-01-01 02:00:00.123456+00', - timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') + '+00', - }, - { - id: 2, - timestamp_string: '2022-01-01 04:00:00.123456+00', - timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') + '+00', - }, - ]); - - expect( - result[0]?.timestampTimeZones.getTime(), - ).toEqual( - new Date((result2.records[0] as any).timestamp_date_2 as any).getTime(), - ); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.skip('all date and time columns without timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestampString: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - timestampString2: timestamp('timestamp_string2', { precision: 3, mode: 'string' }).notNull(), - timestampDate: timestamp('timestamp_date', { precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null, - timestamp_string2 timestamp(3) not null, - timestamp_date timestamp(3) not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456'; - const timestampString2 = '2022-01-02 00:00:00.123 -0300'; - const timestampDate = new Date('2022-01-01 00:00:00.123Z'); - - 
const timestampString_2 = '2022-01-01 00:00:00.123456'; - const timestampString2_2 = '2022-01-01 00:00:00.123 -0300'; - const timestampDate2 = new Date('2022-01-01 00:00:00.123 +0200'); - - await db.insert(table).values([ - { timestampString, timestampString2, timestampDate }, - { timestampString: timestampString_2, timestampString2: timestampString2_2, timestampDate: timestampDate2 }, - ]); - - const result = await db.select().from(table); - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - timestamp_string2: string; - timestamp_date: string; - }>(sql`select * from ${table}`); - - // Whatever you put in, you get back when you're using the date mode - // But when using the string mode, postgres returns a string transformed into UTC - expect(result).toEqual([ - { - id: 1, - timestampString: timestampString, - timestampString2: '2022-01-02 00:00:00.123', - timestampDate: timestampDate, - }, - { - id: 2, - timestampString: timestampString_2, - timestampString2: '2022-01-01 00:00:00.123', - timestampDate: timestampDate2, - }, - ]); - - expect(result2.records).toEqual([ - { - id: 1, - timestamp_string: timestampString, - timestamp_string2: '2022-01-02 00:00:00.123', - timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', ''), - }, - { - id: 2, - timestamp_string: timestampString_2, - timestamp_string2: '2022-01-01 00:00:00.123', - timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', ''), - }, - ]); - - expect((result2.records[0] as any).timestamp_string).toEqual('2022-01-01 00:00:00.123456'); - // need to add the 'Z', otherwise javascript assumes it's in local time - expect(new Date((result2.records[0] as any).timestamp_date + 'Z' as any).getTime()).toEqual(timestampDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test('transaction', async () => { - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: 
integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); - - await expect( - db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }), - ).rejects.toThrowError('No transactions support in Xata Http driver'); - - // t.is(error!.message, 'No transactions support in Xata Http driver'); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test('join subquery with join', async () => { - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), - }); - - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), - }); - - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not 
null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - expect(mainQuery).toEqual([{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); -}); - -// Not supported in Xata -// test('subquery with view', async () => { -// - -// const users = pgTable('users_subquery_view', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// cityId: integer('city_id').notNull(), -// }); - -// const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - -// await db.execute(sql`drop table if exists ${users}`); -// await db.execute(sql`drop view if exists ${newYorkers}`); - -// await db.execute( -// sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, -// ); -// await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - -// await db.insert(users).values([ -// { name: 'John', cityId: 1 }, -// { name: 'Jane', cityId: 2 }, -// { name: 'Jack', cityId: 1 }, -// { name: 'Jill', cityId: 2 }, -// ]); - -// const sq = db.$with('sq').as(db.select().from(newYorkers)); -// const result = await db.with(sq).select().from(sq); - -// expect(result, [ -// { id: 1, name: 'John', cityId: 1 }, -// { id: 3, name: 
'Jack', cityId: 1 }, -// ]); - -// await db.execute(sql`drop view ${newYorkers}`); -// await db.execute(sql`drop table ${users}`); -// }); - -// test('join view as subquery', async () => { -// - -// const users = pgTable('users_join_view', { -// id: serial('id').primaryKey(), -// name: text('name').notNull(), -// cityId: integer('city_id').notNull(), -// }); - -// const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - -// await db.execute(sql`drop table if exists ${users}`); -// await db.execute(sql`drop view if exists ${newYorkers}`); - -// await db.execute( -// sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, -// ); -// await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - -// await db.insert(users).values([ -// { name: 'John', cityId: 1 }, -// { name: 'Jane', cityId: 2 }, -// { name: 'Jack', cityId: 1 }, -// { name: 'Jill', cityId: 2 }, -// ]); - -// const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - -// const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - -// expect(result, [ -// { -// users_join_view: { id: 1, name: 'John', cityId: 1 }, -// new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, -// }, -// { -// users_join_view: { id: 2, name: 'Jane', cityId: 2 }, -// new_yorkers_sq: null, -// }, -// { -// users_join_view: { id: 3, name: 'Jack', cityId: 1 }, -// new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, -// }, -// { -// users_join_view: { id: 4, name: 'Jill', cityId: 2 }, -// new_yorkers_sq: null, -// }, -// ]); - -// await db.execute(sql`drop view ${newYorkers}`); -// await db.execute(sql`drop table ${users}`); -// }); - -test('table selection with single table', async () => { - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: integer('city_id').notNull(), - }); - - await db.execute(sql`drop table if 
exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - - await db.insert(users).values({ name: 'John', cityId: 1 }); - - const result = await db.select({ users }).from(users); - - expect(result).toEqual([{ users: { id: 1, name: 'John', cityId: 1 } }]); - - await db.execute(sql`drop table ${users}`); -}); - -test('set null to jsonb field', async () => { - const users = pgTable('users', { - id: serial('id').primaryKey(), - jsonb: jsonb('jsonb'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, - ); - - const result = await db.insert(users).values({ jsonb: null }).returning(); - - expect(result).toEqual([{ id: 1, jsonb: null }]); - - await db.execute(sql`drop table ${users}`); -}); - -test('insert undefined', async () => { - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await expect(db.insert(users).values({ name: undefined })).resolves.not.toThrowError(); - - await db.execute(sql`drop table ${users}`); -}); - -test('update undefined', async () => { - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - expect(() => db.update(users).set({ name: undefined })).toThrowError(); - await expect(db.update(users).set({ id: 1, name: undefined })).resolves.not.toThrowError(); - - await db.execute(sql`drop table ${users}`); -}); - -test('array operators', async () => { - const posts = pgTable('posts', { - id: serial('id').primaryKey(), - tags: 
text('tags').array(), - }); - - await db.execute(sql`drop table if exists ${posts}`); - - await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, - ); - - await db.insert(posts).values([{ - tags: ['ORM'], - }, { - tags: ['Typescript'], - }, { - tags: ['Typescript', 'ORM'], - }, { - tags: ['Typescript', 'Frontend', 'React'], - }, { - tags: ['Typescript', 'ORM', 'Database', 'Postgres'], - }, { - tags: ['Java', 'Spring', 'OOP'], - }]); - - const contains = await db.select({ id: posts.id }).from(posts) - .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); - const contained = await db.select({ id: posts.id }).from(posts) - .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); - const overlaps = await db.select({ id: posts.id }).from(posts) - .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); - const withSubQuery = await db.select({ id: posts.id }).from(posts) - .where(arrayContains( - posts.tags, - db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), - )); - - expect(contains).toEqual([{ id: 3 }, { id: 5 }]); - expect(contained).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); - expect(overlaps).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); - expect(withSubQuery).toEqual([{ id: 1 }, { id: 3 }, { id: 5 }]); -}); diff --git a/integration-tests/tests/xata/xata.ts b/integration-tests/tests/xata/xata.ts index 718c99a69..e805b209e 100644 --- a/integration-tests/tests/xata/xata.ts +++ b/integration-tests/tests/xata/xata.ts @@ -15,7 +15,7 @@ const defaultOptions = { databaseURL: 'https://Andrii-Sherman-s-workspace-2r5ujp.us-east-1.xata.sh/db/integration-tests', }; -// eslint-disable-next-line drizzle/require-entity-kind +// eslint-disable-next-line drizzle-internal/require-entity-kind export class XataClient extends DatabaseClient { constructor(options?: BaseClientOptions) { super({ ...defaultOptions, ...options }, tables); diff --git a/integration-tests/tsconfig.json b/integration-tests/tsconfig.json 
index 6def1ee38..38541f8e2 100644 --- a/integration-tests/tsconfig.json +++ b/integration-tests/tsconfig.json @@ -1,11 +1,12 @@ { "extends": "../tsconfig.json", "compilerOptions": { + "checkJs": false, "noEmit": true, "paths": { "~/*": ["./tests/*"] } }, "include": ["tests", "type-tests"], - "exclude": ["**/playground", "**/.sst"] + "exclude": ["**/playground", "**/.sst", "tests/prisma/*/client/**/*.js"] } diff --git a/integration-tests/vitest.config.ts b/integration-tests/vitest.config.ts index e9ecf0bd6..defc44cc4 100644 --- a/integration-tests/vitest.config.ts +++ b/integration-tests/vitest.config.ts @@ -1,5 +1,4 @@ import 'dotenv/config'; -import { viteCommonjs } from '@originjs/vite-plugin-commonjs'; import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; @@ -8,27 +7,30 @@ export default defineConfig({ include: [ 'tests/extensions/postgis/**/*', 'tests/relational/**/*.test.ts', - 'tests/libsql-batch.test.ts', - 'tests/d1-batch.test.ts', - 'tests/sqlite-proxy-batch.test.ts', - 'tests/neon-http-batch.test.ts', + 'tests/pg/**/*.test.ts', + 'tests/mysql/**/*.test.ts', + 'tests/sqlite/**/*.test.ts', 'tests/replicas/**/*', 'tests/imports/**/*', - 'tests/xata-http.test.ts', 'tests/extensions/vectors/**/*', - 'tests/tidb-serverless.test.ts', - // 'tests/awsdatapi.test.ts', + 'tests/version.test.ts', ], exclude: [ ...(process.env.SKIP_EXTERNAL_DB_TESTS ? [ 'tests/relational/mysql.planetscale.test.ts', 'tests/neon-http-batch.test.ts', - 'tests/xata-http.test.ts', - 'tests/tidb-serverless.test.ts', + // 'tests/pg/xata-http.test.ts', + 'tests/mysql/tidb-serverless.test.ts', ] : []), + 'tests/pg/awsdatapi.test.ts', + 'tests/awsdatapi.alltypes.test.ts', + 'tests/pg/vercel-pg.test.ts', 'tests/relational/vercel.test.ts', + // Have a strange "invalid SQL: ERROR: must be owner of schema public" error. 
Will need to check with xata team + 'tests/pg/xata-http.test.ts', + 'tests/pg/neon-http-batch.ts', ], typecheck: { tsconfig: 'tsconfig.json', @@ -36,6 +38,11 @@ export default defineConfig({ testTimeout: 100000, hookTimeout: 100000, isolate: false, + poolOptions: { + threads: { + singleThread: true, + }, + }, }, - plugins: [viteCommonjs(), tsconfigPaths()], + plugins: [tsconfigPaths()], }); diff --git a/package.json b/package.json index 431fd321e..3327aad18 100755 --- a/package.json +++ b/package.json @@ -11,34 +11,35 @@ "lint": "concurrently -n eslint,dprint \"eslint --ext ts .\" \"dprint check --list-different\"" }, "devDependencies": { - "@arethetypeswrong/cli": "^0.12.1", + "@arethetypeswrong/cli": "^0.15.3", "@trivago/prettier-plugin-sort-imports": "^4.2.0", "@typescript-eslint/eslint-plugin": "^6.7.3", "@typescript-eslint/experimental-utils": "^5.62.0", "@typescript-eslint/parser": "^6.7.3", "bun-types": "^1.0.3", "concurrently": "^8.2.1", - "dprint": "^0.45.0", + "dprint": "^0.46.2", "drizzle-kit": "^0.19.13", "drizzle-orm": "workspace:./drizzle-orm/dist", "drizzle-orm-old": "npm:drizzle-orm@^0.27.2", "eslint": "^8.50.0", - "eslint-plugin-drizzle": "link:eslint/eslint-plugin-drizzle", + "eslint-plugin-drizzle-internal": "link:eslint/eslint-plugin-drizzle-internal", "eslint-plugin-import": "^2.28.1", "eslint-plugin-no-instanceof": "^1.0.1", "eslint-plugin-unicorn": "^48.0.1", "eslint-plugin-unused-imports": "^3.0.0", "glob": "^10.3.10", "prettier": "^3.0.3", - "recast": "^0.23.4", + "recast": "^0.23.9", "resolve-tspaths": "^0.8.16", "tsup": "^7.2.0", + "tsx": "^4.10.5", "turbo": "^1.10.14", - "typescript": "5.2.2" + "typescript": "5.4.5" }, "pnpm": { "patchedDependencies": { - "typescript@5.2.2": "patches/typescript@5.2.2.patch" + "typescript@5.4.5": "patches/typescript@5.4.5.patch" } } } diff --git a/patches/typescript@5.2.2.patch b/patches/typescript@5.4.5.patch similarity index 71% rename from patches/typescript@5.2.2.patch rename to 
patches/typescript@5.4.5.patch index e054837f8..41abe4d7b 100644 --- a/patches/typescript@5.2.2.patch +++ b/patches/typescript@5.4.5.patch @@ -1,8 +1,8 @@ diff --git a/lib/tsserver.js b/lib/tsserver.js -index 382e1e2937fd02bed4c84b52f366049f2060ef1f..3ac8abaa9b30f0bcfb504220775ef8f3ee63eac3 100644 +index 3f1ce62663e3c32aa487f0fc7dcb3dd940e7cd24..559f2f70531180c5d54d98b18ae54a67eab54e1d 100644 --- a/lib/tsserver.js +++ b/lib/tsserver.js -@@ -15053,7 +15053,7 @@ function isRestParameter(node) { +@@ -15203,7 +15203,7 @@ function isInternalDeclaration(node, sourceFile) { // src/compiler/utilities.ts var resolvingEmptyArray = []; var externalHelpersModuleNameText = "tslib"; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f06a6ae2a..38a6aaa6a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -5,29 +5,29 @@ settings: excludeLinksFromLockfile: false patchedDependencies: - typescript@5.2.2: - hash: wmhs4olj6eveeldp6si4l46ssq - path: patches/typescript@5.2.2.patch + typescript@5.4.5: + hash: q3iy4fwdhi5sis3wty7d4nbsme + path: patches/typescript@5.4.5.patch importers: .: devDependencies: '@arethetypeswrong/cli': - specifier: ^0.12.1 - version: 0.12.1(encoding@0.1.13) + specifier: ^0.15.3 + version: 0.15.3 '@trivago/prettier-plugin-sort-imports': specifier: ^4.2.0 version: 4.2.0(prettier@3.0.3) '@typescript-eslint/eslint-plugin': specifier: ^6.7.3 - version: 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@typescript-eslint/experimental-utils': specifier: ^5.62.0 - version: 5.62.0(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) 
'@typescript-eslint/parser': specifier: ^6.7.3 - version: 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) bun-types: specifier: ^1.0.3 version: 1.0.3 @@ -35,8 +35,8 @@ importers: specifier: ^8.2.1 version: 8.2.1 dprint: - specifier: ^0.45.0 - version: 0.45.0 + specifier: ^0.46.2 + version: 0.46.3 drizzle-kit: specifier: ^0.19.13 version: 0.19.13 @@ -45,16 +45,16 @@ importers: version: link:drizzle-orm/dist drizzle-orm-old: specifier: npm:drizzle-orm@^0.27.2 - version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.549.0)(@cloudflare/workers-types@4.20230904.0)(@libsql/client@0.5.6(encoding@0.1.13))(@neondatabase/serverless@0.9.0)(@opentelemetry/api@1.4.1)(@planetscale/database@1.16.0)(@types/better-sqlite3@7.6.4)(@types/pg@8.10.1)(@types/sql.js@1.4.4)(@vercel/postgres@0.8.0)(better-sqlite3@8.4.0)(bun-types@1.0.3)(knex@2.4.2(better-sqlite3@8.4.0)(mysql2@3.3.3)(pg@8.11.0)(sqlite3@5.1.6(encoding@0.1.13)))(kysely@0.25.0)(mysql2@3.3.3)(pg@8.11.0)(postgres@3.3.5)(sql.js@1.8.0)(sqlite3@5.1.6(encoding@0.1.13)) + version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.6.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@10.0.0)(bun-types@1.0.3)(knex@3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.27.3)(mysql2@3.9.8)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) eslint: specifier: ^8.50.0 version: 8.50.0 - eslint-plugin-drizzle: - specifier: link:eslint/eslint-plugin-drizzle - version: link:eslint/eslint-plugin-drizzle + eslint-plugin-drizzle-internal: + specifier: link:eslint/eslint-plugin-drizzle-internal + version: link:eslint/eslint-plugin-drizzle-internal eslint-plugin-import: specifier: ^2.28.1 - version: 
2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0) + version: 2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0) eslint-plugin-no-instanceof: specifier: ^1.0.1 version: 1.0.1 @@ -63,7 +63,7 @@ importers: version: 48.0.1(eslint@8.50.0) eslint-plugin-unused-imports: specifier: ^3.0.0 - version: 3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0) + version: 3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0) glob: specifier: ^10.3.10 version: 10.3.10 @@ -71,32 +71,35 @@ importers: specifier: ^3.0.3 version: 3.0.3 recast: - specifier: ^0.23.4 - version: 0.23.4 + specifier: ^0.23.9 + version: 0.23.9 resolve-tspaths: specifier: ^0.8.16 - version: 0.8.16(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 0.8.16(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) tsup: specifier: ^7.2.0 - version: 7.2.0(postcss@8.4.38)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 7.2.0(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + tsx: + specifier: ^4.10.5 + version: 4.10.5 turbo: specifier: ^1.10.14 version: 1.10.14 typescript: - specifier: 5.2.2 - version: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + specifier: 5.4.5 + version: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) drizzle-orm: devDependencies: '@aws-sdk/client-rds-data': specifier: ^3.549.0 - version: 3.549.0 + version: 3.583.0 '@cloudflare/workers-types': specifier: 
^4.20230904.0 - version: 4.20230904.0 + version: 4.20240512.0 '@electric-sql/pglite': specifier: ^0.1.1 - version: 0.1.1 + version: 0.1.5 '@libsql/client': specifier: ^0.5.6 version: 0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) @@ -105,55 +108,58 @@ importers: version: 0.9.0 '@op-engineering/op-sqlite': specifier: ^2.0.16 - version: 2.0.16(react-native@0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3))(react@18.2.0) + version: 2.0.22(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1) '@opentelemetry/api': specifier: ^1.4.1 - version: 1.4.1 + version: 1.8.0 '@originjs/vite-plugin-commonjs': specifier: ^1.0.3 version: 1.0.3 '@planetscale/database': specifier: ^1.16.0 - version: 1.16.0 + version: 1.18.0 + '@prisma/client': + specifier: 5.14.0 + version: 5.14.0(prisma@5.14.0) '@tidbcloud/serverless': specifier: ^0.1.1 version: 0.1.1 '@types/better-sqlite3': specifier: ^7.6.4 - version: 7.6.4 + version: 7.6.10 '@types/node': specifier: ^20.2.5 - version: 20.2.5 + version: 20.12.12 '@types/pg': specifier: ^8.10.1 - version: 8.10.1 + version: 8.11.6 '@types/react': specifier: ^18.2.45 - version: 18.2.45 + version: 18.3.1 '@types/sql.js': specifier: ^1.4.4 - version: 1.4.4 + version: 1.4.9 '@vercel/postgres': specifier: ^0.8.0 version: 0.8.0 '@xata.io/client': specifier: ^0.29.3 - version: 0.29.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 0.29.4(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) better-sqlite3: specifier: ^8.4.0 - version: 8.4.0 + version: 8.7.0 bun-types: specifier: ^0.6.6 - version: 0.6.6 + version: 0.6.14 cpy: specifier: ^10.1.0 version: 10.1.0 expo-sqlite: specifier: ^13.2.0 - version: 
13.2.0(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + version: 13.4.0(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) knex: specifier: ^2.4.2 - version: 2.4.2(better-sqlite3@8.4.0)(mysql2@3.3.3)(pg@8.11.0)(sqlite3@5.1.6(encoding@0.1.13)) + version: 2.5.1(better-sqlite3@8.7.0)(mysql2@3.3.3)(pg@8.11.5)(sqlite3@5.1.7) kysely: specifier: ^0.25.0 version: 0.25.0 @@ -162,34 +168,37 @@ importers: version: 3.3.3 pg: specifier: ^8.11.0 - version: 8.11.0 + version: 8.11.5 postgres: specifier: ^3.3.5 - version: 3.3.5 + version: 3.4.4 + prisma: + specifier: 5.14.0 + version: 5.14.0 react: specifier: ^18.2.0 - version: 18.2.0 + version: 18.3.1 sql.js: specifier: ^1.8.0 - version: 1.8.0 + version: 1.10.3 sqlite3: specifier: ^5.1.2 - version: 5.1.6(encoding@0.1.13) + version: 5.1.7 tslib: specifier: ^2.5.2 - version: 2.5.2 + version: 2.6.2 tsx: specifier: ^3.12.7 - version: 3.12.7 + version: 3.14.0 vite-tsconfig-paths: - specifier: ^4.2.0 - version: 4.2.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))(vite@4.3.9(@types/node@20.2.5)(terser@5.30.3)) + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) vitest: - specifier: ^0.31.4 - version: 0.31.4(@vitest/ui@0.31.4)(terser@5.30.3) + specifier: ^1.6.0 + version: 1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) zod: specifier: ^3.20.2 - version: 3.21.4 + version: 3.23.7 zx: specifier: ^7.2.2 version: 7.2.2 @@ -201,16 +210,13 @@ importers: version: 0.4.1(rollup@3.27.2) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 
11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@sinclair/typebox': specifier: ^0.29.6 version: 0.29.6 '@types/node': specifier: ^18.15.10 version: 18.15.10 - ava: - specifier: ^5.1.0 - version: 5.3.0 cpy: specifier: ^10.1.0 version: 10.1.0 @@ -223,9 +229,12 @@ importers: rollup: specifier: ^3.20.7 version: 3.27.2 - tsx: - specifier: ^3.12.2 - version: 3.12.7 + vite-tsconfig-paths: + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) + vitest: + specifier: ^1.6.0 + version: 1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) zx: specifier: ^7.2.2 version: 7.2.2 @@ -237,13 +246,10 @@ importers: version: 0.4.1(rollup@3.27.2) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@types/node': specifier: ^18.15.10 version: 18.15.10 - ava: - specifier: ^5.1.0 - version: 5.3.0 cpy: specifier: ^10.1.0 version: 10.1.0 @@ -256,12 +262,15 @@ importers: rollup: specifier: ^3.20.7 version: 3.27.2 - tsx: - specifier: ^3.12.2 - version: 3.12.7 valibot: specifier: ^0.30.0 version: 0.30.0 + vite-tsconfig-paths: + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) + vitest: + specifier: ^1.6.0 + version: 1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) zx: specifier: ^7.2.2 version: 7.2.2 @@ -273,13 +282,10 @@ importers: version: 0.4.1(rollup@3.20.7) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 
11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@types/node': specifier: ^18.15.10 version: 18.15.10 - ava: - specifier: ^5.1.0 - version: 5.2.0 cpy: specifier: ^10.1.0 version: 10.1.0 @@ -292,9 +298,12 @@ importers: rollup: specifier: ^3.20.7 version: 3.20.7 - tsx: - specifier: ^3.12.2 - version: 3.12.6 + vite-tsconfig-paths: + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) + vitest: + specifier: ^1.6.0 + version: 1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) zod: specifier: ^3.20.2 version: 3.21.4 @@ -309,13 +318,13 @@ importers: version: 20.10.1 '@typescript-eslint/parser': specifier: ^6.10.0 - version: 6.10.0(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 6.10.0(eslint@8.53.0)(typescript@5.2.2) '@typescript-eslint/rule-tester': specifier: ^6.10.0 - version: 6.10.0(@eslint/eslintrc@3.0.2)(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 6.10.0(@eslint/eslintrc@3.1.0)(eslint@8.53.0)(typescript@5.2.2) '@typescript-eslint/utils': specifier: ^6.10.0 - version: 6.10.0(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 6.10.0(eslint@8.53.0)(typescript@5.2.2) cpy-cli: specifier: ^5.0.0 version: 5.0.0 @@ -324,25 +333,25 @@ importers: version: 8.53.0 typescript: specifier: ^5.2.2 - version: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + version: 5.2.2 vitest: - specifier: ^0.34.6 - version: 0.34.6(@vitest/ui@0.31.4)(terser@5.30.3) + specifier: ^1.6.0 + version: 1.6.0(@types/node@20.10.1)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) integration-tests: dependencies: '@aws-sdk/client-rds-data': specifier: ^3.549.0 - version: 3.549.0 + version: 3.583.0 '@aws-sdk/credential-providers': specifier: ^3.549.0 - version: 3.549.0 + version: 
3.569.0(@aws-sdk/client-sso-oidc@3.583.0) '@electric-sql/pglite': specifier: ^0.1.1 - version: 0.1.1 + version: 0.1.5 '@libsql/client': specifier: ^0.5.6 - version: 0.5.6(bufferutil@4.0.7)(encoding@0.1.13)(utf-8-validate@6.0.3) + version: 0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) '@miniflare/d1': specifier: ^2.14.2 version: 2.14.2 @@ -351,28 +360,37 @@ importers: version: 2.14.2 '@planetscale/database': specifier: ^1.16.0 - version: 1.16.0 + version: 1.18.0 + '@prisma/client': + specifier: 5.14.0 + version: 5.14.0(prisma@5.14.0) '@tidbcloud/serverless': specifier: ^0.1.1 version: 0.1.1 '@typescript/analyze-trace': specifier: ^0.10.0 - version: 0.10.0 + version: 0.10.1 '@vercel/postgres': - specifier: ^0.3.0 - version: 0.3.0 + specifier: ^0.8.0 + version: 0.8.0 '@xata.io/client': specifier: ^0.29.3 - version: 0.29.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + version: 0.29.4(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + async-retry: + specifier: ^1.3.3 + version: 1.3.3 better-sqlite3: specifier: ^8.4.0 - version: 8.4.0 + version: 8.7.0 dockerode: specifier: ^3.3.4 version: 3.3.5 dotenv: specifier: ^16.1.4 - version: 16.1.4 + version: 16.4.5 + drizzle-prisma-generator: + specifier: ^0.1.2 + version: 0.1.4 drizzle-typebox: specifier: workspace:../drizzle-typebox/dist version: link:../drizzle-typebox/dist @@ -384,92 +402,101 @@ importers: version: link:../drizzle-zod/dist express: specifier: ^4.18.2 - version: 4.18.2 + version: 4.19.2 get-port: specifier: ^7.0.0 - version: 7.0.0 + version: 7.1.0 mysql2: specifier: ^3.3.3 version: 3.3.3 pg: specifier: ^8.11.0 - version: 8.11.0 + version: 8.11.5 postgres: specifier: ^3.3.5 - version: 3.3.5 + version: 3.4.4 + prisma: + specifier: 5.14.0 + version: 5.14.0 source-map-support: specifier: ^0.5.21 version: 0.5.21 sql.js: specifier: ^1.8.0 - version: 1.8.0 + version: 1.10.3 sqlite3: specifier: ^5.1.4 - version: 5.1.6(encoding@0.1.13) + version: 5.1.7 sst: specifier: ^3.0.4 - 
version: 3.0.4 + version: 3.0.14 uuid: specifier: ^9.0.0 - version: 9.0.0 + version: 9.0.1 uvu: specifier: ^0.5.6 version: 0.5.6 vitest: - specifier: ^0.31.4 - version: 0.31.4(@vitest/ui@0.31.4)(terser@5.30.3) + specifier: ^1.6.0 + version: 1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) zod: specifier: ^3.20.2 - version: 3.21.4 + version: 3.23.7 devDependencies: '@neondatabase/serverless': specifier: 0.9.0 version: 0.9.0 - '@originjs/vite-plugin-commonjs': - specifier: ^1.0.3 - version: 1.0.3 + '@types/async-retry': + specifier: ^1.4.8 + version: 1.4.8 '@types/axios': specifier: ^0.14.0 version: 0.14.0 '@types/better-sqlite3': specifier: ^7.6.4 - version: 7.6.4 + version: 7.6.10 '@types/dockerode': specifier: ^3.3.18 - version: 3.3.18 + version: 3.3.29 '@types/express': specifier: ^4.17.16 - version: 4.17.17 + version: 4.17.21 '@types/node': specifier: ^20.2.5 - version: 20.2.5 + version: 20.12.12 '@types/pg': specifier: ^8.10.1 - version: 8.10.1 + version: 8.11.6 '@types/sql.js': specifier: ^1.4.4 - version: 1.4.4 + version: 1.4.9 '@types/uuid': specifier: ^9.0.1 - version: 9.0.1 + version: 9.0.8 '@vitest/ui': - specifier: ^0.31.4 - version: 0.31.4(vitest@0.31.4) + specifier: ^1.6.0 + version: 1.6.0(vitest@1.6.0) ava: specifier: ^5.3.0 - version: 5.3.0 + version: 5.3.0(@ava/typescript@5.0.0) axios: specifier: ^1.4.0 - version: 1.4.0 + version: 1.6.8 + cross-env: + specifier: ^7.0.3 + version: 7.0.3 + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) tsx: - specifier: ^3.12.7 - version: 3.12.7 + specifier: ^4.14.0 + version: 4.16.2 vite: - specifier: ^4.3.9 - version: 4.3.9(@types/node@20.2.5)(terser@5.30.3) + specifier: ^5.2.13 + version: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) vite-tsconfig-paths: - specifier: ^4.2.0 - version: 
4.2.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))(vite@4.3.9(@types/node@20.2.5)(terser@5.30.3)) + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) zx: specifier: ^7.2.2 version: 7.2.2 @@ -484,15 +511,21 @@ packages: resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} - '@andrewbranch/untar.js@1.0.2': - resolution: {integrity: sha512-hL80MHK3b++pEp6K23+Nl5r5D1F19DRagp2ruCBIv4McyCiLKq67vUNvEQY1aGCAKNZ8GxV23n5MhOm7RwO8Pg==} + '@andrewbranch/untar.js@1.0.3': + resolution: {integrity: sha512-Jh15/qVmrLGhkKJBdXlK1+9tY4lZruYjsgkDFj08ZmDiWVBLJcqkok7Z0/R0In+i1rScBpJlSvrTS2Lm41Pbnw==} - '@arethetypeswrong/cli@0.12.1': - resolution: {integrity: sha512-5nA91oqi8GPv9NkxgcjdpyKSMJ0WCcX8YYcxlZS5XBqY6cau0pMt5S0CXU3QGgl9qDryrok1QaM1xtUUhBKTAA==} + '@arethetypeswrong/cli@0.15.3': + resolution: {integrity: sha512-sIMA9ZJBWDEg1+xt5RkAEflZuf8+PO8SdKj17x6PtETuUho+qlZJg4DgmKc3q+QwQ9zOB5VLK6jVRbFdNLdUIA==} + engines: {node: '>=18'} hasBin: true - '@arethetypeswrong/core@0.12.1': - resolution: {integrity: sha512-1XCwz+IRSptRu1Y48D462vu3de8sLFrtXaXkgthIZ8+iRhEBIZtu+q7MwrfR3hWbYIgUsBj2WugtIgaPAdX9FA==} + '@arethetypeswrong/core@0.15.1': + resolution: {integrity: sha512-FYp6GBAgsNz81BkfItRz8RLZO03w5+BaeiPma1uCfmxTnxbtuMrI/dbzGiOk8VghO108uFI0oJo0OkewdSHw7g==} + engines: {node: '>=18'} + + '@ava/typescript@5.0.0': + resolution: {integrity: sha512-2twsQz2fUd95QK1MtKuEnjkiN47SKHZfi/vWj040EN6Eo2ZW3SNcAwncJqXXoMTYZTWtBRXYp3Fg8z+JkFI9aQ==} + engines: {node: ^18.18 || ^20.8 || ^21 || ^22} '@aws-crypto/crc32@3.0.0': resolution: {integrity: sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==} @@ -512,133 +545,193 @@ packages: '@aws-crypto/util@3.0.0': resolution: {integrity: 
sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==} - '@aws-sdk/client-cognito-identity@3.549.0': - resolution: {integrity: sha512-KrmjksANuWZTLx8JGtHXsHJ8bA72DoH5rMXhAUQSeSwGYlJKQWeBN9um4XtOOP6fMO9FtEorsG9cxJRk92M7Yw==} - engines: {node: '>=14.0.0'} + '@aws-sdk/client-cognito-identity@3.569.0': + resolution: {integrity: sha512-cD1HcdJNpUZgrATWCAQs2amQKI69pG+jF4b5ySq9KJkVi6gv2PWsD6QGDG8H12lMWaIKYlOpKbpnYTpcuvqUcg==} + engines: {node: '>=16.0.0'} '@aws-sdk/client-lambda@3.478.0': resolution: {integrity: sha512-7+PEE1aV3qVeuswL6cUBfHeljxC/WaXFj+214/W3q71uRdLbX5Z7ZOD15sJbjSu+4VZN9ugMaxEcp+oLiqWl+A==} engines: {node: '>=14.0.0'} - '@aws-sdk/client-rds-data@3.549.0': - resolution: {integrity: sha512-l1py0Y9l5WLAjvp+3IiykMs27zgmaCL5epp/nNY2uET9L2VMjbu3Exw50iSp47O3Ff3vjkin7QfnhQhfQCjYvQ==} - engines: {node: '>=14.0.0'} + '@aws-sdk/client-rds-data@3.583.0': + resolution: {integrity: sha512-xBnrVGNmMsTafzlaeZiFUahr3TP4zF2yRnsWzibylbXXIjaGdcLoiskNizo62syCh/8LbgpY6EN34EeYWsfMiw==} + engines: {node: '>=16.0.0'} - '@aws-sdk/client-sso-oidc@3.549.0': - resolution: {integrity: sha512-FbB4A78ILAb8sM4TfBd+3CrQcfZIhe0gtVZNbaxpq5cJZh1K7oZ8vPfKw4do9JWkDUXPLsD9Bwz12f8/JpAb6Q==} - engines: {node: '>=14.0.0'} - peerDependencies: - '@aws-sdk/credential-provider-node': ^3.549.0 + '@aws-sdk/client-sso-oidc@3.569.0': + resolution: {integrity: sha512-u5DEjNEvRvlKKh1QLCDuQ8GIrx+OFvJFLfhorsp4oCxDylvORs+KfyKKnJAw4wYEEHyxyz9GzHD7p6a8+HLVHw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sso-oidc@3.583.0': + resolution: {integrity: sha512-LO3wmrFXPi2kNE46lD1XATfRrvdNxXd4DlTFouoWmr7lvqoUkcbmtkV2r/XChZA2z0HiDauphC1e8b8laJVeSg==} + engines: {node: '>=16.0.0'} '@aws-sdk/client-sso@3.478.0': resolution: {integrity: sha512-Jxy9cE1JMkPR0PklCpq3cORHnZq/Z4klhSTNGgZNeBWovMa+plor52kyh8iUNHKl3XEJvTbHM7V+dvrr/x0P1g==} engines: {node: '>=14.0.0'} - '@aws-sdk/client-sso@3.549.0': - resolution: {integrity: 
sha512-lz+yflOAj5Q263FlCsKpNqttaCb2NPh8jC76gVCqCt7TPxRDBYVaqg0OZYluDaETIDNJi4DwN2Azcck7ilwuPw==} - engines: {node: '>=14.0.0'} + '@aws-sdk/client-sso@3.568.0': + resolution: {integrity: sha512-LSD7k0ZBQNWouTN5dYpUkeestoQ+r5u6cp6o+FATKeiFQET85RNA3xJ4WPnOI5rBC1PETKhQXvF44863P3hCaQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sso@3.583.0': + resolution: {integrity: sha512-FNJ2MmiBtZZwgkj4+GLVrzqwmD6D8FBptrFZk7PnGkSf7v1Q8txYNI6gY938RRhYJ4lBW4cNbhPvWoDxAl90Hw==} + engines: {node: '>=16.0.0'} '@aws-sdk/client-sts@3.478.0': resolution: {integrity: sha512-D+QID0dYzmn9dcxgKP3/nMndUqiQbDLsqI0Zf2pG4MW5gPhVNKlDGIV3Ztz8SkMjzGJExNOLW2L569o8jshJVw==} engines: {node: '>=14.0.0'} - '@aws-sdk/client-sts@3.549.0': - resolution: {integrity: sha512-63IreJ598Dzvpb+6sy81KfIX5iQxnrWSEtlyeCdC2GO6gmSQVwJzc9kr5pAC83lHmlZcm/Q3KZr3XBhRQqP0og==} - engines: {node: '>=14.0.0'} - peerDependencies: - '@aws-sdk/credential-provider-node': ^3.549.0 + '@aws-sdk/client-sts@3.569.0': + resolution: {integrity: sha512-3AyipQ2zHszkcTr8n1Sp7CiMUi28aMf1vOhEo0KKi0DWGo1Z1qJEpWeRP363KG0n9/8U3p1IkXGz5FRbpXZxIw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sts@3.583.0': + resolution: {integrity: sha512-xDMxiemPDWr9dY2Q4AyixkRnk/hvS6fs6OWxuVCz1WO47YhaAfOsEGAgQMgDLLaOfj/oLU5D14uTNBEPGh4rBA==} + engines: {node: '>=16.0.0'} '@aws-sdk/core@3.477.0': resolution: {integrity: sha512-o0434EH+d1BxHZvgG7z8vph2SYefciQ5RnJw2MgvETGnthgqsnI4nnNJLSw0FVeqCeS18n6vRtzqlGYR2YPCNg==} engines: {node: '>=14.0.0'} - '@aws-sdk/core@3.549.0': - resolution: {integrity: sha512-jC61OxJn72r/BbuDRCcluiw05Xw9eVLG0CwxQpF3RocxfxyZqlrGYaGecZ8Wy+7g/3sqGRC/Ar5eUhU1YcLx7w==} - engines: {node: '>=14.0.0'} + '@aws-sdk/core@3.567.0': + resolution: {integrity: sha512-zUDEQhC7blOx6sxhHdT75x98+SXQVdUIMu8z8AjqMWiYK2v4WkOS8i6dOS4E5OjL5J1Ac+ruy8op/Bk4AFqSIw==} + engines: {node: '>=16.0.0'} - '@aws-sdk/credential-provider-cognito-identity@3.549.0': - resolution: {integrity: 
sha512-EADYw4JimdZ3mGhxtAXSdARNunw/4T7Vd82vvsvqavqL3S9jt5+2SrZ2/PYrweJHLRFggMHcBs82FRql1efMaA==} - engines: {node: '>=14.0.0'} + '@aws-sdk/core@3.582.0': + resolution: {integrity: sha512-ofmD96IQc9g1dbyqlCyxu5fCG7kIl9p1NoN5+vGBUyLdbmPCV3Pdg99nRHYEJuv2MgGx5AUFGDPMHcqbJpnZIw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-cognito-identity@3.569.0': + resolution: {integrity: sha512-CHS0Zyuazh5cYLaJr2/I9up0xAu8Y+um/h0o4xNf00cKGT0Sdhoby5vyelHjVTeZt+OeOMTBt6IdqGwVbVG9gQ==} + engines: {node: '>=16.0.0'} '@aws-sdk/credential-provider-env@3.468.0': resolution: {integrity: sha512-k/1WHd3KZn0EQYjadooj53FC0z24/e4dUZhbSKTULgmxyO62pwh9v3Brvw4WRa/8o2wTffU/jo54tf4vGuP/ZA==} engines: {node: '>=14.0.0'} - '@aws-sdk/credential-provider-env@3.535.0': - resolution: {integrity: sha512-XppwO8c0GCGSAvdzyJOhbtktSEaShg14VJKg8mpMa1XcgqzmcqqHQjtDWbx5rZheY1VdpXZhpEzJkB6LpQejpA==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-env@3.568.0': + resolution: {integrity: sha512-MVTQoZwPnP1Ev5A7LG+KzeU6sCB8BcGkZeDT1z1V5Wt7GPq0MgFQTSSjhImnB9jqRSZkl1079Bt3PbO6lfIS8g==} + engines: {node: '>=16.0.0'} - '@aws-sdk/credential-provider-http@3.535.0': - resolution: {integrity: sha512-kdj1wCmOMZ29jSlUskRqN04S6fJ4dvt0Nq9Z32SA6wO7UG8ht6Ot9h/au/eTWJM3E1somZ7D771oK7dQt9b8yw==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-env@3.577.0': + resolution: {integrity: sha512-Jxu255j0gToMGEiqufP8ZtKI8HW90lOLjwJ3LrdlD/NLsAY0tOQf1fWc53u28hWmmNGMxmCrL2p66IOgMDhDUw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-http@3.568.0': + resolution: {integrity: sha512-gL0NlyI2eW17hnCrh45hZV+qjtBquB+Bckiip9R6DIVRKqYcoILyiFhuOgf2bXeF23gVh6j18pvUvIoTaFWs5w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-http@3.582.0': + resolution: {integrity: sha512-kGOUKw5ryPkDIYB69PjK3SicVLTbWB06ouFN2W1EvqUJpkQGPAUGzYcomKtt3mJaCTf/1kfoaHwARAl6KKSP8Q==} + engines: {node: '>=16.0.0'} '@aws-sdk/credential-provider-ini@3.478.0': resolution: {integrity: 
sha512-SsrYEYUvTG9ZoPC+zB19AnVoOKID+QIEHJDIi1GCZXW5kTVyr1saTVm4orG2TjYvbHQMddsWtHOvGYXZWAYMbw==} engines: {node: '>=14.0.0'} - '@aws-sdk/credential-provider-ini@3.549.0': - resolution: {integrity: sha512-k6IIrluZjQpzui5Din8fW3bFFhHaJ64XrsfYx0Ks1mb7xan84dJxmYP3tdDDmLzUeJv5h95ag88taHfjY9rakA==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-ini@3.568.0': + resolution: {integrity: sha512-m5DUN9mpto5DhEvo6w3+8SS6q932ja37rTNvpPqWJIaWhj7OorAwVirSaJQAQB/M8+XCUIrUonxytphZB28qGQ==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.568.0 + + '@aws-sdk/credential-provider-ini@3.583.0': + resolution: {integrity: sha512-8I0oWNg/yps6ctjhEeL/qJ9BIa/+xXP7RPDQqFKZ2zBkWbmLLOoMWXRvl8uKUBD6qCe+DGmcu9skfVXeXSesEQ==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.583.0 '@aws-sdk/credential-provider-node@3.478.0': resolution: {integrity: sha512-nwDutJYeHiIZCQDgKIUrsgwAWTil0mNe+cbd+j8fi+wwxkWUzip+F0+z02molJ8WrUUKNRhqB1V5aVx7IranuA==} engines: {node: '>=14.0.0'} - '@aws-sdk/credential-provider-node@3.549.0': - resolution: {integrity: sha512-f3YgalsMuywEAVX4AUm9tojqrBdfpAac0+D320ePzas0Ntbp7ItYu9ceKIhgfzXO3No7P3QK0rCrOxL+ABTn8Q==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-node@3.569.0': + resolution: {integrity: sha512-7jH4X2qlPU3PszZP1zvHJorhLARbU1tXvp8ngBe8ArXBrkFpl/dQ2Y/IRAICPm/pyC1IEt8L/CvKp+dz7v/eRw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-node@3.583.0': + resolution: {integrity: sha512-yBNypBXny7zJH85SzxDj8s1mbLXv9c/Vbq0qR3R3POj2idZ6ywB/qlIRC1XwBuv49Wvg8kA1wKXk3K3jrpcVIw==} + engines: {node: '>=16.0.0'} '@aws-sdk/credential-provider-process@3.468.0': resolution: {integrity: sha512-OYSn1A/UsyPJ7Z8Q2cNhTf55O36shPmSsvOfND04nSfu1nPaR+VUvvsP7v+brhGpwC/GAKTIdGAo4blH31BS6A==} engines: {node: '>=14.0.0'} - '@aws-sdk/credential-provider-process@3.535.0': - resolution: {integrity: sha512-9O1OaprGCnlb/kYl8RwmH7Mlg8JREZctB8r9sa1KhSsWFq/SWO0AuJTyowxD7zL5PkeS4eTvzFFHWCa3OO5epA==} - 
engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-process@3.568.0': + resolution: {integrity: sha512-r01zbXbanP17D+bQUb7mD8Iu2SuayrrYZ0Slgvx32qgz47msocV9EPCSwI4Hkw2ZtEPCeLQR4XCqFJB1D9P50w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-process@3.577.0': + resolution: {integrity: sha512-Gin6BWtOiXxIgITrJ3Nwc+Y2P1uVT6huYR4EcbA/DJUPWyO0n9y5UFLewPvVbLkRn15JeEqErBLUrHclkiOKtw==} + engines: {node: '>=16.0.0'} '@aws-sdk/credential-provider-sso@3.478.0': resolution: {integrity: sha512-LsDShG51X/q+s5ZFN7kHVqrd8ZHdyEyHqdhoocmRvvw2Dif50M0AqQfvCrW1ndj5CNzXO4x/eH8EK5ZOVlS6Sg==} engines: {node: '>=14.0.0'} - '@aws-sdk/credential-provider-sso@3.549.0': - resolution: {integrity: sha512-BGopRKHs7W8zkoH8qmSHrjudj263kXbhVkAUPxVUz0I28+CZNBgJC/RfVCbOpzmysIQEpwSqvOv1y0k+DQzIJQ==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-sso@3.568.0': + resolution: {integrity: sha512-+TA77NWOEXMUcfLoOuim6xiyXFg1GqHj55ggI1goTKGVvdHYZ+rhxZbwjI29+ewzPt/qcItDJcvhrjOrg9lCag==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-sso@3.583.0': + resolution: {integrity: sha512-G/1EvL9tBezSiU+06tG4K/kOvFfPjnheT4JSXqjPM7+vjKzgp2jxp1J9MMd69zs4jVWon932zMeGgjrCplzMEg==} + engines: {node: '>=16.0.0'} '@aws-sdk/credential-provider-web-identity@3.468.0': resolution: {integrity: sha512-rexymPmXjtkwCPfhnUq3EjO1rSkf39R4Jz9CqiM7OsqK2qlT5Y/V3gnMKn0ZMXsYaQOMfM3cT5xly5R+OKDHlw==} engines: {node: '>=14.0.0'} - '@aws-sdk/credential-provider-web-identity@3.549.0': - resolution: {integrity: sha512-QzclVXPxuwSI7515l34sdvliVq5leroO8P7RQFKRgfyQKO45o1psghierwG3PgV6jlMiv78FIAGJBr/n4qZ7YA==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-web-identity@3.568.0': + resolution: {integrity: sha512-ZJSmTmoIdg6WqAULjYzaJ3XcbgBzVy36lir6Y0UBMRGaxDgos1AARuX6EcYzXOl+ksLvxt/xMQ+3aYh1LWfKSw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.568.0 - '@aws-sdk/credential-providers@3.549.0': - resolution: {integrity: 
sha512-icbw8zCX2eSGPGBZLD6HKSgUMnpL95KzUikr94sVN81UuP1EnueaWj6gnErqP2Dr05ZEF9wMZxwd91qu8kVTNw==} - engines: {node: '>=14.0.0'} + '@aws-sdk/credential-provider-web-identity@3.577.0': + resolution: {integrity: sha512-ZGHGNRaCtJJmszb9UTnC7izNCtRUttdPlLdMkh41KPS32vfdrBDHs1JrpbZijItRj1xKuOXsiYSXLAaHGcLh8Q==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.577.0 + + '@aws-sdk/credential-providers@3.569.0': + resolution: {integrity: sha512-UL7EewaM1Xk6e4XLsxrCBv/owVSDI6Katnok6uMfqA8dA0x3ELjO7W35DW4wpWejQHErN5Gp1zloV9y3t34FMQ==} + engines: {node: '>=16.0.0'} '@aws-sdk/middleware-host-header@3.468.0': resolution: {integrity: sha512-gwQ+/QhX+lhof304r6zbZ/V5l5cjhGRxLL3CjH1uJPMcOAbw9wUlMdl+ibr8UwBZ5elfKFGiB1cdW/0uMchw0w==} engines: {node: '>=14.0.0'} - '@aws-sdk/middleware-host-header@3.535.0': - resolution: {integrity: sha512-0h6TWjBWtDaYwHMQJI9ulafeS4lLaw1vIxRjbpH0svFRt6Eve+Sy8NlVhECfTU2hNz/fLubvrUxsXoThaLBIew==} - engines: {node: '>=14.0.0'} + '@aws-sdk/middleware-host-header@3.567.0': + resolution: {integrity: sha512-zQHHj2N3in9duKghH7AuRNrOMLnKhW6lnmb7dznou068DJtDr76w475sHp2TF0XELsOGENbbBsOlN/S5QBFBVQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-host-header@3.577.0': + resolution: {integrity: sha512-9ca5MJz455CODIVXs0/sWmJm7t3QO4EUa1zf8pE8grLpzf0J94bz/skDWm37Pli13T3WaAQBHCTiH2gUVfCsWg==} + engines: {node: '>=16.0.0'} '@aws-sdk/middleware-logger@3.468.0': resolution: {integrity: sha512-X5XHKV7DHRXI3f29SAhJPe/OxWRFgDWDMMCALfzhmJfCi6Jfh0M14cJKoC+nl+dk9lB+36+jKjhjETZaL2bPlA==} engines: {node: '>=14.0.0'} - '@aws-sdk/middleware-logger@3.535.0': - resolution: {integrity: sha512-huNHpONOrEDrdRTvSQr1cJiRMNf0S52NDXtaPzdxiubTkP+vni2MohmZANMOai/qT0olmEVX01LhZ0ZAOgmg6A==} - engines: {node: '>=14.0.0'} + '@aws-sdk/middleware-logger@3.568.0': + resolution: {integrity: sha512-BinH72RG7K3DHHC1/tCulocFv+ZlQ9SrPF9zYT0T1OT95JXuHhB7fH8gEABrc6DAtOdJJh2fgxQjPy5tzPtsrA==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-logger@3.577.0': + 
resolution: {integrity: sha512-aPFGpGjTZcJYk+24bg7jT4XdIp42mFXSuPt49lw5KygefLyJM/sB0bKKqPYYivW0rcuZ9brQ58eZUNthrzYAvg==} + engines: {node: '>=16.0.0'} '@aws-sdk/middleware-recursion-detection@3.468.0': resolution: {integrity: sha512-vch9IQib2Ng9ucSyRW2eKNQXHUPb5jUPCLA5otTW/8nGjcOU37LxQG4WrxO7uaJ9Oe8hjHO+hViE3P0KISUhtA==} engines: {node: '>=14.0.0'} - '@aws-sdk/middleware-recursion-detection@3.535.0': - resolution: {integrity: sha512-am2qgGs+gwqmR4wHLWpzlZ8PWhm4ktj5bYSgDrsOfjhdBlWNxvPoID9/pDAz5RWL48+oH7I6SQzMqxXsFDikrw==} - engines: {node: '>=14.0.0'} + '@aws-sdk/middleware-recursion-detection@3.567.0': + resolution: {integrity: sha512-rFk3QhdT4IL6O/UWHmNdjJiURutBCy+ogGqaNHf/RELxgXH3KmYorLwCe0eFb5hq8f6vr3zl4/iH7YtsUOuo1w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.577.0': + resolution: {integrity: sha512-pn3ZVEd2iobKJlR3H+bDilHjgRnNrQ6HMmK9ZzZw89Ckn3Dcbv48xOv4RJvu0aU8SDLl/SNCxppKjeLDTPGBNA==} + engines: {node: '>=16.0.0'} '@aws-sdk/middleware-signing@3.468.0': resolution: {integrity: sha512-s+7fSB1gdnnTj5O0aCCarX3z5Vppop8kazbNSZADdkfHIDWCN80IH4ZNjY3OWqaAz0HmR4LNNrovdR304ojb4Q==} @@ -648,55 +741,78 @@ packages: resolution: {integrity: sha512-Rec+nAPIzzwxgHPW+xqY6tooJGFOytpYg/xSRv8/IXl3xKGhmpMGs6gDWzmMBv/qy5nKTvLph/csNWJ98GWXCw==} engines: {node: '>=14.0.0'} - '@aws-sdk/middleware-user-agent@3.540.0': - resolution: {integrity: sha512-8Rd6wPeXDnOYzWj1XCmOKcx/Q87L0K1/EHqOBocGjLVbN3gmRxBvpmR1pRTjf7IsWfnnzN5btqtcAkfDPYQUMQ==} - engines: {node: '>=14.0.0'} + '@aws-sdk/middleware-user-agent@3.567.0': + resolution: {integrity: sha512-a7DBGMRBLWJU3BqrQjOtKS4/RcCh/BhhKqwjCE0FEhhm6A/GGuAs/DcBGOl6Y8Wfsby3vejSlppTLH/qtV1E9w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-user-agent@3.583.0': + resolution: {integrity: sha512-xVNXXXDWvBVI/AeVtSdA9SVumqxiZaESk/JpUn9GMkmtTKfter0Cweap+1iQ9j8bRAO0vNhmIkbcvdB1S4WVUw==} + engines: {node: '>=16.0.0'} '@aws-sdk/region-config-resolver@3.470.0': resolution: {integrity: 
sha512-C1o1J06iIw8cyAAOvHqT4Bbqf+PgQ/RDlSyjt2gFfP2OovDpc2o2S90dE8f8iZdSGpg70N5MikT1DBhW9NbhtQ==} engines: {node: '>=14.0.0'} - '@aws-sdk/region-config-resolver@3.535.0': - resolution: {integrity: sha512-IXOznDiaItBjsQy4Fil0kzX/J3HxIOknEphqHbOfUf+LpA5ugcsxuQQONrbEQusCBnfJyymrldBvBhFmtlU9Wg==} - engines: {node: '>=14.0.0'} + '@aws-sdk/region-config-resolver@3.567.0': + resolution: {integrity: sha512-VMDyYi5Dh2NydDiIARZ19DwMfbyq0llS736cp47qopmO6wzdeul7WRTx8NKfEYN0/AwEaqmTW0ohx58jSB1lYg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/region-config-resolver@3.577.0': + resolution: {integrity: sha512-4ChCFACNwzqx/xjg3zgFcW8Ali6R9C95cFECKWT/7CUM1D0MGvkclSH2cLarmHCmJgU6onKkJroFtWp0kHhgyg==} + engines: {node: '>=16.0.0'} '@aws-sdk/token-providers@3.478.0': resolution: {integrity: sha512-7b5tj1y/wGHZIZ+ckjOUKgKrMuCJMF/G1UKZKIqqdekeEsjcThbvoxAMeY0FEowu2ODVk/ggOmpBFxcu0iYd6A==} engines: {node: '>=14.0.0'} - '@aws-sdk/token-providers@3.549.0': - resolution: {integrity: sha512-rJyeXkXknLukRFGuMQOgKnPBa+kLODJtOqEBf929SpQ96f1I6ytdndmWbB5B/OQN5Fu5DOOQUQqJypDQVl5ibQ==} - engines: {node: '>=14.0.0'} + '@aws-sdk/token-providers@3.568.0': + resolution: {integrity: sha512-mCQElYzY5N2JlXB7LyjOoLvRN/JiSV+E9szLwhYN3dleTUCMbGqWb7RiAR2V3fO+mz8f9kR7DThTExKJbKogKw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.568.0 - '@aws-sdk/types@3.342.0': - resolution: {integrity: sha512-5uyXVda/AgUpdZNJ9JPHxwyxr08miPiZ/CKSMcRdQVjcNnrdzY9m/iM9LvnQT44sQO+IEEkF2IoZIWvZcq199A==} - engines: {node: '>=14.0.0'} + '@aws-sdk/token-providers@3.577.0': + resolution: {integrity: sha512-0CkIZpcC3DNQJQ1hDjm2bdSy/Xjs7Ny5YvSsacasGOkNfk+FdkiQy6N67bZX3Zbc9KIx+Nz4bu3iDeNSNplnnQ==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.577.0 '@aws-sdk/types@3.468.0': resolution: {integrity: sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==} engines: {node: '>=14.0.0'} - '@aws-sdk/types@3.535.0': - resolution: 
{integrity: sha512-aY4MYfduNj+sRR37U7XxYR8wemfbKP6lx00ze2M2uubn7mZotuVrWYAafbMSXrdEMSToE5JDhr28vArSOoLcSg==} - engines: {node: '>=14.0.0'} + '@aws-sdk/types@3.567.0': + resolution: {integrity: sha512-JBznu45cdgQb8+T/Zab7WpBmfEAh77gsk99xuF4biIb2Sw1mdseONdoGDjEJX57a25TzIv/WUJ2oABWumckz1A==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/types@3.577.0': + resolution: {integrity: sha512-FT2JZES3wBKN/alfmhlo+3ZOq/XJ0C7QOZcDNrpKjB0kqYoKjhVKZ/Hx6ArR0czkKfHzBBEs6y40ebIHx2nSmA==} + engines: {node: '>=16.0.0'} '@aws-sdk/util-endpoints@3.478.0': resolution: {integrity: sha512-u9Mcg3euGJGs5clPt9mBuhBjHiEKiD0PnfvArhfq9i+dcY5mbCq/i1Dezp3iv1fZH9xxQt7hPXDfSpt1yUSM6g==} engines: {node: '>=14.0.0'} - '@aws-sdk/util-endpoints@3.540.0': - resolution: {integrity: sha512-1kMyQFAWx6f8alaI6UT65/5YW/7pDWAKAdNwL6vuJLea03KrZRX3PMoONOSJpAS5m3Ot7HlWZvf3wZDNTLELZw==} - engines: {node: '>=14.0.0'} + '@aws-sdk/util-endpoints@3.567.0': + resolution: {integrity: sha512-WVhot3qmi0BKL9ZKnUqsvCd++4RF2DsJIG32NlRaml1FT9KaqSzNv0RXeA6k/kYwiiNT7y3YWu3Lbzy7c6vG9g==} + engines: {node: '>=16.0.0'} - '@aws-sdk/util-locate-window@3.535.0': - resolution: {integrity: sha512-PHJ3SL6d2jpcgbqdgiPxkXpu7Drc2PYViwxSIqvvMKhDwzSB1W3mMvtpzwKM4IE7zLFodZo0GKjJ9AsoXndXhA==} - engines: {node: '>=14.0.0'} + '@aws-sdk/util-endpoints@3.583.0': + resolution: {integrity: sha512-ZC9mb2jq6BFXPYsUsD2tmYcnlmd+9PGNwnFNn8jk4abna5Jjk2wDknN81ybktmBR5ttN9W8ugmktuKtvAMIDCQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-locate-window@3.568.0': + resolution: {integrity: sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig==} + engines: {node: '>=16.0.0'} '@aws-sdk/util-user-agent-browser@3.468.0': resolution: {integrity: sha512-OJyhWWsDEizR3L+dCgMXSUmaCywkiZ7HSbnQytbeKGwokIhD69HTiJcibF/sgcM5gk4k3Mq3puUhGnEZ46GIig==} - '@aws-sdk/util-user-agent-browser@3.535.0': - resolution: {integrity: sha512-RWMcF/xV5n+nhaA/Ff5P3yNP3Kur/I+VNZngog4TEs92oB/nwOdAg/2JL8bVAhUbMrjTjpwm7PItziYFQoqyig==} + 
'@aws-sdk/util-user-agent-browser@3.567.0': + resolution: {integrity: sha512-cqP0uXtZ7m7hRysf3fRyJwcY1jCgQTpJy7BHB5VpsE7DXlXHD5+Ur5L42CY7UrRPrB6lc6YGFqaAOs5ghMcLyA==} + + '@aws-sdk/util-user-agent-browser@3.577.0': + resolution: {integrity: sha512-zEAzHgR6HWpZOH7xFgeJLc6/CzMcx4nxeQolZxVZoB5pPaJd3CjyRhZN0xXeZB0XIRCWmb4yJBgyiugXLNMkLA==} '@aws-sdk/util-user-agent-node@3.470.0': resolution: {integrity: sha512-QxsZ9iVHcBB/XRdYvwfM5AMvNp58HfqkIrH88mY0cmxuvtlIGDfWjczdDrZMJk9y0vIq+cuoCHsGXHu7PyiEAQ==} @@ -707,9 +823,18 @@ packages: aws-crt: optional: true - '@aws-sdk/util-user-agent-node@3.535.0': - resolution: {integrity: sha512-dRek0zUuIT25wOWJlsRm97nTkUlh1NDcLsQZIN2Y8KxhwoXXWtJs5vaDPT+qAg+OpcNj80i1zLR/CirqlFg/TQ==} - engines: {node: '>=14.0.0'} + '@aws-sdk/util-user-agent-node@3.568.0': + resolution: {integrity: sha512-NVoZoLnKF+eXPBvXg+KqixgJkPSrerR6Gqmbjwqbv14Ini+0KNKB0/MXas1mDGvvEgtNkHI/Cb9zlJ3KXpti2A==} + engines: {node: '>=16.0.0'} + peerDependencies: + aws-crt: '>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@aws-sdk/util-user-agent-node@3.577.0': + resolution: {integrity: sha512-XqvtFjbSMtycZTWVwDe8DRWovuoMbA54nhUoZwVU6rW9OSD6NZWGR512BUGHFaWzW0Wg8++Dj10FrKTG2XtqfA==} + engines: {node: '>=16.0.0'} peerDependencies: aws-crt: '>=1.0.0' peerDependenciesMeta: @@ -730,121 +855,129 @@ packages: resolution: {integrity: sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==} engines: {node: '>=6.9.0'} - '@babel/code-frame@7.24.2': - resolution: {integrity: sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==} + '@babel/code-frame@7.24.6': + resolution: {integrity: sha512-ZJhac6FkEd1yhG2AHOmfcXG4ceoLltoCVJjN5XsWN9BifBQr+cHJbWi0h68HZuSORq+3WtJ2z0hwF2NG1b5kcA==} engines: {node: '>=6.9.0'} - '@babel/compat-data@7.24.4': - resolution: {integrity: sha512-vg8Gih2MLK+kOkHJp4gBEIkyaIi00jgWot2D9QOmmfLC8jINSOzmCLta6Bvz/JSBCqnegV0L80jhxkol5GWNfQ==} + '@babel/compat-data@7.24.6': + 
resolution: {integrity: sha512-aC2DGhBq5eEdyXWqrDInSqQjO0k8xtPRf5YylULqx8MCd6jBtzqfta/3ETMRpuKIc5hyswfO80ObyA1MvkCcUQ==} engines: {node: '>=6.9.0'} - '@babel/core@7.24.4': - resolution: {integrity: sha512-MBVlMXP+kkl5394RBLSxxk/iLTeVGuXTV3cIDXavPpMMqnSnt6apKgan/U8O3USWZCWZT/TbgfEpKa4uMgN4Dg==} + '@babel/core@7.24.6': + resolution: {integrity: sha512-qAHSfAdVyFmIvl0VHELib8xar7ONuSHrE2hLnsaWkYNTI68dmi1x8GYDhJjMI/e7XWal9QBlZkwbOnkcw7Z8gQ==} engines: {node: '>=6.9.0'} '@babel/generator@7.17.7': resolution: {integrity: sha512-oLcVCTeIFadUoArDTwpluncplrYBmTCCZZgXCbgNGvOBBiSDDK3eWO4b/+eOTli5tKv1lg+a5/NAXg+nTcei1w==} engines: {node: '>=6.9.0'} - '@babel/generator@7.24.4': - resolution: {integrity: sha512-Xd6+v6SnjWVx/nus+y0l1sxMOTOMBkyL4+BIdbALyatQnAe/SRVjANeDPSCYaX+i1iJmuGSKf3Z+E+V/va1Hvw==} + '@babel/generator@7.24.6': + resolution: {integrity: sha512-S7m4eNa6YAPJRHmKsLHIDJhNAGNKoWNiWefz1MBbpnt8g9lvMDl1hir4P9bo/57bQEmuwEhnRU/AMWsD0G/Fbg==} engines: {node: '>=6.9.0'} - '@babel/helper-annotate-as-pure@7.22.5': - resolution: {integrity: sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==} + '@babel/helper-annotate-as-pure@7.24.6': + resolution: {integrity: sha512-DitEzDfOMnd13kZnDqns1ccmftwJTS9DMkyn9pYTxulS7bZxUxpMly3Nf23QQ6NwA4UB8lAqjbqWtyvElEMAkg==} engines: {node: '>=6.9.0'} - '@babel/helper-builder-binary-assignment-operator-visitor@7.22.15': - resolution: {integrity: sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==} + '@babel/helper-builder-binary-assignment-operator-visitor@7.24.6': + resolution: {integrity: sha512-+wnfqc5uHiMYtvRX7qu80Toef8BXeh4HHR1SPeonGb1SKPniNEd4a/nlaJJMv/OIEYvIVavvo0yR7u10Gqz0Iw==} engines: {node: '>=6.9.0'} - '@babel/helper-compilation-targets@7.23.6': - resolution: {integrity: sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==} + '@babel/helper-compilation-targets@7.24.6': + resolution: {integrity: 
sha512-VZQ57UsDGlX/5fFA7GkVPplZhHsVc+vuErWgdOiysI9Ksnw0Pbbd6pnPiR/mmJyKHgyIW0c7KT32gmhiF+cirg==} engines: {node: '>=6.9.0'} - '@babel/helper-create-class-features-plugin@7.24.4': - resolution: {integrity: sha512-lG75yeuUSVu0pIcbhiYMXBXANHrpUPaOfu7ryAzskCgKUHuAxRQI5ssrtmF0X9UXldPlvT0XM/A4F44OXRt6iQ==} + '@babel/helper-create-class-features-plugin@7.24.6': + resolution: {integrity: sha512-djsosdPJVZE6Vsw3kk7IPRWethP94WHGOhQTc67SNXE0ZzMhHgALw8iGmYS0TD1bbMM0VDROy43od7/hN6WYcA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-create-regexp-features-plugin@7.22.15': - resolution: {integrity: sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w==} + '@babel/helper-create-regexp-features-plugin@7.24.6': + resolution: {integrity: sha512-C875lFBIWWwyv6MHZUG9HmRrlTDgOsLWZfYR0nW69gaKJNe0/Mpxx5r0EID2ZdHQkdUmQo2t0uNckTL08/1BgA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-define-polyfill-provider@0.6.1': - resolution: {integrity: sha512-o7SDgTJuvx5vLKD6SFvkydkSMBvahDKGiNJzG22IZYXhiqoe9efY7zocICBgzHV4IRg5wdgl2nEL/tulKIEIbA==} + '@babel/helper-define-polyfill-provider@0.6.2': + resolution: {integrity: sha512-LV76g+C502biUK6AyZ3LK10vDpDyCzZnhZFXkH1L75zHPj68+qc8Zfpx2th+gzwA2MzyK+1g/3EPl62yFnVttQ==} peerDependencies: '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 - '@babel/helper-environment-visitor@7.22.20': - resolution: {integrity: sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==} - engines: {node: '>=6.9.0'} - '@babel/helper-environment-visitor@7.22.5': resolution: {integrity: sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==} engines: {node: '>=6.9.0'} + '@babel/helper-environment-visitor@7.24.6': + resolution: {integrity: sha512-Y50Cg3k0LKLMjxdPjIl40SdJgMB85iXn27Vk/qbHZCFx/o5XO3PSnpi675h1KEmmDb6OFArfd5SCQEQ5Q4H88g==} + engines: {node: '>=6.9.0'} + 
'@babel/helper-function-name@7.22.5': resolution: {integrity: sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==} engines: {node: '>=6.9.0'} - '@babel/helper-function-name@7.23.0': - resolution: {integrity: sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==} + '@babel/helper-function-name@7.24.6': + resolution: {integrity: sha512-xpeLqeeRkbxhnYimfr2PC+iA0Q7ljX/d1eZ9/inYbmfG2jpl8Lu3DyXvpOAnrS5kxkfOWJjioIMQsaMBXFI05w==} engines: {node: '>=6.9.0'} '@babel/helper-hoist-variables@7.22.5': resolution: {integrity: sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==} engines: {node: '>=6.9.0'} - '@babel/helper-member-expression-to-functions@7.23.0': - resolution: {integrity: sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA==} + '@babel/helper-hoist-variables@7.24.6': + resolution: {integrity: sha512-SF/EMrC3OD7dSta1bLJIlrsVxwtd0UpjRJqLno6125epQMJ/kyFmpTT4pbvPbdQHzCHg+biQ7Syo8lnDtbR+uA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-member-expression-to-functions@7.24.6': + resolution: {integrity: sha512-OTsCufZTxDUsv2/eDXanw/mUZHWOxSbEmC3pP8cgjcy5rgeVPWWMStnv274DV60JtHxTk0adT0QrCzC4M9NWGg==} engines: {node: '>=6.9.0'} - '@babel/helper-module-imports@7.24.3': - resolution: {integrity: sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==} + '@babel/helper-module-imports@7.24.6': + resolution: {integrity: sha512-a26dmxFJBF62rRO9mmpgrfTLsAuyHk4e1hKTUkD/fcMfynt8gvEKwQPQDVxWhca8dHoDck+55DFt42zV0QMw5g==} engines: {node: '>=6.9.0'} - '@babel/helper-module-transforms@7.23.3': - resolution: {integrity: sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ==} + '@babel/helper-module-transforms@7.24.6': + resolution: {integrity: sha512-Y/YMPm83mV2HJTbX1Qh2sjgjqcacvOlhbzdCCsSlblOKjSYmQqEbO6rUniWQyRo9ncyfjT8hnUjlG06RXDEmcA==} 
engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-optimise-call-expression@7.22.5': - resolution: {integrity: sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==} + '@babel/helper-optimise-call-expression@7.24.6': + resolution: {integrity: sha512-3SFDJRbx7KuPRl8XDUr8O7GAEB8iGyWPjLKJh/ywP/Iy9WOmEfMrsWbaZpvBu2HSYn4KQygIsz0O7m8y10ncMA==} engines: {node: '>=6.9.0'} - '@babel/helper-plugin-utils@7.24.0': - resolution: {integrity: sha512-9cUznXMG0+FxRuJfvL82QlTqIzhVW9sL0KjMPHhAOOvpQGL8QtdxnBKILjBqxlHyliz0yCa1G903ZXI/FuHy2w==} + '@babel/helper-plugin-utils@7.24.6': + resolution: {integrity: sha512-MZG/JcWfxybKwsA9N9PmtF2lOSFSEMVCpIRrbxccZFLJPrJciJdG/UhSh5W96GEteJI2ARqm5UAHxISwRDLSNg==} engines: {node: '>=6.9.0'} - '@babel/helper-remap-async-to-generator@7.22.20': - resolution: {integrity: sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw==} + '@babel/helper-remap-async-to-generator@7.24.6': + resolution: {integrity: sha512-1Qursq9ArRZPAMOZf/nuzVW8HgJLkTB9y9LfP4lW2MVp4e9WkLJDovfKBxoDcCk6VuzIxyqWHyBoaCtSRP10yg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-replace-supers@7.24.1': - resolution: {integrity: sha512-QCR1UqC9BzG5vZl8BMicmZ28RuUBnHhAMddD8yHFHDRH9lLTZ9uUPehX8ctVPT8l0TKblJidqcgUUKGVrePleQ==} + '@babel/helper-replace-supers@7.24.6': + resolution: {integrity: sha512-mRhfPwDqDpba8o1F8ESxsEkJMQkUF8ZIWrAc0FtWhxnjfextxMWxr22RtFizxxSYLjVHDeMgVsRq8BBZR2ikJQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-simple-access@7.22.5': - resolution: {integrity: sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==} + '@babel/helper-simple-access@7.24.6': + resolution: {integrity: sha512-nZzcMMD4ZhmB35MOOzQuiGO5RzL6tJbsT37Zx8M5L/i9KSrukGXWTjLe1knIbb/RmxoJE9GON9soq0c0VEMM5g==} engines: {node: '>=6.9.0'} - 
'@babel/helper-skip-transparent-expression-wrappers@7.22.5': - resolution: {integrity: sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==} + '@babel/helper-skip-transparent-expression-wrappers@7.24.6': + resolution: {integrity: sha512-jhbbkK3IUKc4T43WadP96a27oYti9gEf1LdyGSP2rHGH77kwLwfhO7TgwnWvxxQVmke0ImmCSS47vcuxEMGD3Q==} engines: {node: '>=6.9.0'} '@babel/helper-split-export-declaration@7.22.6': resolution: {integrity: sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==} engines: {node: '>=6.9.0'} + '@babel/helper-split-export-declaration@7.24.6': + resolution: {integrity: sha512-CvLSkwXGWnYlF9+J3iZUvwgAxKiYzK3BWuo+mLzD/MDGOZDj7Gq8+hqaOkMxmJwmlv0iu86uH5fdADd9Hxkymw==} + engines: {node: '>=6.9.0'} + '@babel/helper-string-parser@7.22.5': resolution: {integrity: sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==} engines: {node: '>=6.9.0'} @@ -853,8 +986,8 @@ packages: resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==} engines: {node: '>=6.9.0'} - '@babel/helper-string-parser@7.24.1': - resolution: {integrity: sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==} + '@babel/helper-string-parser@7.24.6': + resolution: {integrity: sha512-WdJjwMEkmBicq5T9fm/cHND3+UlFa2Yj8ALLgmoSQAJZysYbBjw+azChSGPN4DSPLXOcooGRvDwZWMcF/mLO2Q==} engines: {node: '>=6.9.0'} '@babel/helper-validator-identifier@7.22.20': @@ -865,16 +998,20 @@ packages: resolution: {integrity: sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==} engines: {node: '>=6.9.0'} - '@babel/helper-validator-option@7.23.5': - resolution: {integrity: sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==} + '@babel/helper-validator-identifier@7.24.6': + resolution: {integrity: 
sha512-4yA7s865JHaqUdRbnaxarZREuPTHrjpDT+pXoAZ1yhyo6uFnIEpS8VMu16siFOHDpZNKYv5BObhsB//ycbICyw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.24.6': + resolution: {integrity: sha512-Jktc8KkF3zIkePb48QO+IapbXlSapOW9S+ogZZkcO6bABgYAxtZcjZ/O005111YLf+j4M84uEgwYoidDkXbCkQ==} engines: {node: '>=6.9.0'} - '@babel/helper-wrap-function@7.22.20': - resolution: {integrity: sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw==} + '@babel/helper-wrap-function@7.24.6': + resolution: {integrity: sha512-f1JLrlw/jbiNfxvdrfBgio/gRBk3yTAEJWirpAkiJG2Hb22E7cEYKHWo0dFPTv/niPovzIdPdEDetrv6tC6gPQ==} engines: {node: '>=6.9.0'} - '@babel/helpers@7.24.4': - resolution: {integrity: sha512-FewdlZbSiwaVGlgT1DPANDuCHaDMiOo+D/IDYRFYjHOuv66xMSJ7fQwwODwRNAPkADIO/z1EoF/l2BCWlWABDw==} + '@babel/helpers@7.24.6': + resolution: {integrity: sha512-V2PI+NqnyFu1i0GyTd/O/cTpxzQCYioSkUIRmgo7gFEHKKCg5w46+r/A6WeUR1+P3TeQ49dspGPNd/E3n9AnnA==} engines: {node: '>=6.9.0'} '@babel/highlight@7.22.10': @@ -885,8 +1022,8 @@ packages: resolution: {integrity: sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==} engines: {node: '>=6.9.0'} - '@babel/highlight@7.24.2': - resolution: {integrity: sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==} + '@babel/highlight@7.24.6': + resolution: {integrity: sha512-2YnuOp4HAk2BsBrJJvYCbItHx0zWscI1C3zgWkz+wDyD9I7GIVrfnLyrR4Y1VR+7p+chAEcrgRQYZAGIKMV7vQ==} engines: {node: '>=6.9.0'} '@babel/parser@7.22.10': @@ -894,31 +1031,31 @@ packages: engines: {node: '>=6.0.0'} hasBin: true - '@babel/parser@7.24.4': - resolution: {integrity: sha512-zTvEBcghmeBma9QIGunWevvBAp4/Qu9Bdq+2k0Ot4fVMD6v3dsC9WOcRSKk7tRRyBM/53yKMJko9xOatGQAwSg==} + '@babel/parser@7.24.6': + resolution: {integrity: sha512-eNZXdfU35nJC2h24RznROuOpO94h6x8sg9ju0tT9biNtLZ2vuP8SduLqqV+/8+cebSLV9SJEAN5Z3zQbJG/M+Q==} engines: {node: '>=6.0.0'} hasBin: true - 
'@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.24.4': - resolution: {integrity: sha512-qpl6vOOEEzTLLcsuqYYo8yDtrTocmu2xkGvgNebvPjT9DTtfFYGmgDqY+rBYXNlqL4s9qLDn6xkrJv4RxAPiTA==} + '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.24.6': + resolution: {integrity: sha512-bYndrJ6Ph6Ar+GaB5VAc0JPoP80bQCm4qon6JEzXfRl5QZyQ8Ur1K6k7htxWmPA5z+k7JQvaMUrtXlqclWYzKw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.24.1': - resolution: {integrity: sha512-y4HqEnkelJIOQGd+3g1bTeKsA5c6qM7eOn7VggGVbBc0y8MLSKHacwcIE2PplNlQSj0PqS9rrXL/nkPVK+kUNg==} + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.24.6': + resolution: {integrity: sha512-iVuhb6poq5ikqRq2XWU6OQ+R5o9wF+r/or9CeUyovgptz0UlnK4/seOQ1Istu/XybYjAhQv1FRSSfHHufIku5Q==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.24.1': - resolution: {integrity: sha512-Hj791Ii4ci8HqnaKHAlLNs+zaLXb0EzSDhiAWp5VNlyvCNymYfacs64pxTxbH1znW/NcArSmwpmG9IKE/TUVVQ==} + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.24.6': + resolution: {integrity: sha512-c8TER5xMDYzzFcGqOEp9l4hvB7dcbhcGjcLVwxWfe4P5DOafdwjsBJZKsmv+o3aXh7NhopvayQIovHrh2zSRUQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.13.0 - '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.24.1': - resolution: {integrity: sha512-m9m/fXsXLiHfwdgydIFnpk+7jlVbnvlK5B2EKiPdLUb6WX654ZaaEWJUjk8TftRbZpK0XibovlLWX4KIZhV6jw==} + '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.24.6': + resolution: {integrity: sha512-z8zEjYmwBUHN/pCF3NuWBhHQjJCrd33qAi8MgANfMrAvn72k2cImT8VjK9LJFu4ysOLJqhfkYYb3MvwANRUNZQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 @@ -937,18 +1074,25 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-proposal-decorators@7.24.1': - 
resolution: {integrity: sha512-zPEvzFijn+hRvJuX2Vu3KbEBN39LN3f7tW3MQO2LsIs57B26KU+kUc82BdAktS1VCM6libzh45eKGI65lg0cpA==} + '@babel/plugin-proposal-decorators@7.24.6': + resolution: {integrity: sha512-8DjR0/DzlBhz2SVi9a19/N2U5+C3y3rseXuyoKL9SP8vnbewscj1eHZtL6kpEn4UCuUmqEo0mvqyDYRFoN2gpA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-proposal-export-default-from@7.24.1': - resolution: {integrity: sha512-+0hrgGGV3xyYIjOrD/bUZk/iUwOIGuoANfRfVg1cPhYBxF+TIXSEcc42DqzBICmWsnAQ+SfKedY0bj8QD+LuMg==} + '@babel/plugin-proposal-export-default-from@7.24.6': + resolution: {integrity: sha512-qPPDbYs9j5IArMFqYi85QxatHURSzRyskKpIbjrVoVglDuGdhu1s7UTCmXvP/qR2aHa3EdJ8X3iZvQAHjmdHUw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 + '@babel/plugin-proposal-logical-assignment-operators@7.20.7': + resolution: {integrity: sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug==} + engines: {node: '>=6.9.0'} + deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-logical-assignment-operators instead. 
+ peerDependencies: + '@babel/core': ^7.0.0-0 + '@babel/plugin-proposal-nullish-coalescing-operator@7.18.6': resolution: {integrity: sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==} engines: {node: '>=6.9.0'} @@ -1006,8 +1150,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-decorators@7.24.1': - resolution: {integrity: sha512-05RJdO/cCrtVWuAaSn1tS3bH8jbsJa/Y1uD186u6J4C/1mnHFxseeuWpsqr9anvo7TUulev7tm7GDwRV+VuhDw==} + '@babel/plugin-syntax-decorators@7.24.6': + resolution: {integrity: sha512-gInH8LEqBp+wkwTVihCd/qf+4s28g81FZyvlIbAurHk9eSiItEKG7E0uNK2UdpgsD79aJVAW3R3c85h0YJ0jsw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1017,8 +1161,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-export-default-from@7.24.1': - resolution: {integrity: sha512-cNXSxv9eTkGUtd0PsNMK8Yx5xeScxfpWOUAxE+ZPAXXEcAMOC3fk7LRdXq5fvpra2pLx2p1YtkAhpUbB2SwaRA==} + '@babel/plugin-syntax-export-default-from@7.24.6': + resolution: {integrity: sha512-Nzl7kZ4tjOM2LJpejBMPwZs7OJfc26++2HsMQuSrw6gxpqXGtZZ3Rj4Zt4Qm7vulMZL2gHIGGc2stnlQnHQCqA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1028,20 +1172,20 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-flow@7.24.1': - resolution: {integrity: sha512-sxi2kLTI5DeW5vDtMUsk4mTPwvlUDbjOnoWayhynCwrw4QXRld4QEYwqzY8JmQXaJUtgUuCIurtSRH5sn4c7mA==} + '@babel/plugin-syntax-flow@7.24.6': + resolution: {integrity: sha512-gNkksSdV8RbsCoHF9sjVYrHfYACMl/8U32UfUhJ9+84/ASXw8dlx+eHyyF0m6ncQJ9IBSxfuCkB36GJqYdXTOA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-import-assertions@7.24.1': - resolution: {integrity: sha512-IuwnI5XnuF189t91XbxmXeCDz3qs6iDRO7GJ++wcfgeXNs/8FmIlKcpDSXNVyuLQxlwvskmI3Ct73wUODkJBlQ==} + '@babel/plugin-syntax-import-assertions@7.24.6': + resolution: {integrity: 
sha512-BE6o2BogJKJImTmGpkmOic4V0hlRRxVtzqxiSPa8TIFxyhi4EFjHm08nq1M4STK4RytuLMgnSz0/wfflvGFNOg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-import-attributes@7.24.1': - resolution: {integrity: sha512-zhQTMH0X2nVLnb04tz+s7AMuasX8U0FnpE+nHTOhSOINjWMnopoZTxtIKsd45n4GQ/HIZLyfIpoul8e2m0DnRA==} + '@babel/plugin-syntax-import-attributes@7.24.6': + resolution: {integrity: sha512-D+CfsVZousPXIdudSII7RGy52+dYRtbyKAZcvtQKq/NpsivyMVduepzcLqG5pMBugtMdedxdC8Ramdpcne9ZWQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1056,8 +1200,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-jsx@7.24.1': - resolution: {integrity: sha512-2eCtxZXf+kbkMIsXS4poTvT4Yu5rXiRa+9xGVT56raghjmBTKMpFNc9R4IDiB4emao9eO22Ox7CxuJG7BgExqA==} + '@babel/plugin-syntax-jsx@7.24.6': + resolution: {integrity: sha512-lWfvAIFNWMlCsU0DRUun2GpFwZdGTukLaHJqRh1JRb80NdAP5Sb1HDHB5X9P9OtgZHQl089UzQkpYlBq2VTPRw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1104,8 +1248,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-typescript@7.24.1': - resolution: {integrity: sha512-Yhnmvy5HZEnHUty6i++gcfH1/l68AHnItFHnaCv6hn9dNh0hQvvQJsxpi4BMBFN5DLeHBuucT/0DgzXif/OyRw==} + '@babel/plugin-syntax-typescript@7.24.6': + resolution: {integrity: sha512-TzCtxGgVTEJWWwcYwQhCIQ6WaKlo80/B+Onsk4RRCcYqpYGFcG9etPW94VToGte5AAcxRrhjPUFvUS3Y2qKi4A==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1116,356 +1260,356 @@ packages: peerDependencies: '@babel/core': ^7.0.0 - '@babel/plugin-transform-arrow-functions@7.24.1': - resolution: {integrity: sha512-ngT/3NkRhsaep9ck9uj2Xhv9+xB1zShY3tM3g6om4xxCELwCDN4g4Aq5dRn48+0hasAql7s2hdBOysCfNpr4fw==} + '@babel/plugin-transform-arrow-functions@7.24.6': + resolution: {integrity: sha512-jSSSDt4ZidNMggcLx8SaKsbGNEfIl0PHx/4mFEulorE7bpYLbN0d3pDW3eJ7Y5Z3yPhy3L3NaPCYyTUY7TuugQ==} engines: {node: '>=6.9.0'} peerDependencies: 
'@babel/core': ^7.0.0-0 - '@babel/plugin-transform-async-generator-functions@7.24.3': - resolution: {integrity: sha512-Qe26CMYVjpQxJ8zxM1340JFNjZaF+ISWpr1Kt/jGo+ZTUzKkfw/pphEWbRCb+lmSM6k/TOgfYLvmbHkUQ0asIg==} + '@babel/plugin-transform-async-generator-functions@7.24.6': + resolution: {integrity: sha512-VEP2o4iR2DqQU6KPgizTW2mnMx6BG5b5O9iQdrW9HesLkv8GIA8x2daXBQxw1MrsIkFQGA/iJ204CKoQ8UcnAA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-async-to-generator@7.24.1': - resolution: {integrity: sha512-AawPptitRXp1y0n4ilKcGbRYWfbbzFWz2NqNu7dacYDtFtz0CMjG64b3LQsb3KIgnf4/obcUL78hfaOS7iCUfw==} + '@babel/plugin-transform-async-to-generator@7.24.6': + resolution: {integrity: sha512-NTBA2SioI3OsHeIn6sQmhvXleSl9T70YY/hostQLveWs0ic+qvbA3fa0kwAwQ0OA/XGaAerNZRQGJyRfhbJK4g==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-block-scoped-functions@7.24.1': - resolution: {integrity: sha512-TWWC18OShZutrv9C6mye1xwtam+uNi2bnTOCBUd5sZxyHOiWbU6ztSROofIMrK84uweEZC219POICK/sTYwfgg==} + '@babel/plugin-transform-block-scoped-functions@7.24.6': + resolution: {integrity: sha512-XNW7jolYHW9CwORrZgA/97tL/k05qe/HL0z/qqJq1mdWhwwCM6D4BJBV7wAz9HgFziN5dTOG31znkVIzwxv+vw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-block-scoping@7.24.4': - resolution: {integrity: sha512-nIFUZIpGKDf9O9ttyRXpHFpKC+X3Y5mtshZONuEUYBomAKoM4y029Jr+uB1bHGPhNmK8YXHevDtKDOLmtRrp6g==} + '@babel/plugin-transform-block-scoping@7.24.6': + resolution: {integrity: sha512-S/t1Xh4ehW7sGA7c1j/hiOBLnEYCp/c2sEG4ZkL8kI1xX9tW2pqJTCHKtdhe/jHKt8nG0pFCrDHUXd4DvjHS9w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-class-properties@7.24.1': - resolution: {integrity: sha512-OMLCXi0NqvJfORTaPQBwqLXHhb93wkBKZ4aNwMl6WtehO7ar+cmp+89iPEQPqxAnxsOKTaMcs3POz3rKayJ72g==} + '@babel/plugin-transform-class-properties@7.24.6': + resolution: {integrity: 
sha512-j6dZ0Z2Z2slWLR3kt9aOmSIrBvnntWjMDN/TVcMPxhXMLmJVqX605CBRlcGI4b32GMbfifTEsdEjGjiE+j/c3A==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-class-static-block@7.24.4': - resolution: {integrity: sha512-B8q7Pz870Hz/q9UgP8InNpY01CSLDSCyqX7zcRuv3FcPl87A2G17lASroHWaCtbdIcbYzOZ7kWmXFKbijMSmFg==} + '@babel/plugin-transform-class-static-block@7.24.6': + resolution: {integrity: sha512-1QSRfoPI9RoLRa8Mnakc6v3e0gJxiZQTYrMfLn+mD0sz5+ndSzwymp2hDcYJTyT0MOn0yuWzj8phlIvO72gTHA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.12.0 - '@babel/plugin-transform-classes@7.24.1': - resolution: {integrity: sha512-ZTIe3W7UejJd3/3R4p7ScyyOoafetUShSf4kCqV0O7F/RiHxVj/wRaRnQlrGwflvcehNA8M42HkAiEDYZu2F1Q==} + '@babel/plugin-transform-classes@7.24.6': + resolution: {integrity: sha512-+fN+NO2gh8JtRmDSOB6gaCVo36ha8kfCW1nMq2Gc0DABln0VcHN4PrALDvF5/diLzIRKptC7z/d7Lp64zk92Fg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-computed-properties@7.24.1': - resolution: {integrity: sha512-5pJGVIUfJpOS+pAqBQd+QMaTD2vCL/HcePooON6pDpHgRp4gNRmzyHTPIkXntwKsq3ayUFVfJaIKPw2pOkOcTw==} + '@babel/plugin-transform-computed-properties@7.24.6': + resolution: {integrity: sha512-cRzPobcfRP0ZtuIEkA8QzghoUpSB3X3qSH5W2+FzG+VjWbJXExtx0nbRqwumdBN1x/ot2SlTNQLfBCnPdzp6kg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-destructuring@7.24.1': - resolution: {integrity: sha512-ow8jciWqNxR3RYbSNVuF4U2Jx130nwnBnhRw6N6h1bOejNkABmcI5X5oz29K4alWX7vf1C+o6gtKXikzRKkVdw==} + '@babel/plugin-transform-destructuring@7.24.6': + resolution: {integrity: sha512-YLW6AE5LQpk5npNXL7i/O+U9CE4XsBCuRPgyjl1EICZYKmcitV+ayuuUGMJm2lC1WWjXYszeTnIxF/dq/GhIZQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-dotall-regex@7.24.1': - resolution: {integrity: 
sha512-p7uUxgSoZwZ2lPNMzUkqCts3xlp8n+o05ikjy7gbtFJSt9gdU88jAmtfmOxHM14noQXBxfgzf2yRWECiNVhTCw==} + '@babel/plugin-transform-dotall-regex@7.24.6': + resolution: {integrity: sha512-rCXPnSEKvkm/EjzOtLoGvKseK+dS4kZwx1HexO3BtRtgL0fQ34awHn34aeSHuXtZY2F8a1X8xqBBPRtOxDVmcA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-duplicate-keys@7.24.1': - resolution: {integrity: sha512-msyzuUnvsjsaSaocV6L7ErfNsa5nDWL1XKNnDePLgmz+WdU4w/J8+AxBMrWfi9m4IxfL5sZQKUPQKDQeeAT6lA==} + '@babel/plugin-transform-duplicate-keys@7.24.6': + resolution: {integrity: sha512-/8Odwp/aVkZwPFJMllSbawhDAO3UJi65foB00HYnK/uXvvCPm0TAXSByjz1mpRmp0q6oX2SIxpkUOpPFHk7FLA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-dynamic-import@7.24.1': - resolution: {integrity: sha512-av2gdSTyXcJVdI+8aFZsCAtR29xJt0S5tas+Ef8NvBNmD1a+N/3ecMLeMBgfcK+xzsjdLDT6oHt+DFPyeqUbDA==} + '@babel/plugin-transform-dynamic-import@7.24.6': + resolution: {integrity: sha512-vpq8SSLRTBLOHUZHSnBqVo0AKX3PBaoPs2vVzYVWslXDTDIpwAcCDtfhUcHSQQoYoUvcFPTdC8TZYXu9ZnLT/w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-exponentiation-operator@7.24.1': - resolution: {integrity: sha512-U1yX13dVBSwS23DEAqU+Z/PkwE9/m7QQy8Y9/+Tdb8UWYaGNDYwTLi19wqIAiROr8sXVum9A/rtiH5H0boUcTw==} + '@babel/plugin-transform-exponentiation-operator@7.24.6': + resolution: {integrity: sha512-EemYpHtmz0lHE7hxxxYEuTYOOBZ43WkDgZ4arQ4r+VX9QHuNZC+WH3wUWmRNvR8ECpTRne29aZV6XO22qpOtdA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-export-namespace-from@7.24.1': - resolution: {integrity: sha512-Ft38m/KFOyzKw2UaJFkWG9QnHPG/Q/2SkOrRk4pNBPg5IPZ+dOxcmkK5IyuBcxiNPyyYowPGUReyBvrvZs7IlQ==} + '@babel/plugin-transform-export-namespace-from@7.24.6': + resolution: {integrity: sha512-inXaTM1SVrIxCkIJ5gqWiozHfFMStuGbGJAxZFBoHcRRdDP0ySLb3jH6JOwmfiinPwyMZqMBX+7NBDCO4z0NSA==} engines: {node: '>=6.9.0'} 
peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-flow-strip-types@7.24.1': - resolution: {integrity: sha512-iIYPIWt3dUmUKKE10s3W+jsQ3icFkw0JyRVyY1B7G4yK/nngAOHLVx8xlhA6b/Jzl/Y0nis8gjqhqKtRDQqHWQ==} + '@babel/plugin-transform-flow-strip-types@7.24.6': + resolution: {integrity: sha512-1l8b24NoCpaQ13Vi6FtLG1nv6kNoi8PWvQb1AYO7GHZDpFfBYc3lbXArx1lP2KRt8b4pej1eWc/zrRmsQTfOdQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-for-of@7.24.1': - resolution: {integrity: sha512-OxBdcnF04bpdQdR3i4giHZNZQn7cm8RQKcSwA17wAAqEELo1ZOwp5FFgeptWUQXFyT9kwHo10aqqauYkRZPCAg==} + '@babel/plugin-transform-for-of@7.24.6': + resolution: {integrity: sha512-n3Sf72TnqK4nw/jziSqEl1qaWPbCRw2CziHH+jdRYvw4J6yeCzsj4jdw8hIntOEeDGTmHVe2w4MVL44PN0GMzg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-function-name@7.24.1': - resolution: {integrity: sha512-BXmDZpPlh7jwicKArQASrj8n22/w6iymRnvHYYd2zO30DbE277JO20/7yXJT3QxDPtiQiOxQBbZH4TpivNXIxA==} + '@babel/plugin-transform-function-name@7.24.6': + resolution: {integrity: sha512-sOajCu6V0P1KPljWHKiDq6ymgqB+vfo3isUS4McqW1DZtvSVU2v/wuMhmRmkg3sFoq6GMaUUf8W4WtoSLkOV/Q==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-json-strings@7.24.1': - resolution: {integrity: sha512-U7RMFmRvoasscrIFy5xA4gIp8iWnWubnKkKuUGJjsuOH7GfbMkB+XZzeslx2kLdEGdOJDamEmCqOks6e8nv8DQ==} + '@babel/plugin-transform-json-strings@7.24.6': + resolution: {integrity: sha512-Uvgd9p2gUnzYJxVdBLcU0KurF8aVhkmVyMKW4MIY1/BByvs3EBpv45q01o7pRTVmTvtQq5zDlytP3dcUgm7v9w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-literals@7.24.1': - resolution: {integrity: sha512-zn9pwz8U7nCqOYIiBaOxoQOtYmMODXTJnkxG4AtX8fPmnCRYWBOHD0qcpwS9e2VDSp1zNJYpdnFMIKb8jmwu6g==} + '@babel/plugin-transform-literals@7.24.6': + resolution: {integrity: 
sha512-f2wHfR2HF6yMj+y+/y07+SLqnOSwRp8KYLpQKOzS58XLVlULhXbiYcygfXQxJlMbhII9+yXDwOUFLf60/TL5tw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-logical-assignment-operators@7.24.1': - resolution: {integrity: sha512-OhN6J4Bpz+hIBqItTeWJujDOfNP+unqv/NJgyhlpSqgBTPm37KkMmZV6SYcOj+pnDbdcl1qRGV/ZiIjX9Iy34w==} + '@babel/plugin-transform-logical-assignment-operators@7.24.6': + resolution: {integrity: sha512-EKaWvnezBCMkRIHxMJSIIylzhqK09YpiJtDbr2wsXTwnO0TxyjMUkaw4RlFIZMIS0iDj0KyIg7H7XCguHu/YDA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-member-expression-literals@7.24.1': - resolution: {integrity: sha512-4ojai0KysTWXzHseJKa1XPNXKRbuUrhkOPY4rEGeR+7ChlJVKxFa3H3Bz+7tWaGKgJAXUWKOGmltN+u9B3+CVg==} + '@babel/plugin-transform-member-expression-literals@7.24.6': + resolution: {integrity: sha512-9g8iV146szUo5GWgXpRbq/GALTnY+WnNuRTuRHWWFfWGbP9ukRL0aO/jpu9dmOPikclkxnNsjY8/gsWl6bmZJQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-modules-amd@7.24.1': - resolution: {integrity: sha512-lAxNHi4HVtjnHd5Rxg3D5t99Xm6H7b04hUS7EHIXcUl2EV4yl1gWdqZrNzXnSrHveL9qMdbODlLF55mvgjAfaQ==} + '@babel/plugin-transform-modules-amd@7.24.6': + resolution: {integrity: sha512-eAGogjZgcwqAxhyFgqghvoHRr+EYRQPFjUXrTYKBRb5qPnAVxOOglaxc4/byHqjvq/bqO2F3/CGwTHsgKJYHhQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-modules-commonjs@7.24.1': - resolution: {integrity: sha512-szog8fFTUxBfw0b98gEWPaEqF42ZUD/T3bkynW/wtgx2p/XCP55WEsb+VosKceRSd6njipdZvNogqdtI4Q0chw==} + '@babel/plugin-transform-modules-commonjs@7.24.6': + resolution: {integrity: sha512-JEV8l3MHdmmdb7S7Cmx6rbNEjRCgTQMZxllveHO0mx6uiclB0NflCawlQQ6+o5ZrwjUBYPzHm2XoK4wqGVUFuw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-modules-systemjs@7.24.1': - resolution: {integrity: 
sha512-mqQ3Zh9vFO1Tpmlt8QPnbwGHzNz3lpNEMxQb1kAemn/erstyqw1r9KeOlOfo3y6xAnFEcOv2tSyrXfmMk+/YZA==} + '@babel/plugin-transform-modules-systemjs@7.24.6': + resolution: {integrity: sha512-xg1Z0J5JVYxtpX954XqaaAT6NpAY6LtZXvYFCJmGFJWwtlz2EmJoR8LycFRGNE8dBKizGWkGQZGegtkV8y8s+w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-modules-umd@7.24.1': - resolution: {integrity: sha512-tuA3lpPj+5ITfcCluy6nWonSL7RvaG0AOTeAuvXqEKS34lnLzXpDb0dcP6K8jD0zWZFNDVly90AGFJPnm4fOYg==} + '@babel/plugin-transform-modules-umd@7.24.6': + resolution: {integrity: sha512-esRCC/KsSEUvrSjv5rFYnjZI6qv4R1e/iHQrqwbZIoRJqk7xCvEUiN7L1XrmW5QSmQe3n1XD88wbgDTWLbVSyg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-named-capturing-groups-regex@7.22.5': - resolution: {integrity: sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ==} + '@babel/plugin-transform-named-capturing-groups-regex@7.24.6': + resolution: {integrity: sha512-6DneiCiu91wm3YiNIGDWZsl6GfTTbspuj/toTEqLh9d4cx50UIzSdg+T96p8DuT7aJOBRhFyaE9ZvTHkXrXr6Q==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/plugin-transform-new-target@7.24.1': - resolution: {integrity: sha512-/rurytBM34hYy0HKZQyA0nHbQgQNFm4Q/BOc9Hflxi2X3twRof7NaE5W46j4kQitm7SvACVRXsa6N/tSZxvPug==} + '@babel/plugin-transform-new-target@7.24.6': + resolution: {integrity: sha512-f8liz9JG2Va8A4J5ZBuaSdwfPqN6axfWRK+y66fjKYbwf9VBLuq4WxtinhJhvp1w6lamKUwLG0slK2RxqFgvHA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-nullish-coalescing-operator@7.24.1': - resolution: {integrity: sha512-iQ+caew8wRrhCikO5DrUYx0mrmdhkaELgFa+7baMcVuhxIkN7oxt06CZ51D65ugIb1UWRQ8oQe+HXAVM6qHFjw==} + '@babel/plugin-transform-nullish-coalescing-operator@7.24.6': + resolution: {integrity: sha512-+QlAiZBMsBK5NqrBWFXCYeXyiU1y7BQ/OYaiPAcQJMomn5Tyg+r5WuVtyEuvTbpV7L25ZSLfE+2E9ywj4FD48A==} engines: {node: 
'>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-numeric-separator@7.24.1': - resolution: {integrity: sha512-7GAsGlK4cNL2OExJH1DzmDeKnRv/LXq0eLUSvudrehVA5Rgg4bIrqEUW29FbKMBRT0ztSqisv7kjP+XIC4ZMNw==} + '@babel/plugin-transform-numeric-separator@7.24.6': + resolution: {integrity: sha512-6voawq8T25Jvvnc4/rXcWZQKKxUNZcKMS8ZNrjxQqoRFernJJKjE3s18Qo6VFaatG5aiX5JV1oPD7DbJhn0a4Q==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-object-rest-spread@7.24.1': - resolution: {integrity: sha512-XjD5f0YqOtebto4HGISLNfiNMTTs6tbkFf2TOqJlYKYmbo+mN9Dnpl4SRoofiziuOWMIyq3sZEUqLo3hLITFEA==} + '@babel/plugin-transform-object-rest-spread@7.24.6': + resolution: {integrity: sha512-OKmi5wiMoRW5Smttne7BwHM8s/fb5JFs+bVGNSeHWzwZkWXWValR1M30jyXo1s/RaqgwwhEC62u4rFH/FBcBPg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-object-super@7.24.1': - resolution: {integrity: sha512-oKJqR3TeI5hSLRxudMjFQ9re9fBVUU0GICqM3J1mi8MqlhVr6hC/ZN4ttAyMuQR6EZZIY6h/exe5swqGNNIkWQ==} + '@babel/plugin-transform-object-super@7.24.6': + resolution: {integrity: sha512-N/C76ihFKlZgKfdkEYKtaRUtXZAgK7sOY4h2qrbVbVTXPrKGIi8aww5WGe/+Wmg8onn8sr2ut6FXlsbu/j6JHg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-optional-catch-binding@7.24.1': - resolution: {integrity: sha512-oBTH7oURV4Y+3EUrf6cWn1OHio3qG/PVwO5J03iSJmBg6m2EhKjkAu/xuaXaYwWW9miYtvbWv4LNf0AmR43LUA==} + '@babel/plugin-transform-optional-catch-binding@7.24.6': + resolution: {integrity: sha512-L5pZ+b3O1mSzJ71HmxSCmTVd03VOT2GXOigug6vDYJzE5awLI7P1g0wFcdmGuwSDSrQ0L2rDOe/hHws8J1rv3w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-optional-chaining@7.24.1': - resolution: {integrity: sha512-n03wmDt+987qXwAgcBlnUUivrZBPZ8z1plL0YvgQalLm+ZE5BMhGm94jhxXtA1wzv1Cu2aaOv1BM9vbVttrzSg==} + '@babel/plugin-transform-optional-chaining@7.24.6': + resolution: 
{integrity: sha512-cHbqF6l1QP11OkYTYQ+hhVx1E017O5ZcSPXk9oODpqhcAD1htsWG2NpHrrhthEO2qZomLK0FXS+u7NfrkF5aOQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-parameters@7.24.1': - resolution: {integrity: sha512-8Jl6V24g+Uw5OGPeWNKrKqXPDw2YDjLc53ojwfMcKwlEoETKU9rU0mHUtcg9JntWI/QYzGAXNWEcVHZ+fR+XXg==} + '@babel/plugin-transform-parameters@7.24.6': + resolution: {integrity: sha512-ST7guE8vLV+vI70wmAxuZpIKzVjvFX9Qs8bl5w6tN/6gOypPWUmMQL2p7LJz5E63vEGrDhAiYetniJFyBH1RkA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-private-methods@7.24.1': - resolution: {integrity: sha512-tGvisebwBO5em4PaYNqt4fkw56K2VALsAbAakY0FjTYqJp7gfdrgr7YX76Or8/cpik0W6+tj3rZ0uHU9Oil4tw==} + '@babel/plugin-transform-private-methods@7.24.6': + resolution: {integrity: sha512-T9LtDI0BgwXOzyXrvgLTT8DFjCC/XgWLjflczTLXyvxbnSR/gpv0hbmzlHE/kmh9nOvlygbamLKRo6Op4yB6aw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-private-property-in-object@7.24.1': - resolution: {integrity: sha512-pTHxDVa0BpUbvAgX3Gat+7cSciXqUcY9j2VZKTbSB6+VQGpNgNO9ailxTGHSXlqOnX1Hcx1Enme2+yv7VqP9bg==} + '@babel/plugin-transform-private-property-in-object@7.24.6': + resolution: {integrity: sha512-Qu/ypFxCY5NkAnEhCF86Mvg3NSabKsh/TPpBVswEdkGl7+FbsYHy1ziRqJpwGH4thBdQHh8zx+z7vMYmcJ7iaQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-property-literals@7.24.1': - resolution: {integrity: sha512-LetvD7CrHmEx0G442gOomRr66d7q8HzzGGr4PMHGr+5YIm6++Yke+jxj246rpvsbyhJwCLxcTn6zW1P1BSenqA==} + '@babel/plugin-transform-property-literals@7.24.6': + resolution: {integrity: sha512-oARaglxhRsN18OYsnPTpb8TcKQWDYNsPNmTnx5++WOAsUJ0cSC/FZVlIJCKvPbU4yn/UXsS0551CFKJhN0CaMw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-display-name@7.24.1': - resolution: {integrity: 
sha512-mvoQg2f9p2qlpDQRBC7M3c3XTr0k7cp/0+kFKKO/7Gtu0LSw16eKB+Fabe2bDT/UpsyasTBBkAnbdsLrkD5XMw==} + '@babel/plugin-transform-react-display-name@7.24.6': + resolution: {integrity: sha512-/3iiEEHDsJuj9QU09gbyWGSUxDboFcD7Nj6dnHIlboWSodxXAoaY/zlNMHeYAC0WsERMqgO9a7UaM77CsYgWcg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-jsx-development@7.22.5': - resolution: {integrity: sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A==} + '@babel/plugin-transform-react-jsx-development@7.24.6': + resolution: {integrity: sha512-F7EsNp5StNDouSSdYyDSxh4J+xvj/JqG+Cb6s2fA+jCyHOzigG5vTwgH8tU2U8Voyiu5zCG9bAK49wTr/wPH0w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-jsx-self@7.24.1': - resolution: {integrity: sha512-kDJgnPujTmAZ/9q2CN4m2/lRsUUPDvsG3+tSHWUJIzMGTt5U/b/fwWd3RO3n+5mjLrsBrVa5eKFRVSQbi3dF1w==} + '@babel/plugin-transform-react-jsx-self@7.24.6': + resolution: {integrity: sha512-FfZfHXtQ5jYPQsCRyLpOv2GeLIIJhs8aydpNh39vRDjhD411XcfWDni5i7OjP/Rs8GAtTn7sWFFELJSHqkIxYg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-jsx-source@7.24.1': - resolution: {integrity: sha512-1v202n7aUq4uXAieRTKcwPzNyphlCuqHHDcdSNc+vdhoTEZcFMh+L5yZuCmGaIO7bs1nJUNfHB89TZyoL48xNA==} + '@babel/plugin-transform-react-jsx-source@7.24.6': + resolution: {integrity: sha512-BQTBCXmFRreU3oTUXcGKuPOfXAGb1liNY4AvvFKsOBAJ89RKcTsIrSsnMYkj59fNa66OFKnSa4AJZfy5Y4B9WA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-jsx@7.23.4': - resolution: {integrity: sha512-5xOpoPguCZCRbo/JeHlloSkTA8Bld1J/E1/kLfD1nsuiW1m8tduTA1ERCgIZokDflX/IBzKcqR3l7VlRgiIfHA==} + '@babel/plugin-transform-react-jsx@7.24.6': + resolution: {integrity: sha512-pCtPHhpRZHfwdA5G1Gpk5mIzMA99hv0R8S/Ket50Rw+S+8hkt3wBWqdqHaPw0CuUYxdshUgsPiLQ5fAs4ASMhw==} engines: {node: '>=6.9.0'} peerDependencies: 
'@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-pure-annotations@7.24.1': - resolution: {integrity: sha512-+pWEAaDJvSm9aFvJNpLiM2+ktl2Sn2U5DdyiWdZBxmLc6+xGt88dvFqsHiAiDS+8WqUwbDfkKz9jRxK3M0k+kA==} + '@babel/plugin-transform-react-pure-annotations@7.24.6': + resolution: {integrity: sha512-0HoDQlFJJkXRyV2N+xOpUETbKHcouSwijRQbKWVtxsPoq5bbB30qZag9/pSc5xcWVYjTHlLsBsY+hZDnzQTPNw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-regenerator@7.24.1': - resolution: {integrity: sha512-sJwZBCzIBE4t+5Q4IGLaaun5ExVMRY0lYwos/jNecjMrVCygCdph3IKv0tkP5Fc87e/1+bebAmEAGBfnRD+cnw==} + '@babel/plugin-transform-regenerator@7.24.6': + resolution: {integrity: sha512-SMDxO95I8WXRtXhTAc8t/NFQUT7VYbIWwJCJgEli9ml4MhqUMh4S6hxgH6SmAC3eAQNWCDJFxcFeEt9w2sDdXg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-reserved-words@7.24.1': - resolution: {integrity: sha512-JAclqStUfIwKN15HrsQADFgeZt+wexNQ0uLhuqvqAUFoqPMjEcFCYZBhq0LUdz6dZK/mD+rErhW71fbx8RYElg==} + '@babel/plugin-transform-reserved-words@7.24.6': + resolution: {integrity: sha512-DcrgFXRRlK64dGE0ZFBPD5egM2uM8mgfrvTMOSB2yKzOtjpGegVYkzh3s1zZg1bBck3nkXiaOamJUqK3Syk+4A==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-runtime@7.24.3': - resolution: {integrity: sha512-J0BuRPNlNqlMTRJ72eVptpt9VcInbxO6iP3jaxr+1NPhC0UkKL+6oeX6VXMEYdADnuqmMmsBspt4d5w8Y/TCbQ==} + '@babel/plugin-transform-runtime@7.24.6': + resolution: {integrity: sha512-W3gQydMb0SY99y/2lV0Okx2xg/8KzmZLQsLaiCmwNRl1kKomz14VurEm+2TossUb+sRvBCnGe+wx8KtIgDtBbQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-shorthand-properties@7.24.1': - resolution: {integrity: sha512-LyjVB1nsJ6gTTUKRjRWx9C1s9hE7dLfP/knKdrfeH9UPtAGjYGgxIbFfx7xyLIEWs7Xe1Gnf8EWiUqfjLhInZA==} + '@babel/plugin-transform-shorthand-properties@7.24.6': + resolution: {integrity: 
sha512-xnEUvHSMr9eOWS5Al2YPfc32ten7CXdH7Zwyyk7IqITg4nX61oHj+GxpNvl+y5JHjfN3KXE2IV55wAWowBYMVw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-spread@7.24.1': - resolution: {integrity: sha512-KjmcIM+fxgY+KxPVbjelJC6hrH1CgtPmTvdXAfn3/a9CnWGSTY7nH4zm5+cjmWJybdcPSsD0++QssDsjcpe47g==} + '@babel/plugin-transform-spread@7.24.6': + resolution: {integrity: sha512-h/2j7oIUDjS+ULsIrNZ6/TKG97FgmEk1PXryk/HQq6op4XUUUwif2f69fJrzK0wza2zjCS1xhXmouACaWV5uPA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-sticky-regex@7.24.1': - resolution: {integrity: sha512-9v0f1bRXgPVcPrngOQvLXeGNNVLc8UjMVfebo9ka0WF3/7+aVUHmaJVT3sa0XCzEFioPfPHZiOcYG9qOsH63cw==} + '@babel/plugin-transform-sticky-regex@7.24.6': + resolution: {integrity: sha512-fN8OcTLfGmYv7FnDrsjodYBo1DhPL3Pze/9mIIE2MGCT1KgADYIOD7rEglpLHZj8PZlC/JFX5WcD+85FLAQusw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-template-literals@7.24.1': - resolution: {integrity: sha512-WRkhROsNzriarqECASCNu/nojeXCDTE/F2HmRgOzi7NGvyfYGq1NEjKBK3ckLfRgGc6/lPAqP0vDOSw3YtG34g==} + '@babel/plugin-transform-template-literals@7.24.6': + resolution: {integrity: sha512-BJbEqJIcKwrqUP+KfUIkxz3q8VzXe2R8Wv8TaNgO1cx+nNavxn/2+H8kp9tgFSOL6wYPPEgFvU6IKS4qoGqhmg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-typeof-symbol@7.24.1': - resolution: {integrity: sha512-CBfU4l/A+KruSUoW+vTQthwcAdwuqbpRNB8HQKlZABwHRhsdHZ9fezp4Sn18PeAlYxTNiLMlx4xUBV3AWfg1BA==} + '@babel/plugin-transform-typeof-symbol@7.24.6': + resolution: {integrity: sha512-IshCXQ+G9JIFJI7bUpxTE/oA2lgVLAIK8q1KdJNoPXOpvRaNjMySGuvLfBw/Xi2/1lLo953uE8hyYSDW3TSYig==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-typescript@7.24.4': - resolution: {integrity: sha512-79t3CQ8+oBGk/80SQ8MN3Bs3obf83zJ0YZjDmDaEZN8MqhMI760apl5z6a20kFeMXBwJX99VpKT8CKxEBp5H1g==} + 
'@babel/plugin-transform-typescript@7.24.6': + resolution: {integrity: sha512-H0i+hDLmaYYSt6KU9cZE0gb3Cbssa/oxWis7PX4ofQzbvsfix9Lbh8SRk7LCPDlLWJHUiFeHU0qRRpF/4Zv7mQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-unicode-escapes@7.24.1': - resolution: {integrity: sha512-RlkVIcWT4TLI96zM660S877E7beKlQw7Ig+wqkKBiWfj0zH5Q4h50q6er4wzZKRNSYpfo6ILJ+hrJAGSX2qcNw==} + '@babel/plugin-transform-unicode-escapes@7.24.6': + resolution: {integrity: sha512-bKl3xxcPbkQQo5eX9LjjDpU2xYHeEeNQbOhj0iPvetSzA+Tu9q/o5lujF4Sek60CM6MgYvOS/DJuwGbiEYAnLw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-unicode-property-regex@7.24.1': - resolution: {integrity: sha512-Ss4VvlfYV5huWApFsF8/Sq0oXnGO+jB+rijFEFugTd3cwSObUSnUi88djgR5528Csl0uKlrI331kRqe56Ov2Ng==} + '@babel/plugin-transform-unicode-property-regex@7.24.6': + resolution: {integrity: sha512-8EIgImzVUxy15cZiPii9GvLZwsy7Vxc+8meSlR3cXFmBIl5W5Tn9LGBf7CDKkHj4uVfNXCJB8RsVfnmY61iedA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-unicode-regex@7.24.1': - resolution: {integrity: sha512-2A/94wgZgxfTsiLaQ2E36XAOdcZmGAaEEgVmxQWwZXWkGhvoHbaqXcKnU8zny4ycpu3vNqg0L/PcCiYtHtA13g==} + '@babel/plugin-transform-unicode-regex@7.24.6': + resolution: {integrity: sha512-pssN6ExsvxaKU638qcWb81RrvvgZom3jDgU/r5xFZ7TONkZGFf4MhI2ltMb8OcQWhHyxgIavEU+hgqtbKOmsPA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-unicode-sets-regex@7.24.1': - resolution: {integrity: sha512-fqj4WuzzS+ukpgerpAoOnMfQXwUHFxXUZUE84oL2Kao2N8uSlvcpnAidKASgsNgzZHBsHWvcm8s9FPWUhAb8fA==} + '@babel/plugin-transform-unicode-sets-regex@7.24.6': + resolution: {integrity: sha512-quiMsb28oXWIDK0gXLALOJRXLgICLiulqdZGOaPPd0vRT7fQp74NtdADAVu+D8s00C+0Xs0MxVP0VKF/sZEUgw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/preset-env@7.24.4': - resolution: {integrity: 
sha512-7Kl6cSmYkak0FK/FXjSEnLJ1N9T/WA2RkMhu17gZ/dsxKJUuTYNIylahPTzqpLyJN4WhDif8X0XK1R8Wsguo/A==} + '@babel/preset-env@7.24.6': + resolution: {integrity: sha512-CrxEAvN7VxfjOG8JNF2Y/eMqMJbZPZ185amwGUBp8D9USK90xQmv7dLdFSa+VbD7fdIqcy/Mfv7WtzG8+/qxKg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/preset-flow@7.24.1': - resolution: {integrity: sha512-sWCV2G9pcqZf+JHyv/RyqEIpFypxdCSxWIxQjpdaQxenNog7cN1pr76hg8u0Fz8Qgg0H4ETkGcJnXL8d4j0PPA==} + '@babel/preset-flow@7.24.6': + resolution: {integrity: sha512-huoe0T1Qs9fQhMWbmqE/NHUeZbqmHDsN6n/jYvPcUUHfuKiPV32C9i8tDhMbQ1DEKTjbBP7Rjm3nSLwlB2X05g==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1475,20 +1619,20 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 || ^8.0.0-0 <8.0.0 - '@babel/preset-react@7.24.1': - resolution: {integrity: sha512-eFa8up2/8cZXLIpkafhaADTXSnl7IsUFCYenRWrARBz0/qZwcT0RBXpys0LJU4+WfPoF2ZG6ew6s2V6izMCwRA==} + '@babel/preset-react@7.24.6': + resolution: {integrity: sha512-8mpzh1bWvmINmwM3xpz6ahu57mNaWavMm+wBNjQ4AFu1nghKBiIRET7l/Wmj4drXany/BBGjJZngICcD98F1iw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/preset-typescript@7.24.1': - resolution: {integrity: sha512-1DBaMmRDpuYQBPWD8Pf/WEwCrtgRHxsZnP4mIy9G/X+hFfbI47Q2G4t1Paakld84+qsk2fSsUPMKg71jkoOOaQ==} + '@babel/preset-typescript@7.24.6': + resolution: {integrity: sha512-U10aHPDnokCFRXgyT/MaIRTivUu2K/mu0vJlwRS9LxJmJet+PFQNKpggPyFCUtC6zWSBPjvxjnpNkAn3Uw2m5w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/register@7.23.7': - resolution: {integrity: sha512-EjJeB6+kvpk+Y5DAkEAmbOBEFkh9OASx0huoEkqYTFxAZHzOAX2Oh5uwAUuL2rUddqfM0SA+KPXV2TbzoZ2kvQ==} + '@babel/register@7.24.6': + resolution: {integrity: sha512-WSuFCc2wCqMeXkz/i3yfAAsxwWflEgbVkZzivgAmXl/MxrXeoYFZOOPllbC8R8WTF7u61wSRQtDVZ1879cdu6w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -1500,24 +1644,24 @@ packages: resolution: {integrity: 
sha512-21t/fkKLMZI4pqP2wlmsQAWnYW1PDyKyyUV4vCi+B25ydmdaYTKXPwCj0BzSUnZf4seIiYvSA3jcZ3gdsMFkLQ==} engines: {node: '>=6.9.0'} - '@babel/runtime@7.24.4': - resolution: {integrity: sha512-dkxf7+hn8mFBwKjs9bvBlArzLVxVbS8usaPUDd5p2a9JCL9tB8OaOVN1isD4+Xyk4ns89/xeOmbQvgdK7IIVdA==} + '@babel/runtime@7.24.6': + resolution: {integrity: sha512-Ja18XcETdEl5mzzACGd+DKgaGJzPTCow7EglgwTmHdwokzDFYh/MHua6lU6DV/hjF2IaOJ4oX2nqnjG7RElKOw==} engines: {node: '>=6.9.0'} '@babel/template@7.22.5': resolution: {integrity: sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==} engines: {node: '>=6.9.0'} - '@babel/template@7.24.0': - resolution: {integrity: sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==} + '@babel/template@7.24.6': + resolution: {integrity: sha512-3vgazJlLwNXi9jhrR1ef8qiB65L1RK90+lEQwv4OxveHnqC3BfmnHdgySwRLzf6akhlOYenT+b7AfWq+a//AHw==} engines: {node: '>=6.9.0'} '@babel/traverse@7.17.3': resolution: {integrity: sha512-5irClVky7TxRWIRtxlh2WPUUOLhcPN06AGgaQSB8AEwuyEBgJVuJ5imdHm5zxk8w0QS5T+tDfnDxAlhWjpb7cw==} engines: {node: '>=6.9.0'} - '@babel/traverse@7.24.1': - resolution: {integrity: sha512-xuU6o9m68KeqZbQuDt2TcKSxUw/mrsvavlEqQ1leZ/B+C9tk6E4sRWy97WaXgvq5E+nU3cXMxv3WKOCanVMCmQ==} + '@babel/traverse@7.24.6': + resolution: {integrity: sha512-OsNjaJwT9Zn8ozxcfoBc+RaHdj3gFmCmYoQLUII1o6ZrUwku0BMg80FoOTPx+Gi6XhcQxAYE4xyjPTo4SxEQqw==} engines: {node: '>=6.9.0'} '@babel/types@7.17.0': @@ -1532,63 +1676,67 @@ packages: resolution: {integrity: sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==} engines: {node: '>=6.9.0'} - '@babel/types@7.24.0': - resolution: {integrity: sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==} + '@babel/types@7.24.6': + resolution: {integrity: sha512-WaMsgi6Q8zMgMth93GvWPXkhAIEobfsIkLTacoVZoK1J0CevIPGYY2Vo5YvJGqyHqXM6P4ppOYGsIRU8MM9pFQ==} engines: {node: '>=6.9.0'} 
'@balena/dockerignore@1.0.2': resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} - '@cloudflare/workers-types@4.20230904.0': - resolution: {integrity: sha512-IX4oJCe14ctblSPZBlW64BVZ9nYLUo6sD2I5gu3hX0ywByYWm1OuoKm9Xb/Zpbj8Ph18Z7Ryii6u2/ocnncXdA==} + '@cloudflare/workers-types@4.20240512.0': + resolution: {integrity: sha512-o2yTEWg+YK/I1t/Me+dA0oarO0aCbjibp6wSeaw52DSE9tDyKJ7S+Qdyw/XsMrKn4t8kF6f/YOba+9O4MJfW9w==} + + '@cloudflare/workers-types@4.20240524.0': + resolution: {integrity: sha512-GpSr4uE7y39DU9f0+wmrL76xd03wn0jy1ClITaa3ZZltKjirAV8TW1GzHrvvKyVGx6u3lekrFnB1HzVHsCYHDQ==} '@colors/colors@1.5.0': resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} engines: {node: '>=0.1.90'} - '@dprint/darwin-arm64@0.45.0': - resolution: {integrity: sha512-pkSSmixIKXr5t32bhXIUbpIBm8F8uhsJcUUvfkFNsRbQvNwRp71ribZpE8dKl0ZFOlAFeWD6WLE8smp/QtiGUA==} + '@cspotcode/source-map-support@0.8.1': + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + + '@dprint/darwin-arm64@0.46.3': + resolution: {integrity: sha512-1ycDpGvclGHF3UG5V6peymPDg6ouNTqM6BjhVELQ6zwr+X98AMhq/1slgO8hwHtPcaS5qhTAS+PkzOmBJRegow==} cpu: [arm64] os: [darwin] - '@dprint/darwin-x64@0.45.0': - resolution: {integrity: sha512-PHcXSrRO53KH9N+YPbPtr40NnDo2t7hO7KLMfl2ktRNLjrmKg6F8XDDsr2C7Z11k3jyEEU2Jq8hhpaKHwNapmQ==} + '@dprint/darwin-x64@0.46.3': + resolution: {integrity: sha512-v5IpLmrY836Q5hJAxZuX097ZNQvoZgO6JKO4bK4l6XDhhHAw2XTIUr41+FM5r36ENxyASMk0NpHjhcHtih3o0g==} cpu: [x64] os: [darwin] - '@dprint/linux-arm64-glibc@0.45.0': - resolution: {integrity: sha512-NgIpvZHpiQaY4DxSygxknxBtvKE2KLK9dEbUNKNE098yTHhGq7ouPsoM7RtsO34RHJ3tEZLLJEuBHn20XP8LMg==} + '@dprint/linux-arm64-glibc@0.46.3': + resolution: {integrity: 
sha512-9P13g1vgV8RfQH2qBGa8YAfaOeWA42RIhj7lmWRpkDFtwau96reMKwnBBn8bHUnc5e6bSsbPUOMb/X1KMUKz/g==} cpu: [arm64] os: [linux] - '@dprint/linux-arm64-musl@0.45.0': - resolution: {integrity: sha512-Y8p+FC0RNyKCGQjy99Uh1LSPrlQtUTvo4brdvU1THF3pyWu6Bg1p6NiP5a6SjE/6t9CMKZJz39zPreQtnDkSDA==} + '@dprint/linux-arm64-musl@0.46.3': + resolution: {integrity: sha512-AAcdcMSZ6DEIoY9E0xQHjkZP+THP7EWsQge4TWzglSIjzn31YltglHAGYFcLB4CTJYpF0NsFDNFktzgkO+s0og==} cpu: [arm64] os: [linux] - '@dprint/linux-x64-glibc@0.45.0': - resolution: {integrity: sha512-u03NCZIpJhE5gIl9Q7jNL4sOPBFd/8BLVBiuLoLtbiTZQ+NNudHKgGNATJBU67q1MKpqKnt8/gQm139cJkHhrw==} + '@dprint/linux-x64-glibc@0.46.3': + resolution: {integrity: sha512-c5cQ3G1rC64nBZ8Pd2LGWwzkEk4D7Ax9NrBbwYmNPvs6mFbGlJPC1+RD95x2WwIrIlMIciLG+Kxmt25PzBphmg==} cpu: [x64] os: [linux] - '@dprint/linux-x64-musl@0.45.0': - resolution: {integrity: sha512-DQN8LPtxismkeU1X+sQywa80kWwCBcpQh9fXoJcvTEHrgzHBqbG2SEsUZpM12oKEua1KE/iBh+vgZ+4I3TdI2A==} + '@dprint/linux-x64-musl@0.46.3': + resolution: {integrity: sha512-ONtk2QtLcV0TqWOCOqzUFQixgk3JC+vnJLB5L6tQwT7BX5LzeircfE/1f4dg459iqejNC9MBXZkHnXqabvWSow==} cpu: [x64] os: [linux] - '@dprint/win32-x64@0.45.0': - resolution: {integrity: sha512-aZHIWG2jIlEp4BER1QG6YYqPd6TxT9S77AeUkWJixNiMEo+33mPRVCBcugRWI/WJWveX8yWFVXkToORtnSFeEA==} + '@dprint/win32-x64@0.46.3': + resolution: {integrity: sha512-xvj4DSEilf0gGdT7CqnwNEgfWNuWqT6eIBxHDEUbmcn1vZ7IwirtqRq/nm3lmYtQaJ4EbtMQZvACHZwxC7G96w==} cpu: [x64] os: [win32] '@drizzle-team/studio@0.0.5': resolution: {integrity: sha512-ps5qF0tMxWRVu+V5gvCRrQNqlY92aTnIKdq27gm9LZMSdaKYZt6AVvSK1dlUMzs6Rt0Jm80b+eWct6xShBKhIw==} - '@electric-sql/pglite@0.1.1': - resolution: {integrity: sha512-7tJNIJBXuiuVl6Y9ehwv9mTlQlPeQbQ7wIKn49eorToPlNnkYnBzVWpOOTkNqv6Xu4dz75vl3S/9BmlfqCqM1w==} - - '@esbuild-kit/cjs-loader@2.4.2': - resolution: {integrity: sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} + '@electric-sql/pglite@0.1.5': + resolution: {integrity: 
sha512-eymv4ONNvoPZQTvOQIi5dbpR+J5HzEv0qQH9o/y3gvNheJV/P/NFcrbsfJZYTsDKoq7DKrTiFNexsRkJKy8x9Q==} '@esbuild-kit/core-utils@3.1.0': resolution: {integrity: sha512-Uuk8RpCg/7fdHSceR1M6XbSZFSuMrxcePFuGgyvsBn+u339dk5OeL4jv2EojwTN2st/unJGsVm4qHWjWNmJ/tw==} @@ -1596,6 +1744,18 @@ packages: '@esbuild-kit/esm-loader@2.5.5': resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + '@esbuild/aix-ppc64@0.20.2': + resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/aix-ppc64@0.21.5': + resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.17.19': resolution: {integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==} engines: {node: '>=12'} @@ -1608,6 +1768,18 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.20.2': + resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.21.5': + resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.17.19': resolution: {integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==} engines: {node: '>=12'} @@ -1620,6 +1792,18 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-arm@0.20.2': + resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + 
'@esbuild/android-arm@0.21.5': + resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.17.19': resolution: {integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==} engines: {node: '>=12'} @@ -1632,6 +1816,18 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.20.2': + resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.21.5': + resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.17.19': resolution: {integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==} engines: {node: '>=12'} @@ -1644,6 +1840,18 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.20.2': + resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-arm64@0.21.5': + resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.17.19': resolution: {integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==} engines: {node: '>=12'} @@ -1656,6 +1864,18 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.20.2': + resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + 
'@esbuild/darwin-x64@0.21.5': + resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.17.19': resolution: {integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==} engines: {node: '>=12'} @@ -1668,6 +1888,18 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.20.2': + resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.21.5': + resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.17.19': resolution: {integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==} engines: {node: '>=12'} @@ -1680,6 +1912,18 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.20.2': + resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.21.5': + resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.17.19': resolution: {integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==} engines: {node: '>=12'} @@ -1692,6 +1936,18 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.20.2': + resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + 
'@esbuild/linux-arm64@0.21.5': + resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.17.19': resolution: {integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==} engines: {node: '>=12'} @@ -1704,6 +1960,18 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.20.2': + resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.21.5': + resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.17.19': resolution: {integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==} engines: {node: '>=12'} @@ -1716,6 +1984,18 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.20.2': + resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.21.5': + resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.14.54': resolution: {integrity: sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==} engines: {node: '>=12'} @@ -1734,6 +2014,18 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.20.2': + resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + 
'@esbuild/linux-loong64@0.21.5': + resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.17.19': resolution: {integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==} engines: {node: '>=12'} @@ -1746,8 +2038,20 @@ packages: cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.17.19': - resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==} + '@esbuild/linux-mips64el@0.20.2': + resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.21.5': + resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.17.19': + resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==} engines: {node: '>=12'} cpu: [ppc64] os: [linux] @@ -1758,6 +2062,18 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.20.2': + resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.21.5': + resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.17.19': resolution: {integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==} engines: {node: '>=12'} @@ -1770,6 +2086,18 @@ packages: cpu: [riscv64] os: [linux] + 
'@esbuild/linux-riscv64@0.20.2': + resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.21.5': + resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.17.19': resolution: {integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==} engines: {node: '>=12'} @@ -1782,6 +2110,18 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.20.2': + resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.21.5': + resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.17.19': resolution: {integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==} engines: {node: '>=12'} @@ -1794,6 +2134,18 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.20.2': + resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.21.5': + resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + '@esbuild/netbsd-x64@0.17.19': resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==} engines: {node: '>=12'} @@ -1806,6 +2158,18 @@ packages: cpu: [x64] os: [netbsd] + 
'@esbuild/netbsd-x64@0.20.2': + resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.21.5': + resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + '@esbuild/openbsd-x64@0.17.19': resolution: {integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==} engines: {node: '>=12'} @@ -1818,6 +2182,18 @@ packages: cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.20.2': + resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.21.5': + resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + '@esbuild/sunos-x64@0.17.19': resolution: {integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==} engines: {node: '>=12'} @@ -1830,6 +2206,18 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.20.2': + resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.21.5': + resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.17.19': resolution: {integrity: sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==} engines: {node: '>=12'} @@ -1842,6 +2230,18 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.20.2': + 
resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.21.5': + resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.17.19': resolution: {integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==} engines: {node: '>=12'} @@ -1854,6 +2254,18 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.20.2': + resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.21.5': + resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-x64@0.17.19': resolution: {integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==} engines: {node: '>=12'} @@ -1866,6 +2278,18 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.20.2': + resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.21.5': + resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.4.0': resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -1884,8 +2308,8 @@ packages: resolution: {integrity: 
sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@eslint/eslintrc@3.0.2': - resolution: {integrity: sha512-wV19ZEGEMAC1eHgrS7UQPqsdEiCIbTKTasEfcXAigzoXICcqZSjBZEHlZwNVvKg6UBCjSlos84XiLqsRJnIcIg==} + '@eslint/eslintrc@3.1.0': + resolution: {integrity: sha512-4Bfj15dVJdoy3RfZmmo86RK1Fwzn6SstsvK9JS+BaVKqC6QQQQyXekNaC+g+LKNgkQ+2VhGAzm6hO40AhMR3zQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/js@8.50.0': @@ -1900,55 +2324,49 @@ packages: resolution: {integrity: sha512-Ydf4LidRB/EBI+YrB+cVLqIseiRfjUI/AeHBgjGMtq3GroraDu81OV7zqophRgupngoL3iS3JUMDMnxO7g39qA==} engines: {'0': node >=0.10.0} - '@expo/cli@0.17.8': - resolution: {integrity: sha512-yfkoghCltbGPDbRI71Qu3puInjXx4wO82+uhW82qbWLvosfIN7ep5Gr0Lq54liJpvlUG6M0IXM1GiGqcCyP12w==} + '@expo/cli@0.18.13': + resolution: {integrity: sha512-ZO1fpDK8z6mLeQGuFP6e3cZyCHV55ohZY7/tEyhpft3bwysS680eyFg5SFe+tWNFesnziFrbtI8JaUyhyjqovA==} hasBin: true '@expo/code-signing-certificates@0.0.5': resolution: {integrity: sha512-BNhXkY1bblxKZpltzAx98G2Egj9g1Q+JRcvR7E99DOj862FTCX+ZPsAUtPTr7aHxwtrL7+fL3r0JSmM9kBm+Bw==} - '@expo/config-plugins@7.8.4': - resolution: {integrity: sha512-hv03HYxb/5kX8Gxv/BTI8TLc9L06WzqAfHRRXdbar4zkLcP2oTzvsLEF4/L/TIpD3rsnYa0KU42d0gWRxzPCJg==} + '@expo/config-plugins@8.0.4': + resolution: {integrity: sha512-Hi+xuyNWE2LT4LVbGttHJgl9brnsdWAhEB42gWKb5+8ae86Nr/KwUBQJsJppirBYTeLjj5ZlY0glYnAkDa2jqw==} - '@expo/config-types@50.0.0': - resolution: {integrity: sha512-0kkhIwXRT6EdFDwn+zTg9R2MZIAEYGn1MVkyRohAd+C9cXOb5RA8WLQi7vuxKF9m1SMtNAUrf0pO+ENK0+/KSw==} + '@expo/config-types@51.0.0': + resolution: {integrity: sha512-acn03/u8mQvBhdTQtA7CNhevMltUhbSrpI01FYBJwpVntufkU++ncQujWKlgY/OwIajcfygk1AY4xcNZ5ImkRA==} - '@expo/config@8.5.4': - resolution: {integrity: sha512-ggOLJPHGzJSJHVBC1LzwXwR6qUn8Mw7hkc5zEKRIdhFRuIQ6s2FE4eOvP87LrNfDF7eZGa6tJQYsiHSmZKG+8Q==} + '@expo/config@9.0.2': + resolution: {integrity: 
sha512-BKQ4/qBf3OLT8hHp5kjObk2vxwoRQ1yYQBbG/OM9Jdz32yYtrU8opTbKRAxfZEWH5i3ZHdLrPdC1rO0I6WxtTw==} - '@expo/devcert@1.1.0': - resolution: {integrity: sha512-ghUVhNJQOCTdQckSGTHctNp/0jzvVoMMkVh+6SHn+TZj8sU15U/npXIDt8NtQp0HedlPaCgkVdMu8Sacne0aEA==} + '@expo/devcert@1.1.2': + resolution: {integrity: sha512-FyWghLu7rUaZEZSTLt/XNRukm0c9GFfwP0iFaswoDWpV6alvVg+zRAfCLdIVQEz1SVcQ3zo1hMZFDrnKGvkCuQ==} - '@expo/env@0.2.2': - resolution: {integrity: sha512-m9nGuaSpzdvMzevQ1H60FWgf4PG5s4J0dfKUzdAGnDu7sMUerY/yUeDaA4+OBo3vBwGVQ+UHcQS9vPSMBNaPcg==} + '@expo/env@0.3.0': + resolution: {integrity: sha512-OtB9XVHWaXidLbHvrVDeeXa09yvTl3+IQN884sO6PhIi2/StXfgSH/9zC7IvzrDB8kW3EBJ1PPLuCUJ2hxAT7Q==} - '@expo/fingerprint@0.6.0': - resolution: {integrity: sha512-KfpoVRTMwMNJ/Cf5o+Ou8M/Y0EGSTqK+rbi70M2Y0K2qgWNfMJ1gm6sYO9uc8lcTr7YSYM1Rme3dk7QXhpScNA==} - hasBin: true - - '@expo/image-utils@0.4.1': - resolution: {integrity: sha512-EZb+VHSmw+a5s2hS9qksTcWylY0FDaIAVufcxoaRS9tHIXLjW5zcKW7Rhj9dSEbZbRVy9yXXdHKa3GQdUQIOFw==} + '@expo/image-utils@0.5.1': + resolution: {integrity: sha512-U/GsFfFox88lXULmFJ9Shfl2aQGcwoKPF7fawSCLixIKtMCpsI+1r0h+5i0nQnmt9tHuzXZDL8+Dg1z6OhkI9A==} - '@expo/json-file@8.3.0': - resolution: {integrity: sha512-yROUeXJXR5goagB8c3muFLCzLmdGOvoPpR5yDNaXrnTp4euNykr9yW0wWhJx4YVRTNOPtGBnEbbJBW+a9q+S6g==} + '@expo/json-file@8.3.3': + resolution: {integrity: sha512-eZ5dld9AD0PrVRiIWpRkm5aIoWBw3kAyd8VkuWEy92sEthBKDDDHAnK2a0dw0Eil6j7rK7lS/Qaq/Zzngv2h5A==} - '@expo/metro-config@0.17.6': - resolution: {integrity: sha512-WaC1C+sLX/Wa7irwUigLhng3ckmXIEQefZczB8DfYmleV6uhfWWo2kz/HijFBpV7FKs2cW6u8J/aBQpFkxlcqg==} - peerDependencies: - '@react-native/babel-preset': '*' + '@expo/metro-config@0.18.4': + resolution: {integrity: sha512-vh9WDf/SzE+NYCn6gqbzLKiXtENFlFZdAqyj9nI38RvQ4jw6TJIQ8+ExcdLDT3MOG36Ytg44XX9Zb3OWF6LVxw==} - '@expo/osascript@2.1.0': - resolution: {integrity: sha512-bOhuFnlRaS7CU33+rFFIWdcET/Vkyn1vsN8BYFwCDEF5P1fVVvYN7bFOsQLTMD3nvi35C1AGmtqUr/Wfv8Xaow==} + '@expo/osascript@2.1.2': + 
resolution: {integrity: sha512-/ugqDG+52uzUiEpggS9GPdp9g0U9EQrXcTdluHDmnlGmR2nV/F83L7c+HCUyPnf77QXwkr8gQk16vQTbxBQ5eA==} engines: {node: '>=12'} - '@expo/package-manager@1.4.2': - resolution: {integrity: sha512-LKdo/6y4W7llZ6ghsg1kdx2CeH/qR/c6QI/JI8oPUvppsZoeIYjSkdflce978fAMfR8IXoi0wt0jA2w0kWpwbg==} + '@expo/package-manager@1.5.2': + resolution: {integrity: sha512-IuA9XtGBilce0q8cyxtWINqbzMB1Fia0Yrug/O53HNuRSwQguV/iqjV68bsa4z8mYerePhcFgtvISWLAlNEbUA==} - '@expo/plist@0.1.0': - resolution: {integrity: sha512-xWD+8vIFif0wKyuqe3fmnmnSouXYucciZXFzS0ZD5OV9eSAS1RGQI5FaGGJ6zxJ4mpdy/4QzbLdBjnYE5vxA0g==} + '@expo/plist@0.1.3': + resolution: {integrity: sha512-GW/7hVlAylYg1tUrEASclw1MMk9FP4ZwyFAY/SUTJIhPDQHtfOlXREyWV3hhrHdX/K+pS73GNgdfT6E/e+kBbg==} - '@expo/prebuild-config@6.7.4': - resolution: {integrity: sha512-x8EUdCa8DTMZ/dtEXjHAdlP+ljf6oSeSKNzhycXiHhpMSMG9jEhV28ocCwc6cKsjK5GziweEiHwvrj6+vsBlhA==} + '@expo/prebuild-config@7.0.4': + resolution: {integrity: sha512-E2n3QbwgV8Qa0CBw7BHrWBDWD7l8yw+N/yjvXpSPFFtoZLMSKyegdkJFACh2u+UIRKUSZm8zQwHeZR0rqAxV9g==} peerDependencies: expo-modules-autolinking: '>=0.8.1' @@ -1959,16 +2377,12 @@ packages: '@expo/sdk-runtime-versions@1.0.0': resolution: {integrity: sha512-Doz2bfiPndXYFPMRwPyGa1k5QaKDVpY806UJj570epIiMzWaYyCtobasyfC++qfIXVb5Ocy7r3tP9d62hAQ7IQ==} - '@expo/spawn-async@1.5.0': - resolution: {integrity: sha512-LB7jWkqrHo+5fJHNrLAFdimuSXQ2MQ4lA7SQW5bf/HbsXuV2VrT/jN/M8f/KoWt0uJMGN4k/j7Opx4AvOOxSew==} - engines: {node: '>=4'} - '@expo/spawn-async@1.7.2': resolution: {integrity: sha512-QdWi16+CHB9JYP7gma19OVVg0BFkvU8zNj9GjWorYI8Iv8FUxjOCcYRuAmX4s/h91e4e7BPsskc8cSrZYho9Ew==} engines: {node: '>=12'} - '@expo/vector-icons@14.0.0': - resolution: {integrity: sha512-5orm59pdnBQlovhU9k4DbjMUZBHNlku7IRgFY56f7pcaaCnXq9yaLJoOQl9sMwNdFzf4gnkTyHmR5uN10mI9rA==} + '@expo/vector-icons@14.0.2': + resolution: {integrity: sha512-70LpmXQu4xa8cMxjp1fydgRPsalefnHaXLzIwaHMEzcZhnyjw2acZz8azRrZOslPVAWlxItOa2Dd7WtD/kI+CA==} '@expo/websql@1.0.1': resolution: 
{integrity: sha512-H9/t1V7XXyKC343FJz/LwaVBfDhs6IqhDtSYWpt8LNSQDVjf5NvVJLc5wp+KCpRidZx8+0+YeHJN45HOXmqjFA==} @@ -2090,60 +2504,71 @@ packages: '@jridgewell/trace-mapping@0.3.25': resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + '@jridgewell/trace-mapping@0.3.9': + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@libsql/client@0.5.6': resolution: {integrity: sha512-UBjmDoxz75Z2sHdP+ETCROpeLA/77VMesiff8R4UWK1rnaWbh6/YoCLDILMJL3Rh0udQeKxjL8MjXthqohax+g==} + '@libsql/client@0.6.0': + resolution: {integrity: sha512-qhQzTG/y2IEVbL3+9PULDvlQFWJ/RnjFXECr/Nc3nRngGiiMysDaOV5VUzYk7DulUX98EA4wi+z3FspKrUplUA==} + '@libsql/core@0.5.6': resolution: {integrity: sha512-3vicUAydq6jPth410n4AsHHm1n2psTwvkSf94nfJlSXutGSZsl0updn2N/mJBgqUHkbuFoWZtlMifF0SwBj1xQ==} - '@libsql/darwin-arm64@0.3.10': - resolution: {integrity: sha512-RaexEFfPAFogd6dJlqkpCkTxdr6K14Z0286lodIJ8Ny77mWuWyBkWKxf70OYWXXAMxMJFUW+6al1F3/Osf/pTg==} + '@libsql/core@0.6.0': + resolution: {integrity: sha512-affAB8vSqQwqI9NBDJ5uJCVaHoOAS2pOpbv1kWConh1SBbmJBnHHd4KG73RAJ2sgd2+NbT9WA+XJBqxgp28YSw==} + + '@libsql/darwin-arm64@0.3.18': + resolution: {integrity: sha512-Zt49dt+cwhPCkuoWgvjbQd4ckNfCJR5xzIAyhgHl3CBZqZaEuaXTOGKLNQT7bnFRPuQcdLt5PBT1cenKu2N6pA==} cpu: [arm64] os: [darwin] - '@libsql/darwin-x64@0.3.10': - resolution: {integrity: sha512-SNVN6n4qNUdMW1fJMFmx4qn4n5RnXsxjFbczpkzG/V7m/5VeTFt1chhGcrahTHCr3+K6eRJWJUEQHRGqjBwPkw==} + '@libsql/darwin-x64@0.3.18': + resolution: {integrity: sha512-faq6HUGDaNaueeqPei5cypHaD/hhazUyfHo094CXiEeRZq6ZKtNl5PHdlr8jE/Uw8USNpVVQaLdnvSgKcpRPHw==} cpu: [x64] os: [darwin] '@libsql/hrana-client@0.5.6': resolution: {integrity: sha512-mjQoAmejZ1atG+M3YR2ZW+rg6ceBByH/S/h17ZoYZkqbWrvohFhXyz2LFxj++ARMoY9m6w3RJJIRdJdmnEUlFg==} + '@libsql/hrana-client@0.6.0': + resolution: {integrity: 
sha512-k+fqzdjqg3IvWfKmVJK5StsbjeTcyNAXFelUbXbGNz3yH1gEVT9mZ6kmhsIXP30ZSyVV0AE1Gi25p82mxC9hwg==} + '@libsql/isomorphic-fetch@0.1.12': resolution: {integrity: sha512-MRo4UcmjAGAa3ac56LoD5OE13m2p0lu0VEtZC2NZMcogM/jc5fU9YtMQ3qbPjFJ+u2BBjFZgMPkQaLS1dlMhpg==} + '@libsql/isomorphic-fetch@0.2.1': + resolution: {integrity: sha512-Sv07QP1Aw8A5OOrmKgRUBKe2fFhF2hpGJhtHe3d1aRnTESZCGkn//0zDycMKTGamVWb3oLYRroOsCV8Ukes9GA==} + '@libsql/isomorphic-ws@0.1.5': resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} - '@libsql/linux-arm64-gnu@0.3.10': - resolution: {integrity: sha512-2uXpi9d8qtyIOr7pyG4a88j6YXgemyIHEs2Wbp+PPletlCIPsFS+E7IQHbz8VwTohchOzcokGUm1Bc5QC+A7wg==} + '@libsql/linux-arm64-gnu@0.3.18': + resolution: {integrity: sha512-5m9xtDAhoyLSV54tho9uQ2ZIDeJWc0vU3Xpe/VK4+6bpURISs23qNhXiCrZnnq3oV0hFlBfcIgQUIATmb6jD2A==} cpu: [arm64] os: [linux] - '@libsql/linux-arm64-musl@0.3.10': - resolution: {integrity: sha512-72SN1FUavLvzHddCS861ynSpQndcW5oLGKA3U8CyMfgIZIwJAPc7+48Uj1plW00htXBx4GBpcntFp68KKIx3YQ==} + '@libsql/linux-arm64-musl@0.3.18': + resolution: {integrity: sha512-oYD5+oM2gPEalp+EoR5DVQBRtdGjLsocjsRbQs5O2m4WOBJKER7VUfDYZHsifLGZoBSc11Yo6s9IR9rjGWy20w==} cpu: [arm64] os: [linux] - '@libsql/linux-x64-gnu@0.3.10': - resolution: {integrity: sha512-hXyNqVRi7ONuyWZ1SX6setxL0QaQ7InyS3bHLupsi9s7NpOGD5vcpTaYicJOqmIIm+6kt8vJfmo7ZxlarIHy7Q==} + '@libsql/linux-x64-gnu@0.3.18': + resolution: {integrity: sha512-QDSSP60nS8KIldGE7H3bpEflQHiL1erwED6huoVJdmDFxsyDJX2CYdWUWW8Za0ZUOvUbnEWAOyMhp6j1dBbZqw==} cpu: [x64] os: [linux] - '@libsql/linux-x64-musl@0.3.10': - resolution: {integrity: sha512-kNmIRxomVwt9S+cLyYS497F/3gXFF4r8wW12YSBQgxG75JYft07AHVd8J7HINg+oqRkLzT0s+mVX5dM6nk68EQ==} + '@libsql/linux-x64-musl@0.3.18': + resolution: {integrity: sha512-5SXwTlaLCUPzxYyq+P0c7Ko7tcEjpd1X6RZKe1DuRFmJPg6f7j2+LrPEhMSIbqKcrl5ACUUAyoKmGZqNYwz23w==} cpu: [x64] os: [linux] - '@libsql/win32-x64-msvc@0.3.10': - resolution: {integrity: 
sha512-c/6rjdtGULKrJkLgfLobFefObfOtxjXGmCfPxv6pr0epPCeUEssfDbDIeEH9fQUgzogIMWEHwT8so52UJ/iT1Q==} + '@libsql/win32-x64-msvc@0.3.18': + resolution: {integrity: sha512-9EEIHz+e8tTbx9TMkb8ByZnzxc0pYFirK1nSbqC6cFEST95fiY0NCfQ/zAzJxe90KckbjifX6BbO69eWIi3TAg==} cpu: [x64] os: [win32] - '@mapbox/node-pre-gyp@1.0.10': - resolution: {integrity: sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==} - hasBin: true - '@miniflare/core@2.14.2': resolution: {integrity: sha512-n/smm5ZTg7ilGM4fxO7Gxhbe573oc8Za06M3b2fO+lPWqF6NJcEKdCC+sJntVFbn3Cbbd2G1ChISmugPfmlCkQ==} engines: {node: '>=16.13'} @@ -2167,15 +2592,15 @@ packages: '@neon-rs/load@0.0.4': resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} - '@neondatabase/serverless@0.4.3': - resolution: {integrity: sha512-U8tpuF5f0R5WRsciR7iaJ5S2h54DWa6Z6CEW+J4KgwyvRN3q3qDz0MibdfFXU0WqnRoi/9RSf/2XN4TfeaOCbQ==} - '@neondatabase/serverless@0.7.2': resolution: {integrity: sha512-wU3WA2uTyNO7wjPs3Mg0G01jztAxUxzd9/mskMmtPwPTjf7JKWi9AW5/puOGXLxmZ9PVgRFeBVRVYq5nBPhsCg==} '@neondatabase/serverless@0.9.0': resolution: {integrity: sha512-mmJnUAzlzvxNSZuuhI6kgJjH+JgFdBMYUWxihtq/nj0Tjt+Y5UU3W+SvRFoucnd5NObYkuLYQzk+zV5DGFKGJg==} + '@neondatabase/serverless@0.9.3': + resolution: {integrity: sha512-6ZBK8asl2Z3+ADEaELvbaVVGVlmY1oAzkxxZfpmXPKFuJhbDN+5fU3zYBamsahS/Ch1zE+CVWB3R+8QEI2LMSw==} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -2191,19 +2616,23 @@ packages: '@npmcli/fs@1.1.1': resolution: {integrity: sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==} + '@npmcli/fs@3.1.1': + resolution: {integrity: sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + 
'@npmcli/move-file@1.1.2': resolution: {integrity: sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==} engines: {node: '>=10'} deprecated: This functionality has been moved to @npmcli/fs - '@op-engineering/op-sqlite@2.0.16': - resolution: {integrity: sha512-tQuDhkPO6Ryp52PuzMm6wVhsY4ppg1VDHOZpY8pmRjTyZLY1pQasBjhgpL/7ks9fSZa4sUlgCQOtfbrN95gXtA==} + '@op-engineering/op-sqlite@2.0.22': + resolution: {integrity: sha512-fccByrMSDNV7koyAtu4oEWMtl0chpfQk4zbe7TrM7iIqcvBvayIeeK+noQ2JwgFOlhQvPAO852n0fip9d9zZog==} peerDependencies: react: '*' react-native: '*' - '@opentelemetry/api@1.4.1': - resolution: {integrity: sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==} + '@opentelemetry/api@1.8.0': + resolution: {integrity: sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==} engines: {node: '>=8.0.0'} '@originjs/vite-plugin-commonjs@1.0.3': @@ -2213,108 +2642,144 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@planetscale/database@1.16.0': - resolution: {integrity: sha512-HNUrTqrd8aTRZYMDcsoZ62s36sIWkMMmKZBOehoCWR2WrfNPKq+Q1yQef5okl3pSVlldFnu2h/dbHjOsDTHXug==} + '@planetscale/database@1.18.0': + resolution: {integrity: sha512-t2XdOfrVgcF7AW791FtdPS27NyNqcE1SpoXgk3HpziousvUMsJi4Q6NL3JyOBpsMOrvk94749o8yyonvX5quPw==} engines: {node: '>=16'} - '@polka/url@1.0.0-next.21': - resolution: {integrity: sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==} + '@polka/url@1.0.0-next.25': + resolution: {integrity: sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==} + + '@prisma/client@5.14.0': + resolution: {integrity: sha512-akMSuyvLKeoU4LeyBAUdThP/uhVP3GuLygFE3MlYzaCb3/J8SfsYBE5PkaFuLuVpLyA6sFoW+16z/aPhNAESqg==} + engines: {node: '>=16.13'} + peerDependencies: 
+ prisma: '*' + peerDependenciesMeta: + prisma: + optional: true + + '@prisma/debug@5.14.0': + resolution: {integrity: sha512-iq56qBZuFfX3fCxoxT8gBX33lQzomBU0qIUaEj1RebsKVz1ob/BVH1XSBwwwvRVtZEV1b7Fxx2eVu34Ge/mg3w==} - '@react-native-community/cli-clean@12.3.6': - resolution: {integrity: sha512-gUU29ep8xM0BbnZjwz9MyID74KKwutq9x5iv4BCr2im6nly4UMf1B1D+V225wR7VcDGzbgWjaezsJShLLhC5ig==} + '@prisma/debug@5.16.1': + resolution: {integrity: sha512-JsNgZAg6BD9RInLSrg7ZYzo11N7cVvYArq3fHGSD89HSgtN0VDdjV6bib7YddbcO6snzjchTiLfjeTqBjtArVQ==} - '@react-native-community/cli-config@12.3.6': - resolution: {integrity: sha512-JGWSYQ9EAK6m2v0abXwFLEfsqJ1zkhzZ4CV261QZF9MoUNB6h57a274h1MLQR9mG6Tsh38wBUuNfEPUvS1vYew==} + '@prisma/engines-version@5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48': + resolution: {integrity: sha512-ip6pNkRo1UxWv+6toxNcYvItNYaqQjXdFNGJ+Nuk2eYtRoEdoF13wxo7/jsClJFFenMPVNVqXQDV0oveXnR1cA==} - '@react-native-community/cli-debugger-ui@12.3.6': - resolution: {integrity: sha512-SjUKKsx5FmcK9G6Pb6UBFT0s9JexVStK5WInmANw75Hm7YokVvHEgtprQDz2Uvy5znX5g2ujzrkIU//T15KQzA==} + '@prisma/engines@5.14.0': + resolution: {integrity: sha512-lgxkKZ6IEygVcw6IZZUlPIfLQ9hjSYAtHjZ5r64sCLDgVzsPFCi2XBBJgzPMkOQ5RHzUD4E/dVdpn9+ez8tk1A==} - '@react-native-community/cli-doctor@12.3.6': - resolution: {integrity: sha512-fvBDv2lTthfw4WOQKkdTop2PlE9GtfrlNnpjB818MhcdEnPjfQw5YaTUcnNEGsvGomdCs1MVRMgYXXwPSN6OvQ==} + '@prisma/fetch-engine@5.14.0': + resolution: {integrity: sha512-VrheA9y9DMURK5vu8OJoOgQpxOhas3qF0IBHJ8G/0X44k82kc8E0w98HCn2nhnbOOMwbWsJWXfLC2/F8n5u0gQ==} - '@react-native-community/cli-hermes@12.3.6': - resolution: {integrity: sha512-sNGwfOCl8OAIjWCkwuLpP8NZbuO0dhDI/2W7NeOGDzIBsf4/c4MptTrULWtGIH9okVPLSPX0NnRyGQ+mSwWyuQ==} + '@prisma/generator-helper@5.16.1': + resolution: {integrity: sha512-WxV/msovIubvr20iIdPJN0MUj46J26ax+sV+vMQSCeVoHQW//xdJZoPnimG54M7+CA9kupXjVpgjiPX4rcKQeA==} - '@react-native-community/cli-platform-android@12.3.6': - resolution: {integrity: 
sha512-DeDDAB8lHpuGIAPXeeD9Qu2+/wDTFPo99c8uSW49L0hkmZJixzvvvffbGQAYk32H0TmaI7rzvzH+qzu7z3891g==} + '@prisma/get-platform@5.14.0': + resolution: {integrity: sha512-/yAyBvcEjRv41ynZrhdrPtHgk47xLRRq/o5eWGcUpBJ1YrUZTYB8EoPiopnP7iQrMATK8stXQdPOoVlrzuTQZw==} - '@react-native-community/cli-platform-ios@12.3.6': - resolution: {integrity: sha512-3eZ0jMCkKUO58wzPWlvAPRqezVKm9EPZyaPyHbRPWU8qw7JqkvnRlWIaYDGpjCJgVW4k2hKsEursLtYKb188tg==} + '@react-native-community/cli-clean@13.6.6': + resolution: {integrity: sha512-cBwJTwl0NyeA4nyMxbhkWZhxtILYkbU3TW3k8AXLg+iGphe0zikYMGB3T+haTvTc6alTyEFwPbimk9bGIqkjAQ==} - '@react-native-community/cli-plugin-metro@12.3.6': - resolution: {integrity: sha512-3jxSBQt4fkS+KtHCPSyB5auIT+KKIrPCv9Dk14FbvOaEh9erUWEm/5PZWmtboW1z7CYeNbFMeXm9fM2xwtVOpg==} + '@react-native-community/cli-config@13.6.6': + resolution: {integrity: sha512-mbG425zCKr8JZhv/j11382arezwS/70juWMsn8j2lmrGTrP1cUdW0MF15CCIFtJsqyK3Qs+FTmqttRpq81QfSg==} - '@react-native-community/cli-server-api@12.3.6': - resolution: {integrity: sha512-80NIMzo8b2W+PL0Jd7NjiJW9mgaT8Y8wsIT/lh6mAvYH7mK0ecDJUYUTAAv79Tbo1iCGPAr3T295DlVtS8s4yQ==} + '@react-native-community/cli-debugger-ui@13.6.6': + resolution: {integrity: sha512-Vv9u6eS4vKSDAvdhA0OiQHoA7y39fiPIgJ6biT32tN4avHDtxlc6TWZGiqv7g98SBvDWvoVAmdPLcRf3kU+c8g==} - '@react-native-community/cli-tools@12.3.6': - resolution: {integrity: sha512-FPEvZn19UTMMXUp/piwKZSh8cMEfO8G3KDtOwo53O347GTcwNrKjgZGtLSPELBX2gr+YlzEft3CoRv2Qmo83fQ==} + '@react-native-community/cli-doctor@13.6.6': + resolution: {integrity: sha512-TWZb5g6EmQe2Ua2TEWNmyaEayvlWH4GmdD9ZC+p8EpKFpB1NpDGMK6sXbpb42TDvwZg5s4TDRplK0PBEA/SVDg==} - '@react-native-community/cli-types@12.3.6': - resolution: {integrity: sha512-xPqTgcUtZowQ8WKOkI9TLGBwH2bGggOC4d2FFaIRST3gTcjrEeGRNeR5aXCzJFIgItIft8sd7p2oKEdy90+01Q==} + '@react-native-community/cli-hermes@13.6.6': + resolution: {integrity: sha512-La5Ie+NGaRl3klei6WxKoOxmCUSGGxpOk6vU5pEGf0/O7ky+Ay0io+zXYUZqlNMi/cGpO7ZUijakBYOB/uyuFg==} - 
'@react-native-community/cli@12.3.6': - resolution: {integrity: sha512-647OSi6xBb8FbwFqX9zsJxOzu685AWtrOUWHfOkbKD+5LOpGORw+GQo0F9rWZnB68rLQyfKUZWJeaD00pGv5fw==} + '@react-native-community/cli-platform-android@13.6.6': + resolution: {integrity: sha512-/tMwkBeNxh84syiSwNlYtmUz/Ppc+HfKtdopL/5RB+fd3SV1/5/NPNjMlyLNgFKnpxvKCInQ7dnl6jGHJjeHjg==} + + '@react-native-community/cli-platform-apple@13.6.6': + resolution: {integrity: sha512-bOmSSwoqNNT3AmCRZXEMYKz1Jf1l2F86Nhs7qBcXdY/sGiJ+Flng564LOqvdAlVLTbkgz47KjNKCS2pP4Jg0Mg==} + + '@react-native-community/cli-platform-ios@13.6.6': + resolution: {integrity: sha512-vjDnRwhlSN5ryqKTas6/DPkxuouuyFBAqAROH4FR1cspTbn6v78JTZKDmtQy9JMMo7N5vZj1kASU5vbFep9IOQ==} + + '@react-native-community/cli-server-api@13.6.6': + resolution: {integrity: sha512-ZtCXxoFlM7oDv3iZ3wsrT3SamhtUJuIkX2WePLPlN5bcbq7zimbPm2lHyicNJtpcGQ5ymsgpUWPCNZsWQhXBqQ==} + + '@react-native-community/cli-tools@13.6.6': + resolution: {integrity: sha512-ptOnn4AJczY5njvbdK91k4hcYazDnGtEPrqIwEI+k/CTBHNdb27Rsm2OZ7ye6f7otLBqF8gj/hK6QzJs8CEMgw==} + + '@react-native-community/cli-types@13.6.6': + resolution: {integrity: sha512-733iaYzlmvNK7XYbnWlMjdE+2k0hlTBJW071af/xb6Bs+hbJqBP9c03FZuYH2hFFwDDntwj05bkri/P7VgSxug==} + + '@react-native-community/cli@13.6.6': + resolution: {integrity: sha512-IqclB7VQ84ye8Fcs89HOpOscY4284VZg2pojHNl8H0Lzd4DadXJWQoxC7zWm8v2f8eyeX2kdhxp2ETD5tceIgA==} engines: {node: '>=18'} hasBin: true - '@react-native/assets-registry@0.73.1': - resolution: {integrity: sha512-2FgAbU7uKM5SbbW9QptPPZx8N9Ke2L7bsHb+EhAanZjFZunA9PaYtyjUQ1s7HD+zDVqOQIvjkpXSv7Kejd2tqg==} + '@react-native/assets-registry@0.74.83': + resolution: {integrity: sha512-2vkLMVnp+YTZYTNSDIBZojSsjz8sl5PscP3j4GcV6idD8V978SZfwFlk8K0ti0BzRs11mzL0Pj17km597S/eTQ==} engines: {node: '>=18'} - '@react-native/babel-plugin-codegen@0.73.4': - resolution: {integrity: sha512-XzRd8MJGo4Zc5KsphDHBYJzS1ryOHg8I2gOZDAUCGcwLFhdyGu1zBNDJYH2GFyDrInn9TzAbRIf3d4O+eltXQQ==} + '@react-native/babel-plugin-codegen@0.74.83': + 
resolution: {integrity: sha512-+S0st3t4Ro00bi9gjT1jnK8qTFOU+CwmziA7U9odKyWrCoRJrgmrvogq/Dr1YXlpFxexiGIupGut1VHxr+fxJA==} engines: {node: '>=18'} - '@react-native/babel-preset@0.73.21': - resolution: {integrity: sha512-WlFttNnySKQMeujN09fRmrdWqh46QyJluM5jdtDNrkl/2Hx6N4XeDUGhABvConeK95OidVO7sFFf7sNebVXogA==} + '@react-native/babel-preset@0.74.83': + resolution: {integrity: sha512-KJuu3XyVh3qgyUer+rEqh9a/JoUxsDOzkJNfRpDyXiAyjDRoVch60X/Xa/NcEQ93iCVHAWs0yQ+XGNGIBCYE6g==} engines: {node: '>=18'} peerDependencies: '@babel/core': '*' - '@react-native/codegen@0.73.3': - resolution: {integrity: sha512-sxslCAAb8kM06vGy9Jyh4TtvjhcP36k/rvj2QE2Jdhdm61KvfafCATSIsOfc0QvnduWFcpXUPvAVyYwuv7PYDg==} + '@react-native/codegen@0.74.83': + resolution: {integrity: sha512-GgvgHS3Aa2J8/mp1uC/zU8HuTh8ZT5jz7a4mVMWPw7+rGyv70Ba8uOVBq6UH2Q08o617IATYc+0HfyzAfm4n0w==} engines: {node: '>=18'} peerDependencies: '@babel/preset-env': ^7.1.6 - '@react-native/community-cli-plugin@0.73.17': - resolution: {integrity: sha512-F3PXZkcHg+1ARIr6FRQCQiB7ZAA+MQXGmq051metRscoLvgYJwj7dgC8pvgy0kexzUkHu5BNKrZeySzUft3xuQ==} + '@react-native/community-cli-plugin@0.74.83': + resolution: {integrity: sha512-7GAFjFOg1mFSj8bnFNQS4u8u7+QtrEeflUIDVZGEfBZQ3wMNI5ycBzbBGycsZYiq00Xvoc6eKFC7kvIaqeJpUQ==} engines: {node: '>=18'} - '@react-native/debugger-frontend@0.73.3': - resolution: {integrity: sha512-RgEKnWuoo54dh7gQhV7kvzKhXZEhpF9LlMdZolyhGxHsBqZ2gXdibfDlfcARFFifPIiaZ3lXuOVVa4ei+uPgTw==} + '@react-native/debugger-frontend@0.74.83': + resolution: {integrity: sha512-RGQlVUegBRxAUF9c1ss1ssaHZh6CO+7awgtI9sDeU0PzDZY/40ImoPD5m0o0SI6nXoVzbPtcMGzU+VO590pRfA==} engines: {node: '>=18'} - '@react-native/dev-middleware@0.73.8': - resolution: {integrity: sha512-oph4NamCIxkMfUL/fYtSsE+JbGOnrlawfQ0kKtDQ5xbOjPKotKoXqrs1eGwozNKv7FfQ393stk1by9a6DyASSg==} + '@react-native/dev-middleware@0.74.83': + resolution: {integrity: sha512-UH8iriqnf7N4Hpi20D7M2FdvSANwTVStwFCSD7VMU9agJX88Yk0D1T6Meh2RMhUu4kY2bv8sTkNRm7LmxvZqgA==} engines: {node: '>=18'} - 
'@react-native/gradle-plugin@0.73.4': - resolution: {integrity: sha512-PMDnbsZa+tD55Ug+W8CfqXiGoGneSSyrBZCMb5JfiB3AFST3Uj5e6lw8SgI/B6SKZF7lG0BhZ6YHZsRZ5MlXmg==} + '@react-native/gradle-plugin@0.74.83': + resolution: {integrity: sha512-Pw2BWVyOHoBuJVKxGVYF6/GSZRf6+v1Ygc+ULGz5t20N8qzRWPa2fRZWqoxsN7TkNLPsECYY8gooOl7okOcPAQ==} engines: {node: '>=18'} - '@react-native/js-polyfills@0.73.1': - resolution: {integrity: sha512-ewMwGcumrilnF87H4jjrnvGZEaPFCAC4ebraEK+CurDDmwST/bIicI4hrOAv+0Z0F7DEK4O4H7r8q9vH7IbN4g==} + '@react-native/js-polyfills@0.74.83': + resolution: {integrity: sha512-/t74n8r6wFhw4JEoOj3bN71N1NDLqaawB75uKAsSjeCwIR9AfCxlzZG0etsXtOexkY9KMeZIQ7YwRPqUdNXuqw==} engines: {node: '>=18'} - '@react-native/metro-babel-transformer@0.73.15': - resolution: {integrity: sha512-LlkSGaXCz+xdxc9819plmpsl4P4gZndoFtpjN3GMBIu6f7TBV0GVbyJAU4GE8fuAWPVSVL5ArOcdkWKSbI1klw==} + '@react-native/metro-babel-transformer@0.74.83': + resolution: {integrity: sha512-hGdx5N8diu8y+GW/ED39vTZa9Jx1di2ZZ0aapbhH4egN1agIAusj5jXTccfNBwwWF93aJ5oVbRzfteZgjbutKg==} engines: {node: '>=18'} peerDependencies: '@babel/core': '*' - '@react-native/normalize-color@2.1.0': - resolution: {integrity: sha512-Z1jQI2NpdFJCVgpY+8Dq/Bt3d+YUi1928Q+/CZm/oh66fzM0RUl54vvuXlPJKybH4pdCZey1eDTPaLHkMPNgWA==} - - '@react-native/normalize-colors@0.73.2': - resolution: {integrity: sha512-bRBcb2T+I88aG74LMVHaKms2p/T8aQd8+BZ7LuuzXlRfog1bMWWn/C5i0HVuvW4RPtXQYgIlGiXVDy9Ir1So/w==} + '@react-native/normalize-colors@0.74.83': + resolution: {integrity: sha512-jhCY95gRDE44qYawWVvhTjTplW1g+JtKTKM3f8xYT1dJtJ8QWv+gqEtKcfmOHfDkSDaMKG0AGBaDTSK8GXLH8Q==} - '@react-native/virtualized-lists@0.73.4': - resolution: {integrity: sha512-HpmLg1FrEiDtrtAbXiwCgXFYyloK/dOIPIuWW3fsqukwJEWAiTzm1nXGJ7xPU5XTHiWZ4sKup5Ebaj8z7iyWog==} + '@react-native/virtualized-lists@0.74.83': + resolution: {integrity: sha512-rmaLeE34rj7py4FxTod7iMTC7BAsm+HrGA8WxYmEJeyTV7WSaxAkosKoYBz8038mOiwnG9VwA/7FrB6bEQvn1A==} engines: {node: '>=18'} peerDependencies: + 
'@types/react': ^18.2.6 + react: '*' react-native: '*' + peerDependenciesMeta: + '@types/react': + optional: true + + '@rnx-kit/chromium-edge-launcher@1.0.0': + resolution: {integrity: sha512-lzD84av1ZQhYUS+jsGqJiCMaJO2dn9u+RTT9n9q6D3SaKVwWqv+7AoRKqBu19bkwyE+iFRl1ymr40QS90jVFYg==} + engines: {node: '>=14.15'} '@rollup/plugin-terser@0.4.1': resolution: {integrity: sha512-aKS32sw5a7hy+fEXVy+5T95aDIwjpGHCTv833HXVtyKMDoVS7pBr5K3L9hEQoNqbJFjfANPrNpIXlTQ7is00eA==} @@ -2360,6 +2825,86 @@ packages: rollup: optional: true + '@rollup/rollup-android-arm-eabi@4.18.0': + resolution: {integrity: sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.18.0': + resolution: {integrity: sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.18.0': + resolution: {integrity: sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.18.0': + resolution: {integrity: sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-linux-arm-gnueabihf@4.18.0': + resolution: {integrity: sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.18.0': + resolution: {integrity: sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.18.0': + resolution: {integrity: sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.18.0': + resolution: {integrity: 
sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-powerpc64le-gnu@4.18.0': + resolution: {integrity: sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.18.0': + resolution: {integrity: sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.18.0': + resolution: {integrity: sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.18.0': + resolution: {integrity: sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.18.0': + resolution: {integrity: sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-win32-arm64-msvc@4.18.0': + resolution: {integrity: sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.18.0': + resolution: {integrity: sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.18.0': + resolution: {integrity: sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==} + cpu: [x64] + os: [win32] + '@segment/loosely-validate-event@2.0.0': resolution: {integrity: sha512-ZMCSfztDBqwotkl848ODgVcAmN4OItEWDCkshcKz0/W6gGSQayuuCtWV/MlodFivAZD793d6UgANd6wCXUfrIw==} @@ -2378,6 +2923,10 @@ packages: '@sinclair/typebox@0.29.6': resolution: {integrity: 
sha512-aX5IFYWlMa7tQ8xZr3b2gtVReCvg7f3LEhjir/JAjX2bJCMVJA5tIPv30wTD4KDfcwMd7DDYY3hFDeGmOgtrZQ==} + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + '@sinonjs/commons@3.0.1': resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} @@ -2388,18 +2937,34 @@ packages: resolution: {integrity: sha512-wRlta7GuLWpTqtFfGo+nZyOO1vEvewdNR1R4rTxpC8XU6vG/NDyrFBhwLZsqg1NUoR1noVaXJPC/7ZK47QCySw==} engines: {node: '>=14.0.0'} + '@smithy/abort-controller@3.0.0': + resolution: {integrity: sha512-p6GlFGBt9K4MYLu72YuJ523NVR4A8oHlC5M2JO6OmQqN8kAc/uh1JqLE+FizTokrSJGg0CSvC+BrsmGzKtsZKA==} + engines: {node: '>=16.0.0'} + '@smithy/config-resolver@2.2.0': resolution: {integrity: sha512-fsiMgd8toyUba6n1WRmr+qACzXltpdDkPTAaDqc8QqPBUzO+/JKwL6bUBseHVi8tu9l+3JOK+tSf7cay+4B3LA==} engines: {node: '>=14.0.0'} + '@smithy/config-resolver@3.0.0': + resolution: {integrity: sha512-2GzOfADwYLQugYkKQhIyZyQlM05K+tMKvRnc6eFfZcpJGRfKoMUMYdPlBKmqHwQFXQKBrGV6cxL9oymWgDzvFw==} + engines: {node: '>=16.0.0'} + '@smithy/core@1.4.2': resolution: {integrity: sha512-2fek3I0KZHWJlRLvRTqxTEri+qV0GRHrJIoLFuBMZB4EMg4WgeBGfF0X6abnrNYpq55KJ6R4D6x4f0vLnhzinA==} engines: {node: '>=14.0.0'} + '@smithy/core@2.0.1': + resolution: {integrity: sha512-rcMkjvwxH/bER+oZUPR0yTA0ELD6m3A+d92+CFkdF6HJFCBB1bXo7P5pm21L66XwTN01B6bUhSCQ7cymWRD8zg==} + engines: {node: '>=16.0.0'} + '@smithy/credential-provider-imds@2.3.0': resolution: {integrity: sha512-BWB9mIukO1wjEOo1Ojgl6LrG4avcaC7T/ZP6ptmAaW4xluhSIPZhY+/PI5YKzlk+jsm+4sQZB45Bt1OfMeQa3w==} engines: {node: '>=14.0.0'} + '@smithy/credential-provider-imds@3.0.0': + resolution: {integrity: sha512-lfmBiFQcA3FsDAPxNfY0L7CawcWtbyWsBOHo34nF095728JLkBX4Y9q/VPPE2r7fqMVK+drmDigqE2/SSQeVRA==} + engines: {node: '>=16.0.0'} + '@smithy/eventstream-codec@2.2.0': resolution: {integrity: 
sha512-8janZoJw85nJmQZc4L8TuePp2pk1nxLgkxIR0TUjKJ5Dkj5oelB9WtiSSGXCQvNsJl0VSTvK/2ueMXxvpa9GVw==} @@ -2422,139 +2987,275 @@ packages: '@smithy/fetch-http-handler@2.5.0': resolution: {integrity: sha512-BOWEBeppWhLn/no/JxUL/ghTfANTjT7kg3Ww2rPqTUY9R4yHPXxJ9JhMe3Z03LN3aPwiwlpDIUcVw1xDyHqEhw==} + '@smithy/fetch-http-handler@3.0.1': + resolution: {integrity: sha512-uaH74i5BDj+rBwoQaXioKpI0SHBJFtOVwzrCpxZxphOW0ki5jhj7dXvDMYM2IJem8TpdFvS2iC08sjOblfFGFg==} + '@smithy/hash-node@2.2.0': resolution: {integrity: sha512-zLWaC/5aWpMrHKpoDF6nqpNtBhlAYKF/7+9yMN7GpdR8CzohnWfGtMznPybnwSS8saaXBMxIGwJqR4HmRp6b3g==} engines: {node: '>=14.0.0'} + '@smithy/hash-node@3.0.0': + resolution: {integrity: sha512-84qXstNemP3XS5jcof0el6+bDfjzuvhJPQTEfro3lgtbCtKgzPm3MgiS6ehXVPjeQ5+JS0HqmTz8f/RYfzHVxw==} + engines: {node: '>=16.0.0'} + '@smithy/invalid-dependency@2.2.0': resolution: {integrity: sha512-nEDASdbKFKPXN2O6lOlTgrEEOO9NHIeO+HVvZnkqc8h5U9g3BIhWsvzFo+UcUbliMHvKNPD/zVxDrkP1Sbgp8Q==} + '@smithy/invalid-dependency@3.0.0': + resolution: {integrity: sha512-F6wBBaEFgJzj0s4KUlliIGPmqXemwP6EavgvDqYwCH40O5Xr2iMHvS8todmGVZtuJCorBkXsYLyTu4PuizVq5g==} + '@smithy/is-array-buffer@2.2.0': resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} engines: {node: '>=14.0.0'} + '@smithy/is-array-buffer@3.0.0': + resolution: {integrity: sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==} + engines: {node: '>=16.0.0'} + '@smithy/middleware-content-length@2.2.0': resolution: {integrity: sha512-5bl2LG1Ah/7E5cMSC+q+h3IpVHMeOkG0yLRyQT1p2aMJkSrZG7RlXHPuAgb7EyaFeidKEnnd/fNaLLaKlHGzDQ==} engines: {node: '>=14.0.0'} + '@smithy/middleware-content-length@3.0.0': + resolution: {integrity: sha512-3C4s4d/iGobgCtk2tnWW6+zSTOBg1PRAm2vtWZLdriwTroFbbWNSr3lcyzHdrQHnEXYCC5K52EbpfodaIUY8sg==} + engines: {node: '>=16.0.0'} + '@smithy/middleware-endpoint@2.5.1': resolution: {integrity: 
sha512-1/8kFp6Fl4OsSIVTWHnNjLnTL8IqpIb/D3sTSczrKFnrE9VMNWxnrRKNvpUHOJ6zpGD5f62TPm7+17ilTJpiCQ==} engines: {node: '>=14.0.0'} + '@smithy/middleware-endpoint@3.0.0': + resolution: {integrity: sha512-aXOAWztw/5qAfp0NcA2OWpv6ZI/E+Dh9mByif7i91D/0iyYNUcKvskmXiowKESFkuZ7PIMd3VOR4fTibZDs2OQ==} + engines: {node: '>=16.0.0'} + '@smithy/middleware-retry@2.3.1': resolution: {integrity: sha512-P2bGufFpFdYcWvqpyqqmalRtwFUNUA8vHjJR5iGqbfR6mp65qKOLcUd6lTr4S9Gn/enynSrSf3p3FVgVAf6bXA==} engines: {node: '>=14.0.0'} + '@smithy/middleware-retry@3.0.1': + resolution: {integrity: sha512-hBhSEuL841FhJBK/19WpaGk5YWSzFk/P2UaVjANGKRv3eYNO8Y1lANWgqnuPWjOyCEWMPr58vELFDWpxvRKANw==} + engines: {node: '>=16.0.0'} + '@smithy/middleware-serde@2.3.0': resolution: {integrity: sha512-sIADe7ojwqTyvEQBe1nc/GXB9wdHhi9UwyX0lTyttmUWDJLP655ZYE1WngnNyXREme8I27KCaUhyhZWRXL0q7Q==} engines: {node: '>=14.0.0'} + '@smithy/middleware-serde@3.0.0': + resolution: {integrity: sha512-I1vKG1foI+oPgG9r7IMY1S+xBnmAn1ISqployvqkwHoSb8VPsngHDTOgYGYBonuOKndaWRUGJZrKYYLB+Ane6w==} + engines: {node: '>=16.0.0'} + '@smithy/middleware-stack@2.2.0': resolution: {integrity: sha512-Qntc3jrtwwrsAC+X8wms8zhrTr0sFXnyEGhZd9sLtsJ/6gGQKFzNB+wWbOcpJd7BR8ThNCoKt76BuQahfMvpeA==} engines: {node: '>=14.0.0'} + '@smithy/middleware-stack@3.0.0': + resolution: {integrity: sha512-+H0jmyfAyHRFXm6wunskuNAqtj7yfmwFB6Fp37enytp2q047/Od9xetEaUbluyImOlGnGpaVGaVfjwawSr+i6Q==} + engines: {node: '>=16.0.0'} + '@smithy/node-config-provider@2.3.0': resolution: {integrity: sha512-0elK5/03a1JPWMDPaS726Iw6LpQg80gFut1tNpPfxFuChEEklo2yL823V94SpTZTxmKlXFtFgsP55uh3dErnIg==} engines: {node: '>=14.0.0'} + '@smithy/node-config-provider@3.0.0': + resolution: {integrity: sha512-buqfaSdDh0zo62EPLf8rGDvcpKwGpO5ho4bXS2cdFhlOta7tBkWJt+O5uiaAeICfIOfPclNOndshDNSanX2X9g==} + engines: {node: '>=16.0.0'} + '@smithy/node-http-handler@2.5.0': resolution: {integrity: sha512-mVGyPBzkkGQsPoxQUbxlEfRjrj6FPyA3u3u2VXGr9hT8wilsoQdZdvKpMBFMB8Crfhv5dNkKHIW0Yyuc7eABqA==} engines: {node: 
'>=14.0.0'} + '@smithy/node-http-handler@3.0.0': + resolution: {integrity: sha512-3trD4r7NOMygwLbUJo4eodyQuypAWr7uvPnebNJ9a70dQhVn+US8j/lCnvoJS6BXfZeF7PkkkI0DemVJw+n+eQ==} + engines: {node: '>=16.0.0'} + '@smithy/property-provider@2.2.0': resolution: {integrity: sha512-+xiil2lFhtTRzXkx8F053AV46QnIw6e7MV8od5Mi68E1ICOjCeCHw2XfLnDEUHnT9WGUIkwcqavXjfwuJbGlpg==} engines: {node: '>=14.0.0'} + '@smithy/property-provider@3.0.0': + resolution: {integrity: sha512-LmbPgHBswdXCrkWWuUwBm9w72S2iLWyC/5jet9/Y9cGHtzqxi+GVjfCfahkvNV4KXEwgnH8EMpcrD9RUYe0eLQ==} + engines: {node: '>=16.0.0'} + '@smithy/protocol-http@3.3.0': resolution: {integrity: sha512-Xy5XK1AFWW2nlY/biWZXu6/krgbaf2dg0q492D8M5qthsnU2H+UgFeZLbM76FnH7s6RO/xhQRkj+T6KBO3JzgQ==} engines: {node: '>=14.0.0'} + '@smithy/protocol-http@4.0.0': + resolution: {integrity: sha512-qOQZOEI2XLWRWBO9AgIYuHuqjZ2csyr8/IlgFDHDNuIgLAMRx2Bl8ck5U5D6Vh9DPdoaVpuzwWMa0xcdL4O/AQ==} + engines: {node: '>=16.0.0'} + '@smithy/querystring-builder@2.2.0': resolution: {integrity: sha512-L1kSeviUWL+emq3CUVSgdogoM/D9QMFaqxL/dd0X7PCNWmPXqt+ExtrBjqT0V7HLN03Vs9SuiLrG3zy3JGnE5A==} engines: {node: '>=14.0.0'} + '@smithy/querystring-builder@3.0.0': + resolution: {integrity: sha512-bW8Fi0NzyfkE0TmQphDXr1AmBDbK01cA4C1Z7ggwMAU5RDz5AAv/KmoRwzQAS0kxXNf/D2ALTEgwK0U2c4LtRg==} + engines: {node: '>=16.0.0'} + '@smithy/querystring-parser@2.2.0': resolution: {integrity: sha512-BvHCDrKfbG5Yhbpj4vsbuPV2GgcpHiAkLeIlcA1LtfpMz3jrqizP1+OguSNSj1MwBHEiN+jwNisXLGdajGDQJA==} engines: {node: '>=14.0.0'} + '@smithy/querystring-parser@3.0.0': + resolution: {integrity: sha512-UzHwthk0UEccV4dHzPySnBy34AWw3V9lIqUTxmozQ+wPDAO9csCWMfOLe7V9A2agNYy7xE+Pb0S6K/J23JSzfQ==} + engines: {node: '>=16.0.0'} + '@smithy/service-error-classification@2.1.5': resolution: {integrity: sha512-uBDTIBBEdAQryvHdc5W8sS5YX7RQzF683XrHePVdFmAgKiMofU15FLSM0/HU03hKTnazdNRFa0YHS7+ArwoUSQ==} engines: {node: '>=14.0.0'} + '@smithy/service-error-classification@3.0.0': + resolution: {integrity: 
sha512-3BsBtOUt2Gsnc3X23ew+r2M71WwtpHfEDGhHYHSDg6q1t8FrWh15jT25DLajFV1H+PpxAJ6gqe9yYeRUsmSdFA==} + engines: {node: '>=16.0.0'} + '@smithy/shared-ini-file-loader@2.4.0': resolution: {integrity: sha512-WyujUJL8e1B6Z4PBfAqC/aGY1+C7T0w20Gih3yrvJSk97gpiVfB+y7c46T4Nunk+ZngLq0rOIdeVeIklk0R3OA==} engines: {node: '>=14.0.0'} - '@smithy/signature-v4@2.2.1': - resolution: {integrity: sha512-j5fHgL1iqKTsKJ1mTcw88p0RUcidDu95AWSeZTgiYJb+QcfwWU/UpBnaqiB59FNH5MiAZuSbOBnZlwzeeY2tIw==} + '@smithy/shared-ini-file-loader@3.0.0': + resolution: {integrity: sha512-REVw6XauXk8xE4zo5aGL7Rz4ywA8qNMUn8RtWeTRQsgAlmlvbJ7CEPBcaXU2NDC3AYBgYAXrGyWD8XrN8UGDog==} + engines: {node: '>=16.0.0'} + + '@smithy/signature-v4@2.3.0': + resolution: {integrity: sha512-ui/NlpILU+6HAQBfJX8BBsDXuKSNrjTSuOYArRblcrErwKFutjrCNb/OExfVRyj9+26F9J+ZmfWT+fKWuDrH3Q==} engines: {node: '>=14.0.0'} + '@smithy/signature-v4@3.0.0': + resolution: {integrity: sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA==} + engines: {node: '>=16.0.0'} + '@smithy/smithy-client@2.5.1': resolution: {integrity: sha512-jrbSQrYCho0yDaaf92qWgd+7nAeap5LtHTI51KXqmpIFCceKU3K9+vIVTUH72bOJngBMqa4kyu1VJhRcSrk/CQ==} engines: {node: '>=14.0.0'} + '@smithy/smithy-client@3.0.1': + resolution: {integrity: sha512-KAiFY4Y4jdHxR+4zerH/VBhaFKM8pbaVmJZ/CWJRwtM/CmwzTfXfvYwf6GoUwiHepdv+lwiOXCuOl6UBDUEINw==} + engines: {node: '>=16.0.0'} + '@smithy/types@2.12.0': resolution: {integrity: sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==} engines: {node: '>=14.0.0'} + '@smithy/types@3.0.0': + resolution: {integrity: sha512-VvWuQk2RKFuOr98gFhjca7fkBS+xLLURT8bUjk5XQoV0ZLm7WPwWPPY3/AwzTLuUBDeoKDCthfe1AsTUWaSEhw==} + engines: {node: '>=16.0.0'} + '@smithy/url-parser@2.2.0': resolution: {integrity: sha512-hoA4zm61q1mNTpksiSWp2nEl1dt3j726HdRhiNgVJQMj7mLp7dprtF57mOB6JvEk/x9d2bsuL5hlqZbBuHQylQ==} + '@smithy/url-parser@3.0.0': + resolution: {integrity: 
sha512-2XLazFgUu+YOGHtWihB3FSLAfCUajVfNBXGGYjOaVKjLAuAxx3pSBY3hBgLzIgB17haf59gOG3imKqTy8mcrjw==} + '@smithy/util-base64@2.3.0': resolution: {integrity: sha512-s3+eVwNeJuXUwuMbusncZNViuhv2LjVJ1nMwTqSA0XAC7gjKhqqxRdJPhR8+YrkoZ9IiIbFk/yK6ACe/xlF+hw==} engines: {node: '>=14.0.0'} + '@smithy/util-base64@3.0.0': + resolution: {integrity: sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==} + engines: {node: '>=16.0.0'} + '@smithy/util-body-length-browser@2.2.0': resolution: {integrity: sha512-dtpw9uQP7W+n3vOtx0CfBD5EWd7EPdIdsQnWTDoFf77e3VUf05uA7R7TGipIo8e4WL2kuPdnsr3hMQn9ziYj5w==} + '@smithy/util-body-length-browser@3.0.0': + resolution: {integrity: sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==} + '@smithy/util-body-length-node@2.3.0': resolution: {integrity: sha512-ITWT1Wqjubf2CJthb0BuT9+bpzBfXeMokH/AAa5EJQgbv9aPMVfnM76iFIZVFf50hYXGbtiV71BHAthNWd6+dw==} engines: {node: '>=14.0.0'} + '@smithy/util-body-length-node@3.0.0': + resolution: {integrity: sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==} + engines: {node: '>=16.0.0'} + '@smithy/util-buffer-from@2.2.0': resolution: {integrity: sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} engines: {node: '>=14.0.0'} + '@smithy/util-buffer-from@3.0.0': + resolution: {integrity: sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==} + engines: {node: '>=16.0.0'} + '@smithy/util-config-provider@2.3.0': resolution: {integrity: sha512-HZkzrRcuFN1k70RLqlNK4FnPXKOpkik1+4JaBoHNJn+RnJGYqaa3c5/+XtLOXhlKzlRgNvyaLieHTW2VwGN0VQ==} engines: {node: '>=14.0.0'} + '@smithy/util-config-provider@3.0.0': + resolution: {integrity: sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==} + engines: {node: '>=16.0.0'} + '@smithy/util-defaults-mode-browser@2.2.1': resolution: 
{integrity: sha512-RtKW+8j8skk17SYowucwRUjeh4mCtnm5odCL0Lm2NtHQBsYKrNW0od9Rhopu9wF1gHMfHeWF7i90NwBz/U22Kw==} engines: {node: '>= 10.0.0'} + '@smithy/util-defaults-mode-browser@3.0.1': + resolution: {integrity: sha512-nW5kEzdJn1Bn5TF+gOPHh2rcPli8JU9vSSXLbfg7uPnfR1TMRQqs9zlYRhIb87NeSxIbpdXOI94tvXSy+fvDYg==} + engines: {node: '>= 10.0.0'} + '@smithy/util-defaults-mode-node@2.3.1': resolution: {integrity: sha512-vkMXHQ0BcLFysBMWgSBLSk3+leMpFSyyFj8zQtv5ZyUBx8/owVh1/pPEkzmW/DR/Gy/5c8vjLDD9gZjXNKbrpA==} engines: {node: '>= 10.0.0'} + '@smithy/util-defaults-mode-node@3.0.1': + resolution: {integrity: sha512-TFk+Qb+elLc/MOhtSp+50fstyfZ6avQbgH2d96xUBpeScu+Al9elxv+UFAjaTHe0HQe5n+wem8ZLpXvU8lwV6Q==} + engines: {node: '>= 10.0.0'} + '@smithy/util-endpoints@1.2.0': resolution: {integrity: sha512-BuDHv8zRjsE5zXd3PxFXFknzBG3owCpjq8G3FcsXW3CykYXuEqM3nTSsmLzw5q+T12ZYuDlVUZKBdpNbhVtlrQ==} engines: {node: '>= 14.0.0'} + '@smithy/util-endpoints@2.0.0': + resolution: {integrity: sha512-+exaXzEY3DNt2qtA2OtRNSDlVrE4p32j1JSsQkzA5AdP0YtJNjkYbYhJxkFmPYcjI1abuwopOZCwUmv682QkiQ==} + engines: {node: '>=16.0.0'} + '@smithy/util-hex-encoding@2.2.0': resolution: {integrity: sha512-7iKXR+/4TpLK194pVjKiasIyqMtTYJsgKgM242Y9uzt5dhHnUDvMNb+3xIhRJ9QhvqGii/5cRUt4fJn3dtXNHQ==} engines: {node: '>=14.0.0'} + '@smithy/util-hex-encoding@3.0.0': + resolution: {integrity: sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==} + engines: {node: '>=16.0.0'} + '@smithy/util-middleware@2.2.0': resolution: {integrity: sha512-L1qpleXf9QD6LwLCJ5jddGkgWyuSvWBkJwWAZ6kFkdifdso+sk3L3O1HdmPvCdnCK3IS4qWyPxev01QMnfHSBw==} engines: {node: '>=14.0.0'} + '@smithy/util-middleware@3.0.0': + resolution: {integrity: sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ==} + engines: {node: '>=16.0.0'} + '@smithy/util-retry@2.2.0': resolution: {integrity: sha512-q9+pAFPTfftHXRytmZ7GzLFFrEGavqapFc06XxzZFcSIGERXMerXxCitjOG1prVDR9QdjqotF40SWvbqcCpf8g==} 
engines: {node: '>= 14.0.0'} + '@smithy/util-retry@3.0.0': + resolution: {integrity: sha512-nK99bvJiziGv/UOKJlDvFF45F00WgPLKVIGUfAK+mDhzVN2hb/S33uW2Tlhg5PVBoqY7tDVqL0zmu4OxAHgo9g==} + engines: {node: '>=16.0.0'} + '@smithy/util-stream@2.2.0': resolution: {integrity: sha512-17faEXbYWIRst1aU9SvPZyMdWmqIrduZjVOqCPMIsWFNxs5yQQgFrJL6b2SdiCzyW9mJoDjFtgi53xx7EH+BXA==} engines: {node: '>=14.0.0'} + '@smithy/util-stream@3.0.1': + resolution: {integrity: sha512-7F7VNNhAsfMRA8I986YdOY5fE0/T1/ZjFF6OLsqkvQVNP3vZ/szYDfGCyphb7ioA09r32K/0qbSFfNFU68aSzA==} + engines: {node: '>=16.0.0'} + '@smithy/util-uri-escape@2.2.0': resolution: {integrity: sha512-jtmJMyt1xMD/d8OtbVJ2gFZOSKc+ueYJZPW20ULW1GOp/q/YIM0wNh+u8ZFao9UaIGz4WoPW8hC64qlWLIfoDA==} engines: {node: '>=14.0.0'} + '@smithy/util-uri-escape@3.0.0': + resolution: {integrity: sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==} + engines: {node: '>=16.0.0'} + '@smithy/util-utf8@2.3.0': resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} engines: {node: '>=14.0.0'} + '@smithy/util-utf8@3.0.0': + resolution: {integrity: sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==} + engines: {node: '>=16.0.0'} + '@smithy/util-waiter@2.2.0': resolution: {integrity: sha512-IHk53BVw6MPMi2Gsn+hCng8rFA3ZmR3Rk7GllxDUW9qFJl/hiSvskn7XldkECapQVkIg/1dHpMAxI9xSTaLLSA==} engines: {node: '>=14.0.0'} @@ -2576,45 +3277,60 @@ packages: '@vue/compiler-sfc': optional: true + '@tsconfig/node10@1.0.11': + resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} + + '@tsconfig/node12@1.0.11': + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + + '@tsconfig/node14@1.0.3': + resolution: {integrity: 
sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + + '@tsconfig/node16@1.0.4': + resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} + + '@types/async-retry@1.4.8': + resolution: {integrity: sha512-Qup/B5PWLe86yI5I3av6ePGaeQrIHNKCwbsQotD6aHQ6YkHsMUxVZkZsmx/Ry3VZQ6uysHwTjQ7666+k6UjVJA==} + '@types/axios@0.14.0': resolution: {integrity: sha512-KqQnQbdYE54D7oa/UmYVMZKq7CO4l8DEENzOKc4aBRwxCXSlJXGz83flFx5L7AWrOQnmuN3kVsRdt+GZPPjiVQ==} deprecated: This is a stub types definition for axios (https://github.com/mzabriskie/axios). axios provides its own type definitions, so you don't need @types/axios installed! - '@types/better-sqlite3@7.6.4': - resolution: {integrity: sha512-dzrRZCYPXIXfSR1/surNbJ/grU3scTaygS0OMzjlGf71i9sc2fGyHPXXiXmEvNIoE0cGwsanEFMVJxPXmco9Eg==} + '@types/better-sqlite3@7.6.10': + resolution: {integrity: sha512-TZBjD+yOsyrUJGmcUj6OS3JADk3+UZcNv3NOBqGkM09bZdi28fNZw8ODqbMOLfKCu7RYCO62/ldq1iHbzxqoPw==} - '@types/body-parser@1.19.2': - resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==} + '@types/body-parser@1.19.5': + resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==} - '@types/chai-subset@1.3.3': - resolution: {integrity: sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==} + '@types/connect@3.4.38': + resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==} - '@types/chai@4.3.5': - resolution: {integrity: sha512-mEo1sAde+UCE6b2hxn332f1g1E8WfYRu6p5SvTKr2ZKC1f7gFJXk4h5PyGP9Dt6gCaG8y8XhwnXWC6Iy2cmBng==} + '@types/docker-modem@3.0.6': + resolution: {integrity: sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==} - '@types/connect@3.4.35': - resolution: 
{integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==} + '@types/dockerode@3.3.29': + resolution: {integrity: sha512-5PRRq/yt5OT/Jf77ltIdz4EiR9+VLnPF+HpU4xGFwUqmV24Co2HKBNW3w+slqZ1CYchbcDeqJASHDYWzZCcMiQ==} - '@types/docker-modem@3.0.2': - resolution: {integrity: sha512-qC7prjoEYR2QEe6SmCVfB1x3rfcQtUr1n4x89+3e0wSTMQ/KYCyf+/RAA9n2tllkkNc6//JMUZePdFRiGIWfaQ==} - - '@types/dockerode@3.3.18': - resolution: {integrity: sha512-4EcP136jNMBZQ4zTHlI1VP2RpIQ2uJvRpjta3W2Cc7Ti7rk2r3TgVKjxR0Tb3NrT9ObXvl7Tv5nxra6BHEpkWg==} - - '@types/emscripten@1.39.6': - resolution: {integrity: sha512-H90aoynNhhkQP6DRweEjJp5vfUVdIj7tdPLsu7pq89vODD/lcugKfZOsfgwpvM6XUewEp2N5dCg1Uf3Qe55Dcg==} + '@types/emscripten@1.39.11': + resolution: {integrity: sha512-dOeX2BeNA7j6BTEqJQL3ut0bRCfsyQMd5i4FT8JfHfYhAOuJPCGh0dQFbxVJxUyQ+75x6enhDdndGb624/QszA==} '@types/estree@1.0.1': resolution: {integrity: sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==} - '@types/express-serve-static-core@4.17.33': - resolution: {integrity: sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==} + '@types/estree@1.0.5': + resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} + + '@types/express-serve-static-core@4.19.0': + resolution: {integrity: sha512-bGyep3JqPCRry1wq+O5n7oiBgGWmeIJXPjXXCo8EK0u8duZGSYar7cGqd3ML2JUsLGeB7fmc06KYo9fLGWqPvQ==} + + '@types/express@4.17.21': + resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} - '@types/express@4.17.17': - resolution: {integrity: sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==} + '@types/fs-extra@11.0.4': + resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} - '@types/fs-extra@11.0.1': - 
resolution: {integrity: sha512-MxObHvNl4A69ofaTRU8DFqvgzzv8s9yRtaPPm5gud9HDNvpB3GPQFvNuTWAI59B9huVGV5jXYJwbCsmBsOGYWA==} + '@types/http-errors@2.0.4': + resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==} '@types/istanbul-lib-coverage@2.0.6': resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} @@ -2631,11 +3347,11 @@ packages: '@types/json5@0.0.29': resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - '@types/jsonfile@6.1.1': - resolution: {integrity: sha512-GSgiRCVeapDN+3pqA35IkQwasaCh/0YFH5dEF6S88iDvEn901DjOeH3/QPY+XYP1DFzDZPvIvfeEgk+7br5png==} + '@types/jsonfile@6.1.4': + resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} - '@types/mime@3.0.1': - resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} + '@types/mime@1.3.5': + resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==} '@types/minimist@1.2.2': resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} @@ -2643,68 +3359,68 @@ packages: '@types/node-fetch@2.6.11': resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} + '@types/node-forge@1.3.11': + resolution: {integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==} + '@types/node@18.15.10': resolution: {integrity: sha512-9avDaQJczATcXgfmMAW3MIWArOO7A+m90vuCFLr8AotWf8igO/mRoYukrk2cqZVtv38tHs33retzHEilM7FpeQ==} - '@types/node@18.16.16': - resolution: {integrity: sha512-NpaM49IGQQAUlBhHMF82QH80J08os4ZmyF9MkpCzWAGuOHqE4gTEbhzd7L3l5LmWuZ6E0OiC1FweQ4tsiW35+g==} + 
'@types/node@18.19.33': + resolution: {integrity: sha512-NR9+KrpSajr2qBVp/Yt5TU/rp+b5Mayi3+OlMlcg2cVCfRmcG5PWZ7S4+MG9PZ5gWBoc9Pd0BKSRViuBCRPu0A==} '@types/node@20.10.1': resolution: {integrity: sha512-T2qwhjWwGH81vUEx4EXmBKsTJRXFXNZTL4v0gi01+zyBmCwzE6TyHszqX01m+QHTEq+EZNo13NeJIdEqf+Myrg==} - '@types/node@20.12.4': - resolution: {integrity: sha512-E+Fa9z3wSQpzgYQdYmme5X3OTuejnnTx88A6p6vkkJosR3KBz+HpE3kqNm98VE6cfLFcISx7zW7MsJkH6KwbTw==} - - '@types/node@20.2.5': - resolution: {integrity: sha512-JJulVEQXmiY9Px5axXHeYGLSjhkZEnD+MDPDGbCbIAbMslkKwmygtZFy1X6s/075Yo94sf8GuSlFfPzysQrWZQ==} - - '@types/node@20.8.7': - resolution: {integrity: sha512-21TKHHh3eUHIi2MloeptJWALuCu5H7HQTdTrWIFReA8ad+aggoX+lRes3ex7/FtpC+sVUpFMQ+QTfYr74mruiQ==} + '@types/node@20.12.12': + resolution: {integrity: sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==} '@types/normalize-package-data@2.4.1': resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} - '@types/pg@8.10.1': - resolution: {integrity: sha512-AmEHA/XxMxemQom5iDwP62FYNkv+gDDnetRG7v2N2dPtju7UKI7FknUimcZo7SodKTHtckYPzaTqUEvUKbVJEA==} + '@types/pg@8.11.6': + resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} '@types/pg@8.6.6': resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} - '@types/prop-types@15.7.11': - resolution: {integrity: sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==} + '@types/prop-types@15.7.12': + resolution: {integrity: sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==} '@types/ps-tree@1.1.2': resolution: {integrity: sha512-ZREFYlpUmPQJ0esjxoG1fMvB2HNaD3z+mjqdSosZvd3RalncI9NEur73P8ZJz4YQdL64CmV1w0RuqoRUlhQRBw==} - '@types/qs@6.9.7': - resolution: {integrity: 
sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==} + '@types/qs@6.9.15': + resolution: {integrity: sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==} - '@types/range-parser@1.2.4': - resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==} + '@types/range-parser@1.2.7': + resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} - '@types/react@18.2.45': - resolution: {integrity: sha512-TtAxCNrlrBp8GoeEp1npd5g+d/OejJHFxS3OWmrPBMFaVQMSN0OFySozJio5BHxTuTeug00AVXVAjfDSfk+lUg==} + '@types/react@18.3.1': + resolution: {integrity: sha512-V0kuGBX3+prX+DQ/7r2qsv1NsdfnCLnTgnRJ1pYnxykBhGMz+qj+box5lq7XsO5mtZsBqpjwwTu/7wszPfMBcw==} - '@types/scheduler@0.16.8': - resolution: {integrity: sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==} + '@types/retry@0.12.5': + resolution: {integrity: sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==} '@types/semver@7.5.3': resolution: {integrity: sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw==} - '@types/serve-static@1.15.1': - resolution: {integrity: sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==} + '@types/send@0.17.4': + resolution: {integrity: sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==} + + '@types/serve-static@1.15.7': + resolution: {integrity: sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==} - '@types/sql.js@1.4.4': - resolution: {integrity: sha512-6EWU2wfiBtzgTy18WQoXZAGTreBjhZcBCfD8CDvyI1Nj0a4KNDDt41IYeAZ40cRUdfqWHb7VGx7t6nK0yBOI5A==} + '@types/sql.js@1.4.9': + resolution: {integrity: 
sha512-ep8b36RKHlgWPqjNG9ToUrPiwkhwh0AEzy883mO5Xnd+cL6VBH1EvSjBAAuxLUFF2Vn/moE3Me6v9E1Lo+48GQ==} - '@types/ssh2@1.11.11': - resolution: {integrity: sha512-LdnE7UBpvHCgUznvn2fwLt2hkaENcKPFqOyXGkvyTLfxCXBN6roc1RmECNYuzzbHePzD3PaAov5rri9hehzx9Q==} + '@types/ssh2@1.15.0': + resolution: {integrity: sha512-YcT8jP5F8NzWeevWvcyrrLB3zcneVjzYY9ZDSMAMboI+2zR1qYWFhwsyOFVzT7Jorn67vqxC0FRiw8YyG9P1ww==} '@types/stack-utils@2.0.3': resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} - '@types/uuid@9.0.1': - resolution: {integrity: sha512-rFT3ak0/2trgvp4yYZo5iKFEPsET7vKydKF+VRCxlQ9bpheehyAJH89dAkaLEq/j/RZXJIqcgsmPJKUP1Z28HA==} + '@types/uuid@9.0.8': + resolution: {integrity: sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==} '@types/which@3.0.0': resolution: {integrity: sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ==} @@ -2856,8 +3572,8 @@ packages: resolution: {integrity: sha512-HEVXkU9IB+nk9o63CeICMHxFWbHWr3E1mpilIQBe9+7L/lH97rleFLVtYsfnWB+JVMaiFnEaxvknvmIzX+CqVg==} engines: {node: ^16.0.0 || >=18.0.0} - '@typescript/analyze-trace@0.10.0': - resolution: {integrity: sha512-VNoPaIcGrMnI0MQinlxg8IFAN7+xbqB0AdymUTHh6hIZqlhHFZr1X7xUBonTpL0xiDupHl+/GtP59pdOFOCqjw==} + '@typescript/analyze-trace@0.10.1': + resolution: {integrity: sha512-RnlSOPh14QbopGCApgkSx5UBgGda5MX1cHqp2fsqfiDyCwGL/m1jaeB9fzu7didVS81LQqGZZuxFBcg8YU8EVw==} hasBin: true '@ungap/structured-clone@1.2.0': @@ -2873,51 +3589,32 @@ packages: peerDependencies: graphql: ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 - '@vercel/postgres@0.3.0': - resolution: {integrity: sha512-cOC+x6qMnN54B4y0Fh0DV5LJQp2M7puIKbehQBMutY/8/zpzh+oKaQmnZb2QHn489MGOQKyRLJLgHa2P8M085Q==} - engines: {node: '>=14.6'} - '@vercel/postgres@0.8.0': resolution: {integrity: sha512-/QUV9ExwaNdKooRjOQqvrKNVnRvsaXeukPNI5DB1ovUTesglfR/fparw7ngo1KUWWKIVpEj2TRrA+ObRHRdaLg==} engines: {node: 
'>=14.6'} - '@vitest/expect@0.31.4': - resolution: {integrity: sha512-tibyx8o7GUyGHZGyPgzwiaPaLDQ9MMuCOrc03BYT0nryUuhLbL7NV2r/q98iv5STlwMgaKuFJkgBW/8iPKwlSg==} - - '@vitest/expect@0.34.6': - resolution: {integrity: sha512-QUzKpUQRc1qC7qdGo7rMK3AkETI7w18gTCUrsNnyjjJKYiuUB9+TQK3QnR1unhCnWRC0AbKv2omLGQDF/mIjOw==} - - '@vitest/runner@0.31.4': - resolution: {integrity: sha512-Wgm6UER+gwq6zkyrm5/wbpXGF+g+UBB78asJlFkIOwyse0pz8lZoiC6SW5i4gPnls/zUcPLWS7Zog0LVepXnpg==} + '@vitest/expect@1.6.0': + resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} - '@vitest/runner@0.34.6': - resolution: {integrity: sha512-1CUQgtJSLF47NnhN+F9X2ycxUP0kLHQ/JWvNHbeBfwW8CzEGgeskzNnHDyv1ieKTltuR6sdIHV+nmR6kPxQqzQ==} + '@vitest/runner@1.6.0': + resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==} - '@vitest/snapshot@0.31.4': - resolution: {integrity: sha512-LemvNumL3NdWSmfVAMpXILGyaXPkZbG5tyl6+RQSdcHnTj6hvA49UAI8jzez9oQyE/FWLKRSNqTGzsHuk89LRA==} + '@vitest/snapshot@1.6.0': + resolution: {integrity: sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==} - '@vitest/snapshot@0.34.6': - resolution: {integrity: sha512-B3OZqYn6k4VaN011D+ve+AA4whM4QkcwcrwaKwAbyyvS/NB1hCWjFIBQxAQQSQir9/RtyAAGuq+4RJmbn2dH4w==} + '@vitest/spy@1.6.0': + resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==} - '@vitest/spy@0.31.4': - resolution: {integrity: sha512-3ei5ZH1s3aqbEyftPAzSuunGICRuhE+IXOmpURFdkm5ybUADk+viyQfejNk6q8M5QGX8/EVKw+QWMEP3DTJDag==} - - '@vitest/spy@0.34.6': - resolution: {integrity: sha512-xaCvneSaeBw/cz8ySmF7ZwGvL0lBjfvqc1LpQ/vcdHEvpLn3Ff1vAvjw+CoGn0802l++5L/pxb7whwcWAw+DUQ==} - - '@vitest/ui@0.31.4': - resolution: {integrity: sha512-sKM16ITX6HrNFF+lNZ2AQAen4/6Bx2i6KlBfIvkUjcTgc5YII/j2ltcX14oCUv4EA0OTWGQuGhO3zDoAsTENGA==} + '@vitest/ui@1.6.0': + resolution: 
{integrity: sha512-k3Lyo+ONLOgylctiGovRKy7V4+dIN2yxstX3eY5cWFXH6WP+ooVX79YSyi0GagdTQzLmT43BF27T0s6dOIPBXA==} peerDependencies: - vitest: '>=0.30.1 <1' - - '@vitest/utils@0.31.4': - resolution: {integrity: sha512-DobZbHacWznoGUfYU8XDPY78UubJxXfMNY1+SUdOp1NsI34eopSA6aZMeaGu10waSOeYwE8lxrd/pLfT0RMxjQ==} + vitest: 1.6.0 - '@vitest/utils@0.34.6': - resolution: {integrity: sha512-IG5aDD8S6zlvloDsnzHw0Ut5xczlF+kv2BOTo+iXfPr54Yhi5qbVOgGB1hZaVq4iJ4C/MZ2J0y15IlsV/ZcI0A==} + '@vitest/utils@1.6.0': + resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==} - '@xata.io/client@0.29.3': - resolution: {integrity: sha512-GsH3RNU2P0fP+YKTFVZZ/DAkczJ6/25xyXg383GIlgLW9juy5PpMumscFIgfjWIbvnasATKNVN2127C4ONfOTg==} + '@xata.io/client@0.29.4': + resolution: {integrity: sha512-dRff4E/wINr0SYIlOHwApo0h8jzpAHVf2RcbGMkK9Xrddbe90KmCEx/gue9hLhBOoCCp6qUht2h9BsuVPruymw==} peerDependencies: typescript: '>=4.5' @@ -2945,8 +3642,8 @@ packages: peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - acorn-walk@8.2.0: - resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} + acorn-walk@8.3.2: + resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==} engines: {node: '>=0.4.0'} acorn@8.10.0: @@ -2968,8 +3665,8 @@ packages: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} - agentkeepalive@4.3.0: - resolution: {integrity: sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==} + agentkeepalive@4.5.0: + resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} engines: {node: '>= 8.0.0'} aggregate-error@3.1.0: @@ -3048,14 +3745,13 @@ packages: aproba@2.0.0: resolution: {integrity: 
sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} - are-we-there-yet@2.0.0: - resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} - engines: {node: '>=10'} - are-we-there-yet@3.0.1: resolution: {integrity: sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + arg@5.0.2: resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} @@ -3071,6 +3767,10 @@ packages: array-buffer-byte-length@1.0.0: resolution: {integrity: sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==} + array-buffer-byte-length@1.0.1: + resolution: {integrity: sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==} + engines: {node: '>= 0.4'} + array-find-index@1.0.2: resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} engines: {node: '>=0.10.0'} @@ -3102,6 +3802,10 @@ packages: resolution: {integrity: sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw==} engines: {node: '>= 0.4'} + arraybuffer.prototype.slice@1.0.3: + resolution: {integrity: sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==} + engines: {node: '>= 0.4'} + arrgv@1.0.2: resolution: {integrity: sha512-a4eg4yhp7mmruZDQFqVMlxNRFGi/i1r87pt8SDHy0/I8PqSXoUTlWZRdAZo0VXgvEARcujbtTk8kiZRi1uDGRw==} engines: {node: '>=8.0.0'} @@ -3116,9 +3820,6 @@ packages: asn1@0.2.6: resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} - assert@2.1.0: - 
resolution: {integrity: sha512-eLHpSK/Y4nhMJ07gDaAzoX/XAKS8PSaojml3M0DM4JpV1LAi5JOJ/p6H/XWrl8L+DzVEvVCW1z3vWAaB9oTsQw==} - assertion-error@1.1.0: resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} @@ -3137,6 +3838,9 @@ packages: async-limiter@1.0.1: resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} + async-retry@1.3.3: + resolution: {integrity: sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==} + asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} @@ -3144,16 +3848,6 @@ packages: resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} engines: {node: '>= 4.0.0'} - ava@5.2.0: - resolution: {integrity: sha512-W8yxFXJr/P68JP55eMpQIa6AiXhCX3VeuajM8nolyWNExcMDD6rnIWKTjw0B/+GkFHBIaN6Jd0LtcMThcoqVfg==} - engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'} - hasBin: true - peerDependencies: - '@ava/typescript': '*' - peerDependenciesMeta: - '@ava/typescript': - optional: true - ava@5.3.0: resolution: {integrity: sha512-QYvBdyygl1LGX13IuYsC4bkwVCzZeovMGbxYkD73i7DVJxNlWnFa06YgrBOTbjw2QvSKUl5fOJ92Kj5WK9hSeg==} engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'} @@ -3168,16 +3862,20 @@ packages: resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} engines: {node: '>= 0.4'} - axios@1.4.0: - resolution: {integrity: sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==} + available-typed-arrays@1.0.7: + resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} + engines: {node: '>= 0.4'} + + axios@1.6.8: + resolution: {integrity: 
sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==} babel-core@7.0.0-bridge.0: resolution: {integrity: sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==} peerDependencies: '@babel/core': ^7.0.0-0 - babel-plugin-polyfill-corejs2@0.4.10: - resolution: {integrity: sha512-rpIuu//y5OX6jVU+a5BCn1R5RSZYWAl2Nar76iwaOdycqb6JPxediskWFMMl7stfwNJR4b7eiQvh5fB5TEQJTQ==} + babel-plugin-polyfill-corejs2@0.4.11: + resolution: {integrity: sha512-sMEJ27L0gRHShOh5G54uAAPaiCOygY/5ratXuiyb2G46FmlSpc9eFCzYVyDiPxfNbwzA7mYahmjQc5q+CZQ09Q==} peerDependencies: '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 @@ -3186,27 +3884,19 @@ packages: peerDependencies: '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 - babel-plugin-polyfill-regenerator@0.6.1: - resolution: {integrity: sha512-JfTApdE++cgcTWjsiCQlLyFBMbTUft9ja17saCc93lgV33h4tuCVj7tlvu//qpLwaG+3yEz7/KhahGrUMkVq9g==} + babel-plugin-polyfill-regenerator@0.6.2: + resolution: {integrity: sha512-2R25rQZWP63nGwaAswvDazbPXfrM3HwVoBXK6HcqeKrSrL/JqcC/rDcf95l4r7LXLyxDXc8uQDa064GubtCABg==} peerDependencies: '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 - babel-plugin-react-native-web@0.18.12: - resolution: {integrity: sha512-4djr9G6fMdwQoD6LQ7hOKAm39+y12flWgovAqS1k5O8f42YQ3A1FFMyV5kKfetZuGhZO5BmNmOdRRZQ1TixtDw==} - - babel-plugin-syntax-trailing-function-commas@7.0.0-beta.0: - resolution: {integrity: sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ==} + babel-plugin-react-native-web@0.19.12: + resolution: {integrity: sha512-eYZ4+P6jNcB37lObWIg0pUbi7+3PKoU1Oie2j0C8UF3cXyXoR74tO2NBjI/FORb2LJyItJZEAmjU5pSaJYEL1w==} babel-plugin-transform-flow-enums@0.0.2: resolution: {integrity: sha512-g4aaCrDDOsWjbm0PUUeVnkcVd6AKJsVc/MbnPhEotEpkeJQP6b8nzewohQi7+QS8UyPehOhGWn0nOwjvWpmMvQ==} - babel-preset-expo@10.0.1: - resolution: {integrity: sha512-uWIGmLfbP3dS5+8nesxaW6mQs41d4iP7X82ZwRdisB/wAhKQmuJM9Y1jQe4006uNYkw6Phf2TT03ykLVro7KuQ==} - - 
babel-preset-fbjs@3.4.0: - resolution: {integrity: sha512-9ywCsCvo1ojrw0b+XYk7aFvTH6D9064t0RIL1rtMf3nsa02Xw41MS7sZw216Im35xj/UY0PDBQsa1brUDDF1Ow==} - peerDependencies: - '@babel/core': ^7.0.0 + babel-preset-expo@11.0.6: + resolution: {integrity: sha512-jRi9I5/jT+dnIiNJDjDg+I/pV+AlxrIW/DNbdqYoRWPZA/LHDqD6IJnJXLxbuTcQ+llp+0LWcU7f/kC/PgGpkw==} balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} @@ -3221,8 +3911,11 @@ packages: resolution: {integrity: sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ==} engines: {node: '>=12.0.0'} - better-sqlite3@8.4.0: - resolution: {integrity: sha512-NmsNW1CQvqMszu/CFAJ3pLct6NEFlNfuGM6vw72KHkjOD1UDnL96XNN1BMQc1hiHo8vE2GbOWQYIpZ+YM5wrZw==} + better-sqlite3@10.0.0: + resolution: {integrity: sha512-rOz0JY8bt9oMgrFssP7GnvA5R3yln73y/NizzWqy3WlFth8Ux8+g4r/N9fjX97nn4X1YX6MTER2doNpTu5pqiA==} + + better-sqlite3@8.7.0: + resolution: {integrity: sha512-99jZU4le+f3G6aIl6PmmV0cxUIWqKieHxsiF7G34CVFiE+/UabpYqkU0NJIkY/96mQKikHeBjtR27vFfs5JpEw==} big-integer@1.6.52: resolution: {integrity: sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==} @@ -3241,8 +3934,8 @@ packages: blueimp-md5@2.19.0: resolution: {integrity: sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==} - body-parser@1.20.1: - resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==} + body-parser@1.20.2: + resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} bowser@2.11.0: @@ -3269,6 +3962,10 @@ packages: resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} engines: {node: '>=8'} + braces@3.0.3: + resolution: 
{integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + browserslist@4.23.0: resolution: {integrity: sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} @@ -3289,17 +3986,9 @@ packages: buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - buffer-writer@2.0.0: - resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} - engines: {node: '>=4'} - buffer@5.7.1: resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} - bufferutil@4.0.7: - resolution: {integrity: sha512-kukuqc39WOHtdxtw4UScxF/WVnMFVSQVKhtx3AjZJzhd0RGZZldcrfSEbVsWWe6KNH253574cq5F+wpv0G9pJw==} - engines: {node: '>=6.14.2'} - bufferutil@4.0.8: resolution: {integrity: sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==} engines: {node: '>=6.14.2'} @@ -3315,11 +4004,11 @@ packages: builtins@1.0.3: resolution: {integrity: sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==} - builtins@5.0.1: - resolution: {integrity: sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==} + builtins@5.1.0: + resolution: {integrity: sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==} - bun-types@0.6.6: - resolution: {integrity: sha512-/LL3zPv7d+ZvHSD6TIhVB7l8h1rrMvuGlwILTGHrJJeAaHKq+7RgIV6N8A8kzhkYMFuTq9o2P/2o8gUL7RHtzg==} + bun-types@0.6.14: + resolution: {integrity: sha512-sRdvu+t59+H/TVOe7FSGFWYITbqkhiCx9NxVUHt2+JOXM9gUOe5uMPvVvcr/hGngnh+/yb5a7uPE4JaS6uxujg==} bun-types@1.0.3: resolution: {integrity: 
sha512-XlyKVdYCHa7K5PHYGcwOVOrGE/bMnLS51y7zFA3ZAAXyiQ6dTaNXNCWTTufgII/6ruN770uhAXphQmzvU/r2fQ==} @@ -3350,9 +4039,17 @@ packages: resolution: {integrity: sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==} engines: {node: '>= 10'} + cacache@18.0.3: + resolution: {integrity: sha512-qXCd4rh6I07cnDqh8V48/94Tc/WSfj+o3Gn6NZ0aZovS255bUx8O13uKxRFd2eWG0xgsco7+YItQNPaa5E85hg==} + engines: {node: ^16.14.0 || >=18.0.0} + call-bind@1.0.2: resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} + call-bind@1.0.7: + resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==} + engines: {node: '>= 0.4'} + caller-callsite@2.0.0: resolution: {integrity: sha512-JuG3qI4QOftFsZyOn1qq87fq5grLIyk1JYd5lJmdA+fG7aQ9pA/i3JIJGcO3q0MrRcHlOt1U+ZeHW8Dq9axALQ==} engines: {node: '>=4'} @@ -3369,8 +4066,8 @@ packages: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} - callsites@4.0.0: - resolution: {integrity: sha512-y3jRROutgpKdz5vzEhWM34TidDU8vkJppF8dszITeb1PQmSqV3DTxyV8G/lyO/DNvtE1YTedehmw9MPZsCBHxQ==} + callsites@4.1.0: + resolution: {integrity: sha512-aBMbD1Xxay75ViYezwT40aQONfr+pSXTHwNKvIXhXD6+LY3F1dLIcceoC5OZKBVHbXcysz1hL9D2w0JJIMXpUw==} engines: {node: '>=12.20'} camelcase@5.3.1: @@ -3385,8 +4082,8 @@ packages: resolution: {integrity: sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==} engines: {node: '>=14.16'} - caniuse-lite@1.0.30001605: - resolution: {integrity: sha512-nXwGlFWo34uliI9z3n6Qc0wZaf7zaZWA1CPZ169La5mV3I/gem7bst0vr5XQH5TJXZIMfDeZyOrZnSlVzKxxHQ==} + caniuse-lite@1.0.30001624: + resolution: {integrity: sha512-0dWnQG87UevOCPYaOR49CBcLBwoZLpws+k6W37nLjWUhumP1Isusj0p2u+3KhjNloRWK9OKMgjBBzPujQHw4nA==} cardinal@2.1.1: resolution: {integrity: 
sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==} @@ -3396,12 +4093,8 @@ packages: resolution: {integrity: sha512-DwGjNW9omn6EwP70aXsn7FQJx5kO12tX0bZkaTjzdVFM6/7nhA4t0EENocKGx6D2Bch9PE2KzCUf5SceBdeijg==} engines: {node: '>=12.19'} - chai@4.3.10: - resolution: {integrity: sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g==} - engines: {node: '>=4'} - - chai@4.3.7: - resolution: {integrity: sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==} + chai@4.4.1: + resolution: {integrity: sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==} engines: {node: '>=4'} chalk@2.4.2: @@ -3412,20 +4105,17 @@ packages: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} - chalk@5.2.0: - resolution: {integrity: sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - chalk@5.3.0: resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + charenc@0.0.2: resolution: {integrity: sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==} - check-error@1.0.2: - resolution: {integrity: sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==} - check-error@1.0.3: resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} @@ -3445,9 +4135,6 @@ packages: engines: {node: '>=12.13.0'} hasBin: true - 
chromium-edge-launcher@1.0.0: - resolution: {integrity: sha512-pgtgjNKZ7i5U++1g1PWv75umkHvhVTDOQIZ+sjeUX9483S7Y6MUvO0lrd7ShGlQlFHMN4SwKTCq/X8hWrbv2KA==} - chunkd@2.0.1: resolution: {integrity: sha512-7d58XsFmOq0j6el67Ug9mHf9ELUXsQXYJBkyxhH/k+6Ke0qXRnv0kbemx+Twc6fRJ07C49lcbdgm9FL1Ei/6SQ==} @@ -3613,6 +4300,9 @@ packages: engines: {node: ^14.13.0 || >=16.0.0} hasBin: true + confbox@0.1.7: + resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==} + connect@3.7.0: resolution: {integrity: sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ==} engines: {node: '>= 0.10.0'} @@ -3638,12 +4328,12 @@ packages: cookie-signature@1.0.6: resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} - cookie@0.5.0: - resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} + cookie@0.6.0: + resolution: {integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==} engines: {node: '>= 0.6'} - core-js-compat@3.36.1: - resolution: {integrity: sha512-Dk997v9ZCt3X/npqzyGdTlq6t7lDBhZwGvV94PKzDArjp7BTRm7WlDAXYd/OWdeFHO8OChQYRJNJvUCqCbrtKA==} + core-js-compat@3.37.1: + resolution: {integrity: sha512-9TNiImhKvQqSUkOvk/mMRZzOANTiEVC7WaBNhHcKM7x+/5E1l5NvsysR19zuDQScE8k+kfQXWRN3AtS/eOSHpg==} core-util-is@1.0.3: resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} @@ -3656,8 +4346,8 @@ packages: resolution: {integrity: sha512-vy2Vi1r2epK5WqxOLnskeKeZkdZvTKfFZQCplE3XWsP+SUJyd5XAUFC9lFgTjjXJF2GMne/UML14iEmkAaDfFg==} engines: {node: '>=14.16'} - cpu-features@0.0.9: - resolution: {integrity: sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==} + cpu-features@0.0.10: + resolution: {integrity: 
sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==} engines: {node: '>=10.0.0'} cpy-cli@5.0.0: @@ -3669,6 +4359,14 @@ packages: resolution: {integrity: sha512-VC2Gs20JcTyeQob6UViBLnyP0bYHkBh6EiKzot9vi2DmeGlFT9Wd7VG3NBrkNx/jYvFBeyDOMMHdHQhbtKLgHQ==} engines: {node: '>=16'} + create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + + cross-env@7.0.3: + resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} + engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} + hasBin: true + cross-fetch@3.1.8: resolution: {integrity: sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==} @@ -3708,6 +4406,18 @@ packages: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} + data-view-buffer@1.0.1: + resolution: {integrity: sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==} + engines: {node: '>= 0.4'} + + data-view-byte-length@1.0.1: + resolution: {integrity: sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==} + engines: {node: '>= 0.4'} + + data-view-byte-offset@1.0.0: + resolution: {integrity: sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==} + engines: {node: '>= 0.4'} + date-fns@2.30.0: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} engines: {node: '>=0.11'} @@ -3716,8 +4426,8 @@ packages: resolution: {integrity: sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==} engines: {node: '>=6'} - dayjs@1.11.10: - resolution: {integrity: 
sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==} + dayjs@1.11.11: + resolution: {integrity: sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==} debug@2.6.9: resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} @@ -3774,6 +4484,10 @@ packages: defaults@1.0.4: resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + define-data-property@1.1.4: + resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} + engines: {node: '>= 0.4'} + define-lazy-prop@2.0.0: resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} engines: {node: '>=8'} @@ -3782,14 +4496,14 @@ packages: resolution: {integrity: sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==} engines: {node: '>= 0.4'} + define-properties@1.2.1: + resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} + engines: {node: '>= 0.4'} + del@6.1.1: resolution: {integrity: sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==} engines: {node: '>=10'} - del@7.0.0: - resolution: {integrity: sha512-tQbV/4u5WVB8HMJr08pgw0b6nG4RGt/tj+7Numvq+zqcvUFeMaIWWOUFltiU+6go8BSO2/ogsB4EasDaj0y68Q==} - engines: {node: '>=14.16'} - delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} @@ -3808,10 +4522,6 @@ packages: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} engines: {node: '>= 0.8'} - deprecated-react-native-prop-types@5.0.0: - resolution: {integrity: 
sha512-cIK8KYiiGVOFsKdPMmm1L3tA/Gl+JopXL6F5+C7x39MyPsQYnP57Im/D6bNUzcborD7fcMwiwZqcBdBXXZucYQ==} - engines: {node: '>=18'} - dequal@2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} @@ -3825,18 +4535,22 @@ packages: engines: {node: '>=0.10'} hasBin: true - detect-libc@2.0.1: - resolution: {integrity: sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==} - engines: {node: '>=8'} - detect-libc@2.0.2: resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} engines: {node: '>=8'} + detect-libc@2.0.3: + resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==} + engines: {node: '>=8'} + diff-sequences@29.6.3: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + diff@5.1.0: resolution: {integrity: sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==} engines: {node: '>=0.3.1'} @@ -3864,24 +4578,20 @@ packages: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} - dotenv-expand@10.0.0: - resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} + dotenv-expand@11.0.6: + resolution: {integrity: sha512-8NHi73otpWsZGBSZwwknTXS5pqMOrk9+Ssrna8xCaxkzEpU9OTf9R5ArQGVw03//Zmk9MOwLPng9WwndvpAJ5g==} engines: {node: '>=12'} dotenv@10.0.0: resolution: {integrity: 
sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==} engines: {node: '>=10'} - dotenv@16.0.3: - resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==} - engines: {node: '>=12'} - - dotenv@16.1.4: - resolution: {integrity: sha512-m55RtE8AsPeJBpOIFKihEmqUcoVncQIwo7x9U8ZwLEZw9ZpXboz2c+rvog+jUaJvVrZ5kBOeYQBX5+8Aa/OZQw==} + dotenv@16.4.5: + resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} engines: {node: '>=12'} - dprint@0.45.0: - resolution: {integrity: sha512-3444h7V47XoA16qgIWjw3CV/Eo/rQbT/XTGlbJ/6vJ+apQyuo0+M3Ai0GS3wu7X9HBUDcA0zIHA3mOxWNz6toA==} + dprint@0.46.3: + resolution: {integrity: sha512-ACEd7B7sO/uvPvV/nsHbtkIeMqeD2a8XGO1DokROtKDUmI5WbuflGZOwyjFCYwy4rkX6FXoYBzGdEQ6um7BjCA==} hasBin: true dreamopt@0.8.0: @@ -3954,6 +4664,10 @@ packages: sqlite3: optional: true + drizzle-prisma-generator@0.1.4: + resolution: {integrity: sha512-6gY17/wTWfNF40rKjiYeWdkU8Gi6FQiOlU4oXa8uuo3ZZ8E6FH3250AhgCOMWAKZLpjQnk8FSzS0GXzwHkShkQ==} + hasBin: true + duplexer@0.1.2: resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} @@ -3963,11 +4677,11 @@ packages: ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - electron-to-chromium@1.4.727: - resolution: {integrity: sha512-brpv4KTeC4g0Fx2FeIKytLd4UGn1zBQq5Lauy7zEWT9oqkaj5mgsxblEZIAOf1HHLlXxzr6adGViiBy5Z39/CA==} + electron-to-chromium@1.4.783: + resolution: {integrity: sha512-bT0jEz/Xz1fahQpbZ1D7LgmPYZ3iHVY39NcWWro1+hA2IvjiPeaXtfSqrQ+nXjApMvQRE2ASt1itSLRrebHMRQ==} - emittery@1.0.1: - resolution: {integrity: sha512-2ID6FdrMD9KDLldGesP6317G78K7km/kMcwItRtVFva7I/cSEOIaLpewaUb+YLXVwdAp3Ctfxh/V5zIl1sj7dQ==} + emittery@1.0.3: + resolution: {integrity: 
sha512-tJdCJitoy2lrC2ldJcqN4vkqJ00lT+tOWNT1hBJjO/3FDMJa5TTIiYGCKGkn/WfCyOzUMObeohbVTj00fhiLiA==} engines: {node: '>=14.16'} emoji-regex@8.0.0: @@ -3976,6 +4690,9 @@ packages: emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + emojilib@2.4.0: + resolution: {integrity: sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==} + encodeurl@1.0.2: resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} engines: {node: '>= 0.8'} @@ -3994,8 +4711,8 @@ packages: resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} engines: {node: '>=6'} - envinfo@7.11.1: - resolution: {integrity: sha512-8PiZgZNIB4q/Lw4AhOvAfB/ityHAd2bli3lESSWmWSzSsl5dKpy5N1d1Rfkd2teq/g9xN90lc6o98DOjMeYHpg==} + envinfo@7.13.0: + resolution: {integrity: sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==} engines: {node: '>=4'} hasBin: true @@ -4019,10 +4736,30 @@ packages: resolution: {integrity: sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw==} engines: {node: '>= 0.4'} + es-abstract@1.23.3: + resolution: {integrity: sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==} + engines: {node: '>= 0.4'} + + es-define-property@1.0.0: + resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.0.0: + resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} + engines: {node: '>= 
0.4'} + es-set-tostringtag@2.0.1: resolution: {integrity: sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==} engines: {node: '>= 0.4'} + es-set-tostringtag@2.0.3: + resolution: {integrity: sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==} + engines: {node: '>= 0.4'} + es-shim-unscopables@1.0.0: resolution: {integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==} @@ -4183,6 +4920,16 @@ packages: engines: {node: '>=12'} hasBin: true + esbuild@0.20.2: + resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.21.5: + resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} + engines: {node: '>=12'} + hasBin: true + escalade@3.1.1: resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} engines: {node: '>=6'} @@ -4329,6 +5076,9 @@ packages: estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + esutils@2.0.3: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} @@ -4362,6 +5112,10 @@ packages: resolution: {integrity: sha512-QVWlX2e50heYJcCPG0iWtf8r0xjEYfz/OYLGDYH+IyjWezzPNxz63qNFOu0l4YftGWuizFVZHHs8PrLU5p2IDA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + execa@8.0.1: + resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} + engines: {node: '>=16.17'} + exit@0.1.2: resolution: 
{integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} engines: {node: '>= 0.8.0'} @@ -4370,47 +5124,49 @@ packages: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} - expo-asset@9.0.2: - resolution: {integrity: sha512-PzYKME1MgUOoUvwtdzhAyXkjXOXGiSYqGKG/MsXwWr0Ef5wlBaBm2DCO9V6KYbng5tBPFu6hTjoRNil1tBOSow==} + expo-asset@10.0.6: + resolution: {integrity: sha512-waP73/ccn/HZNNcGM4/s3X3icKjSSbEQ9mwc6tX34oYNg+XE5WdwOuZ9wgVVFrU7wZMitq22lQXd2/O0db8bxg==} + peerDependencies: + expo: '*' - expo-constants@15.4.5: - resolution: {integrity: sha512-1pVVjwk733hbbIjtQcvUFCme540v4gFemdNlaxM2UXKbfRCOh2hzgKN5joHMOysoXQe736TTUrRj7UaZI5Yyhg==} + expo-constants@16.0.1: + resolution: {integrity: sha512-s6aTHtglp926EsugWtxN7KnpSsE9FCEjb7CgEjQQ78Gpu4btj4wB+IXot2tlqNwqv+x7xFe5veoPGfJDGF/kVg==} peerDependencies: expo: '*' - expo-file-system@16.0.8: - resolution: {integrity: sha512-yDbVT0TUKd7ewQjaY5THum2VRFx2n/biskGhkUmLh3ai21xjIVtaeIzHXyv9ir537eVgt4ReqDNWi7jcXjdUcA==} + expo-file-system@17.0.1: + resolution: {integrity: sha512-dYpnZJqTGj6HCYJyXAgpFkQWsiCH3HY1ek2cFZVHFoEc5tLz9gmdEgTF6nFHurvmvfmXqxi7a5CXyVm0aFYJBw==} peerDependencies: expo: '*' - expo-font@11.10.3: - resolution: {integrity: sha512-q1Td2zUvmLbCA9GV4OG4nLPw5gJuNY1VrPycsnemN1m8XWTzzs8nyECQQqrcBhgulCgcKZZJJ6U0kC2iuSoQHQ==} + expo-font@12.0.5: + resolution: {integrity: sha512-h/VkN4jlHYDJ6T6pPgOYTVoDEfBY0CTKQe4pxnPDGQiE6H+DFdDgk+qWVABGpRMH0+zXoHB+AEi3OoQjXIynFA==} peerDependencies: expo: '*' - expo-keep-awake@12.8.2: - resolution: {integrity: sha512-uiQdGbSX24Pt8nGbnmBtrKq6xL/Tm3+DuDRGBk/3ZE/HlizzNosGRIufIMJ/4B4FRw4dw8KU81h2RLuTjbay6g==} + expo-keep-awake@13.0.2: + resolution: {integrity: sha512-kKiwkVg/bY0AJ5q1Pxnm/GvpeB6hbNJhcFsoOWDh2NlpibhCLaHL826KHUM+WsnJRbVRxJ+K9vbPRHEMvFpVyw==} peerDependencies: expo: '*' - expo-modules-autolinking@1.10.3: - resolution: {integrity: 
sha512-pn4n2Dl4iRh/zUeiChjRIe1C7EqOw1qhccr85viQV7W6l5vgRpY0osE51ij5LKg/kJmGRcJfs12+PwbdTplbKw==} + expo-modules-autolinking@1.11.1: + resolution: {integrity: sha512-2dy3lTz76adOl7QUvbreMCrXyzUiF8lygI7iFJLjgIQIVH+43KnFWE5zBumpPbkiaq0f0uaFpN9U0RGQbnKiMw==} hasBin: true - expo-modules-core@1.11.12: - resolution: {integrity: sha512-/e8g4kis0pFLer7C0PLyx98AfmztIM6gU9jLkYnB1pU9JAfQf904XEi3bmszO7uoteBQwSL6FLp1m3TePKhDaA==} + expo-modules-core@1.12.11: + resolution: {integrity: sha512-CF5G6hZo/6uIUz6tj4dNRlvE5L4lakYukXPqz5ZHQ+6fLk1NQVZbRdpHjMkxO/QSBQcKUzG/ngeytpoJus7poQ==} - expo-sqlite@13.2.0: - resolution: {integrity: sha512-TYpX+a+2oJOxzChug8+TkIob0lipl7rluCRBGXbGKG68kG4Reb6OCruRiQTJTnbGiEgnN4S+B0cT8f4ZXPUxBg==} + expo-sqlite@13.4.0: + resolution: {integrity: sha512-5f7d2EDM+pgerM33KndtX4gWw2nuVaXY68nnqx7PhkiYeyEmeNfZ29bIFtpBzNb/L5l0/DTtRxuSqftxbknFtw==} peerDependencies: expo: '*' - expo@50.0.14: - resolution: {integrity: sha512-yLPdxCMVAbmeEIpzzyAuJ79wvr6ToDDtQmuLDMAgWtjqP8x3CGddXxUe07PpKEQgzwJabdHvCLP5Bv94wMFIjQ==} + expo@51.0.8: + resolution: {integrity: sha512-bdTOiMb1f3PChtuqEZ9czUm2gMTmS0r1+H+Pkm2O3PsuLnOgxfIBzL6S37+J4cUocLBaENrmx9SOGKpzhBqXpg==} hasBin: true - express@4.18.2: - resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==} + express@4.19.2: + resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==} engines: {node: '>= 0.10.0'} ext@1.7.0: @@ -4422,10 +5178,6 @@ packages: fast-diff@1.3.0: resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==} - fast-glob@3.2.12: - resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} - engines: {node: '>=8.6.0'} - fast-glob@3.3.1: resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} engines: 
{node: '>=8.6.0'} @@ -4444,8 +5196,8 @@ packages: resolution: {integrity: sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==} hasBin: true - fast-xml-parser@4.3.6: - resolution: {integrity: sha512-M2SovcRxD4+vC493Uc2GZVcZaj66CCJhWurC4viynVSTvrpErCShNcDz1lAho6n9REQKvL/ll4A4/fw6Y9z8nw==} + fast-xml-parser@4.4.0: + resolution: {integrity: sha512-kLY3jFlwIYwBNDojclKsNAC12sfD6NwW74QB2CoNGPvtVxjliYehVunB3HYyNi+n4Tt1dAcgwYvmKF/Z18flqg==} hasBin: true fastq@1.15.0: @@ -4467,14 +5219,11 @@ packages: resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} engines: {node: ^12.20 || >= 14.13} - fetch-ponyfill@7.1.0: - resolution: {integrity: sha512-FhbbL55dj/qdVO3YNK7ZEkshvj3eQ7EuIGV2I6ic/2YiocvyWv+7jg2s4AyS0wdRU75s3tA8ZxI/xPigb0v5Aw==} - fetch-retry@4.1.1: resolution: {integrity: sha512-e6eB7zN6UBSwGVwrbWVH+gdLnkW9WwHhmq2YDK1Sh30pzx1onRVGBvogTlUeWxwTa+L86NYdo4hFkh7O8ZjSnA==} - fflate@0.7.4: - resolution: {integrity: sha512-5u2V/CDW15QM1XbbgS+0DfPxVB+jUKhWEKuuFuHncbk3tEEqzmoXL+2KyOFuKGqOnmdIy0/davWF1CkuwtibCw==} + fflate@0.8.2: + resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} figures@5.0.0: resolution: {integrity: sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==} @@ -4491,6 +5240,10 @@ packages: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + finalhandler@1.1.2: resolution: {integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==} engines: {node: '>= 0.8'} @@ -4526,21 +5279,21 @@ packages: resolution: {integrity: 
sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew==} engines: {node: '>=12.0.0'} - flatted@3.2.7: - resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==} - flatted@3.2.9: resolution: {integrity: sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==} + flatted@3.3.1: + resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} + flow-enums-runtime@0.0.6: resolution: {integrity: sha512-3PYnM29RFXwvAN6Pc/scUfkI7RwhQ/xqyLUyPNlXUp9S40zI8nup9tUSrTLSVnWGBN38FNiGWbwZOB6uR4OGdw==} - flow-parser@0.206.0: - resolution: {integrity: sha512-HVzoK3r6Vsg+lKvlIZzaWNBVai+FXTX1wdYhz/wVlH13tb/gOdLXmlTqy6odmTBhT5UoWUbq0k8263Qhr9d88w==} + flow-parser@0.236.0: + resolution: {integrity: sha512-0OEk9Gr+Yj7wjDW2KgaNYUypKau71jAfFyeLQF5iVtxqc6uJHag/MT7pmaEApf4qM7u86DkBcd4ualddYMfbLw==} engines: {node: '>=0.4.0'} - follow-redirects@1.15.2: - resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} + follow-redirects@1.15.6: + resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} peerDependencies: debug: '*' @@ -4608,6 +5361,10 @@ packages: resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} engines: {node: '>= 8'} + fs-minipass@3.0.3: + resolution: {integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -4626,6 +5383,10 @@ packages: resolution: {integrity: 
sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} engines: {node: '>= 0.4'} + function.prototype.name@1.1.6: + resolution: {integrity: sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==} + engines: {node: '>= 0.4'} + functions-have-names@1.2.3: resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} @@ -4633,10 +5394,6 @@ packages: resolution: {integrity: sha512-vKQDA9g868cZiW8ulgs2uN1yx1i7/nsS33jTMOxekk0Z03BJLffVcdW6AVD32fWb3E6RtmWWuBXBZOk8cLXFNQ==} hasBin: true - gauge@3.0.2: - resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==} - engines: {node: '>=10'} - gauge@4.0.4: resolution: {integrity: sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -4652,15 +5409,16 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - get-func-name@2.0.0: - resolution: {integrity: sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==} - get-func-name@2.0.2: resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} get-intrinsic@1.2.1: resolution: {integrity: sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==} + get-intrinsic@1.2.4: + resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} + engines: {node: '>= 0.4'} + get-package-type@0.1.0: resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} engines: {node: '>=8.0.0'} @@ -4669,8 +5427,8 @@ packages: resolution: 
{integrity: sha512-x5UJKlgeUiNT8nyo/AcnwLnZuZNcSjSw0kogRB+Whd1fjjFq4B1hySFxSFWWSn4mIBzg3sRNUDFYc4g5gjPoLg==} engines: {node: '>=4'} - get-port@7.0.0: - resolution: {integrity: sha512-mDHFgApoQd+azgMdwylJrv2DX47ywGq1i5VFJE7fZ0dttNq3iQMfsU4IvEgBHojA3KqEudyu7Vq+oN8kNaNkWw==} + get-port@7.1.0: + resolution: {integrity: sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==} engines: {node: '>=16'} get-stream@4.1.0: @@ -4681,12 +5439,20 @@ packages: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} + get-stream@8.0.1: + resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} + engines: {node: '>=16'} + get-symbol-description@1.0.0: resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} engines: {node: '>= 0.4'} - get-tsconfig@4.5.0: - resolution: {integrity: sha512-MjhiaIWCJ1sAU4pIQ5i5OfOuHHxVo1oYeNsWTON7jxYkod8pHocXeh+SSbmu5OZZZK73B6cbJ2XADzXehLyovQ==} + get-symbol-description@1.0.2: + resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==} + engines: {node: '>= 0.4'} + + get-tsconfig@4.7.5: + resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} getenv@1.0.0: resolution: {integrity: sha512-7yetJWqbS9sbn0vIfliPsFgoXMKn/YMF+Wuiog97x+urnSRRRZ7xB+uVkwGKzRgq9CDFfMQnE9ruL5DHv9c6Xg==} @@ -4716,6 +5482,11 @@ packages: engines: {node: '>=16 || 14 >=14.17'} hasBin: true + glob@10.4.1: + resolution: {integrity: sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==} + engines: {node: '>=16 || 14 >=14.18'} + hasBin: true + glob@6.0.4: resolution: {integrity: 
sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==} deprecated: Glob versions prior to v9 are no longer supported @@ -4746,16 +5517,16 @@ packages: resolution: {integrity: sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==} engines: {node: '>= 0.4'} + globalthis@1.0.4: + resolution: {integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==} + engines: {node: '>= 0.4'} + globby@11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} - globby@13.1.3: - resolution: {integrity: sha512-8krCNHXvlCgHDpegPzleMq07yMYTO2sXKASmZmquEYWEmCx6J5UTRbp5RwMJkTJGtcQ44YpiUYUiN0b9mzy8Bw==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - globby@13.1.4: - resolution: {integrity: sha512-iui/IiiW+QrJ1X1hKH5qwlMQyv34wJAYwH1vrf8b9kBA4sNiif3gKsMHa+BrdnOpEudWjpotfa7LrTzB1ERS/g==} + globby@13.2.2: + resolution: {integrity: sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} globrex@0.1.2: @@ -4797,10 +5568,17 @@ packages: has-property-descriptors@1.0.0: resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} + has-property-descriptors@1.0.2: + resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} + has-proto@1.0.1: resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} engines: {node: '>= 0.4'} + has-proto@1.0.3: + resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==} + engines: {node: '>= 0.4'} + has-symbols@1.0.3: resolution: {integrity: 
sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} engines: {node: '>= 0.4'} @@ -4809,6 +5587,10 @@ packages: resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} engines: {node: '>= 0.4'} + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + has-unicode@2.0.1: resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} @@ -4823,14 +5605,14 @@ packages: heap@0.2.7: resolution: {integrity: sha512-2bsegYkkHO+h/9MGbn6KWcE45cHZgPANo5LXF7EvWdT0yT2EguSVO1nDgU5c8+ZOPwp2vMNa7YFsJhVcDR9Sdg==} - hermes-estree@0.15.0: - resolution: {integrity: sha512-lLYvAd+6BnOqWdnNbP/Q8xfl8LOGw4wVjfrNd9Gt8eoFzhNBRVD95n4l2ksfMVOoxuVyegs85g83KS9QOsxbVQ==} + hermes-estree@0.19.1: + resolution: {integrity: sha512-daLGV3Q2MKk8w4evNMKwS8zBE/rcpA800nu1Q5kM08IKijoSnPe9Uo1iIxzPKRkn95IxxsgBMPeYHt3VG4ej2g==} hermes-estree@0.20.1: resolution: {integrity: sha512-SQpZK4BzR48kuOg0v4pb3EAGNclzIlqMj3Opu/mu7bbAoFw6oig6cEt/RAi0zTFW/iW6Iz9X9ggGuZTAZ/yZHg==} - hermes-parser@0.15.0: - resolution: {integrity: sha512-Q1uks5rjZlE9RjMMjSUCkGrEIPI5pKJILeCtK1VmTj7U4pf3wVPoo+cxfu+s4cBAPy2JzikIIdCZgBoR6x7U1Q==} + hermes-parser@0.19.1: + resolution: {integrity: sha512-Vp+bXzxYJWrpEuJ/vXxUsLnt0+y4q9zyi4zUlkLqD8FKv4LjIfOvP69R/9Lty3dCyKh0E2BU7Eypqr63/rKT/A==} hermes-parser@0.20.1: resolution: {integrity: sha512-BL5P83cwCogI8D7rrDCgsFY0tdYUtmFP9XaXtl2IQjC+2Xo+4okjfXintlTxcIwl4qeGddEl28Z11kbVIw0aNA==} @@ -4839,8 +5621,8 @@ packages: resolution: {integrity: sha512-cnN7bQUm65UWOy6cbGcCcZ3rpwW8Q/j4OP5aWRhEry4Z2t2aR1cjrbp0BS+KiBN0smvP1caBgAuxutvyvJILzQ==} engines: {node: '>=8'} - hono@4.2.1: - resolution: {integrity: sha512-yDv/6esHiDgq5fvsALTNyNiRxktOplO6LfSMgIkGE+E5vF8axsUfcyfzzlbLYM9ZQDMuws/ZLGUGWwYnAwCcNw==} + hono@4.0.1: + 
resolution: {integrity: sha512-S9cREGPJIAK437RhroOf1PGlJPIlt5itl69OmQ6onPLo5pdCbSHGL8v4uAKxrdHjcTyuoyvKPqWm5jv0dGkdFA==} engines: {node: '>=16.0.0'} hosted-git-info@2.8.9: @@ -4873,6 +5655,10 @@ packages: resolution: {integrity: sha512-rQLskxnM/5OCldHo+wNXbpVgDn5A17CUoKX+7Sokwaknlq7CdSnphy0W39GU8dw59XiCXmFXDg4fRuckQRKewQ==} engines: {node: '>=12.20.0'} + human-signals@5.0.0: + resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} + engines: {node: '>=16.17.0'} + humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} @@ -4947,6 +5733,10 @@ packages: resolution: {integrity: sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==} engines: {node: '>= 0.4'} + internal-slot@1.0.7: + resolution: {integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==} + engines: {node: '>= 0.4'} + interpret@2.2.0: resolution: {integrity: sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==} engines: {node: '>= 0.10'} @@ -4954,13 +5744,14 @@ packages: invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} + ip-address@9.0.5: + resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} + engines: {node: '>= 12'} + ip-regex@2.1.0: resolution: {integrity: sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==} engines: {node: '>=4'} - ip@2.0.0: - resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==} - ipaddr.js@1.9.1: resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} engines: {node: '>= 
0.10'} @@ -4969,13 +5760,13 @@ packages: resolution: {integrity: sha512-1ANGLZ+Nkv1ptFb2pa8oG8Lem4krflKuX/gINiHJHjJUKaJHk/SXk5x6K3J+39/p0h1RQ2saROclJJ+QLvETCQ==} engines: {node: '>=8'} - is-arguments@1.1.1: - resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} - engines: {node: '>= 0.4'} - is-array-buffer@3.0.2: resolution: {integrity: sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==} + is-array-buffer@3.0.4: + resolution: {integrity: sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==} + engines: {node: '>= 0.4'} + is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} @@ -5013,6 +5804,10 @@ packages: is-core-module@2.13.1: resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==} + is-data-view@1.0.1: + resolution: {integrity: sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==} + engines: {node: '>= 0.4'} + is-date-object@1.0.5: resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} engines: {node: '>= 0.4'} @@ -5049,10 +5844,6 @@ packages: resolution: {integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==} engines: {node: '>=12'} - is-generator-function@1.0.10: - resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} - engines: {node: '>= 0.4'} - is-glob@2.0.1: resolution: {integrity: sha512-a1dBeB19NXsf/E0+FHqkagizel/LQw2DjSQpvQrj3zT+jYPpaUCryPnrQajXKFLCMuf4I6FhRpaGtw4lPrG6Eg==} engines: {node: '>=0.10.0'} @@ -5072,14 +5863,14 @@ packages: is-lambda@1.0.1: resolution: {integrity: 
sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} - is-nan@1.3.2: - resolution: {integrity: sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==} - engines: {node: '>= 0.4'} - is-negative-zero@2.0.2: resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} engines: {node: '>= 0.4'} + is-negative-zero@2.0.3: + resolution: {integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==} + engines: {node: '>= 0.4'} + is-number-object@1.0.7: resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} engines: {node: '>= 0.4'} @@ -5092,18 +5883,10 @@ packages: resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==} engines: {node: '>=6'} - is-path-cwd@3.0.0: - resolution: {integrity: sha512-kyiNFFLU0Ampr6SDZitD/DwUo4Zs1nSdnygUBqsu3LooL00Qvb5j+UnvApUn/TTj1J3OuE6BTdQ5rudKmU2ZaA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - is-path-inside@3.0.3: resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} engines: {node: '>=8'} - is-path-inside@4.0.0: - resolution: {integrity: sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==} - engines: {node: '>=12'} - is-plain-object@2.0.4: resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} engines: {node: '>=0.10.0'} @@ -5128,6 +5911,10 @@ packages: is-shared-array-buffer@1.0.2: resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} + is-shared-array-buffer@1.0.3: + resolution: {integrity: 
sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==} + engines: {node: '>= 0.4'} + is-stream@1.1.0: resolution: {integrity: sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==} engines: {node: '>=0.10.0'} @@ -5152,6 +5939,10 @@ packages: resolution: {integrity: sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==} engines: {node: '>= 0.4'} + is-typed-array@1.1.13: + resolution: {integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==} + engines: {node: '>= 0.4'} + is-unicode-supported@0.1.0: resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} engines: {node: '>=10'} @@ -5196,6 +5987,10 @@ packages: resolution: {integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==} engines: {node: '>=14'} + jackspeak@3.1.2: + resolution: {integrity: sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==} + engines: {node: '>=14'} + javascript-natural-sort@0.7.1: resolution: {integrity: sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==} @@ -5230,8 +6025,8 @@ packages: jimp-compact@0.16.1: resolution: {integrity: sha512-dZ6Ra7u1G8c4Letq/B5EzAxj4tLFHL+cGtdpR+PVm4yzPDj+lCk+AbivWt1eOM+ikzkowtyV7qSqX6qr3t71Ww==} - joi@17.12.3: - resolution: {integrity: sha512-2RRziagf555owrm9IRVtdKynOBeITiDpuZqIpgwqXShPncPKNiRQoiGsl/T8SQdq+8ugRzH2LqY67irr2y/d+g==} + joi@17.13.1: + resolution: {integrity: sha512-vaBlIKCyo4FCUtCm7Eu4QZd/q02bWcxfUO6YSXAZOWF6gzcLBeba8kwotUdYJjDLW8Cz8RywsSOqiNJZW0mNvg==} join-component@1.1.0: resolution: {integrity: sha512-bF7vcQxbODoGK1imE2P9GS9aw4zD0Sd+Hni68IMZLj7zRnquH7dXUmMw9hDI5S/Jzt7q+IyTXN0rSg2GI0IKhQ==} @@ -5246,8 +6041,8 @@ packages: resolution: {integrity: 
sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} engines: {node: '>=10'} - js-base64@3.7.5: - resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + js-base64@3.7.7: + resolution: {integrity: sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==} js-string-escape@1.0.1: resolution: {integrity: sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==} @@ -5256,6 +6051,9 @@ packages: js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + js-tokens@9.0.0: + resolution: {integrity: sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==} + js-yaml@3.14.1: resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} hasBin: true @@ -5264,6 +6062,9 @@ packages: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true + jsbn@1.1.0: + resolution: {integrity: sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==} + jsc-android@250231.0.0: resolution: {integrity: sha512-rS46PvsjYmdmuz1OAWXY/1kCYG7pnf1TBqeTiOJr1iDz7s5DLxxC9n/ZMknLDxzYzNVfI7R95MH10emSSG1Wuw==} @@ -5322,9 +6123,6 @@ packages: engines: {node: '>=6'} hasBin: true - jsonc-parser@3.2.0: - resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} - jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} @@ -5359,8 +6157,8 @@ packages: resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} - 
knex@2.4.2: - resolution: {integrity: sha512-tMI1M7a+xwHhPxjbl/H9K1kHX+VncEYcvCx5K00M16bWvpYPKAZd6QrCu68PtHAdIZNQPWZn0GVhqVBEthGWCg==} + knex@2.5.1: + resolution: {integrity: sha512-z78DgGKUr4SE/6cm7ku+jHvFT0X97aERh/f0MUKAKgFnwCYBEW4TFBqtHWFYiJFid7fMrtpZ/gxJthvz5mEByA==} engines: {node: '>=12'} hasBin: true peerDependencies: @@ -5387,10 +6185,42 @@ packages: tedious: optional: true + knex@3.1.0: + resolution: {integrity: sha512-GLoII6hR0c4ti243gMs5/1Rb3B+AjwMOfjYm97pu0FOQa7JH56hgBxYf5WK2525ceSbBY1cjeZ9yk99GPMB6Kw==} + engines: {node: '>=16'} + hasBin: true + peerDependencies: + better-sqlite3: '*' + mysql: '*' + mysql2: '*' + pg: '*' + pg-native: '*' + sqlite3: '*' + tedious: '*' + peerDependenciesMeta: + better-sqlite3: + optional: true + mysql: + optional: true + mysql2: + optional: true + pg: + optional: true + pg-native: + optional: true + sqlite3: + optional: true + tedious: + optional: true + kysely@0.25.0: resolution: {integrity: sha512-srn0efIMu5IoEBk0tBmtGnoUss4uwvxtbFQWG/U2MosfqIace1l43IFP1PmEpHRDp+Z79xIcKEqmHH3dAvQdQA==} engines: {node: '>=14.0.0'} + kysely@0.27.3: + resolution: {integrity: sha512-lG03Ru+XyOJFsjH3OMY6R/9U38IjDPfnOfDgO3ynhbDr+Dz8fak+X6L62vqu3iybQnj+lG84OttBuU9KY3L9kA==} + engines: {node: '>=14.0.0'} + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} @@ -5399,8 +6229,8 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - libsql@0.3.10: - resolution: {integrity: sha512-/8YMTbwWFPmrDWY+YFK3kYqVPFkMgQre0DGmBaOmjogMdSe+7GHm1/q9AZ61AWkEub/vHmi+bA4tqIzVhKnqzg==} + libsql@0.3.18: + resolution: {integrity: sha512-lvhKr7WV3NLWRbXkjn/MeKqXOAqWKU0PX9QYrvDh7fneukapj+iUQ4qgJASrQyxcCrEsClXCQiiK5W6OoYPAlA==} os: [darwin, linux, win32] lighthouse-logger@1.4.2: @@ -5412,52 +6242,110 @@ packages: cpu: [arm64] os: [darwin] + 
lightningcss-darwin-arm64@1.25.1: + resolution: {integrity: sha512-G4Dcvv85bs5NLENcu/s1f7ehzE3D5ThnlWSDwE190tWXRQCQaqwcuHe+MGSVI/slm0XrxnaayXY+cNl3cSricw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + lightningcss-darwin-x64@1.19.0: resolution: {integrity: sha512-Lif1wD6P4poaw9c/4Uh2z+gmrWhw/HtXFoeZ3bEsv6Ia4tt8rOJBdkfVaUJ6VXmpKHALve+iTyP2+50xY1wKPw==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [darwin] + lightningcss-darwin-x64@1.25.1: + resolution: {integrity: sha512-dYWuCzzfqRueDSmto6YU5SoGHvZTMU1Em9xvhcdROpmtOQLorurUZz8+xFxZ51lCO2LnYbfdjZ/gCqWEkwixNg==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.25.1: + resolution: {integrity: sha512-hXoy2s9A3KVNAIoKz+Fp6bNeY+h9c3tkcx1J3+pS48CqAt+5bI/R/YY4hxGL57fWAIquRjGKW50arltD6iRt/w==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + lightningcss-linux-arm-gnueabihf@1.19.0: resolution: {integrity: sha512-P15VXY5682mTXaiDtbnLYQflc8BYb774j2R84FgDLJTN6Qp0ZjWEFyN1SPqyfTj2B2TFjRHRUvQSSZ7qN4Weig==} engines: {node: '>= 12.0.0'} cpu: [arm] os: [linux] + lightningcss-linux-arm-gnueabihf@1.25.1: + resolution: {integrity: sha512-tWyMgHFlHlp1e5iW3EpqvH5MvsgoN7ZkylBbG2R2LWxnvH3FuWCJOhtGcYx9Ks0Kv0eZOBud789odkYLhyf1ng==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + lightningcss-linux-arm64-gnu@1.19.0: resolution: {integrity: sha512-zwXRjWqpev8wqO0sv0M1aM1PpjHz6RVIsBcxKszIG83Befuh4yNysjgHVplF9RTU7eozGe3Ts7r6we1+Qkqsww==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [linux] + lightningcss-linux-arm64-gnu@1.25.1: + resolution: {integrity: sha512-Xjxsx286OT9/XSnVLIsFEDyDipqe4BcLeB4pXQ/FEA5+2uWCCuAEarUNQumRucnj7k6ftkAHUEph5r821KBccQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + lightningcss-linux-arm64-musl@1.19.0: resolution: {integrity: sha512-vSCKO7SDnZaFN9zEloKSZM5/kC5gbzUjoJQ43BvUpyTFUX7ACs/mDfl2Eq6fdz2+uWhUh7vf92c4EaaP4udEtA==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [linux] + lightningcss-linux-arm64-musl@1.25.1: + 
resolution: {integrity: sha512-IhxVFJoTW8wq6yLvxdPvyHv4NjzcpN1B7gjxrY3uaykQNXPHNIpChLB52+wfH+yS58zm1PL4LemUp8u9Cfp6Bw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + lightningcss-linux-x64-gnu@1.19.0: resolution: {integrity: sha512-0AFQKvVzXf9byrXUq9z0anMGLdZJS+XSDqidyijI5njIwj6MdbvX2UZK/c4FfNmeRa2N/8ngTffoIuOUit5eIQ==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [linux] + lightningcss-linux-x64-gnu@1.25.1: + resolution: {integrity: sha512-RXIaru79KrREPEd6WLXfKfIp4QzoppZvD3x7vuTKkDA64PwTzKJ2jaC43RZHRt8BmyIkRRlmywNhTRMbmkPYpA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + lightningcss-linux-x64-musl@1.19.0: resolution: {integrity: sha512-SJoM8CLPt6ECCgSuWe+g0qo8dqQYVcPiW2s19dxkmSI5+Uu1GIRzyKA0b7QqmEXolA+oSJhQqCmJpzjY4CuZAg==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [linux] + lightningcss-linux-x64-musl@1.25.1: + resolution: {integrity: sha512-TdcNqFsAENEEFr8fJWg0Y4fZ/nwuqTRsIr7W7t2wmDUlA8eSXVepeeONYcb+gtTj1RaXn/WgNLB45SFkz+XBZA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + lightningcss-win32-x64-msvc@1.19.0: resolution: {integrity: sha512-C+VuUTeSUOAaBZZOPT7Etn/agx/MatzJzGRkeV+zEABmPuntv1zihncsi+AyGmjkkzq3wVedEy7h0/4S84mUtg==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [win32] + lightningcss-win32-x64-msvc@1.25.1: + resolution: {integrity: sha512-9KZZkmmy9oGDSrnyHuxP6iMhbsgChUiu/NSgOx+U1I/wTngBStDf2i2aGRCHvFqj19HqqBEI4WuGVQBa2V6e0A==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + lightningcss@1.19.0: resolution: {integrity: sha512-yV5UR7og+Og7lQC+70DA7a8ta1uiOPnWPJfxa0wnxylev5qfo4P+4iMpzWAdYWOca4jdNQZii+bDL/l+4hUXIA==} engines: {node: '>= 12.0.0'} + lightningcss@1.25.1: + resolution: {integrity: sha512-V0RMVZzK1+rCHpymRv4URK2lNhIRyO8g7U7zOFwVAhJuat74HtkjIQpQRKNCwFEYkRGpafOpmXXLoaoBcyVtBg==} + engines: {node: '>= 12.0.0'} + lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} engines: {node: '>=10'} @@ -5473,8 
+6361,8 @@ packages: resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - local-pkg@0.4.3: - resolution: {integrity: sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g==} + local-pkg@0.5.0: + resolution: {integrity: sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==} engines: {node: '>=14'} locate-path@3.0.0: @@ -5527,8 +6415,12 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true - loupe@2.3.6: - resolution: {integrity: sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==} + loupe@2.3.7: + resolution: {integrity: sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==} + + lru-cache@10.2.2: + resolution: {integrity: sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==} + engines: {node: 14 || >=16.14} lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -5552,21 +6444,15 @@ packages: lru-queue@0.1.0: resolution: {integrity: sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==} - magic-string@0.30.0: - resolution: {integrity: sha512-LA+31JYDJLs82r2ScLrlz1GjSgu66ZV518eyWT+S8VhyQn/JL0u9MeBOvQMGYiPk1DBiSN9DDMOcXvigJZaViQ==} - engines: {node: '>=12'} - - magic-string@0.30.5: - resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==} - engines: {node: '>=12'} + magic-string@0.30.10: + resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} make-dir@2.1.0: resolution: {integrity: 
sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} engines: {node: '>=6'} - make-dir@3.1.0: - resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} - engines: {node: '>=8'} + make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} make-fetch-happen@9.1.0: resolution: {integrity: sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==} @@ -5582,14 +6468,14 @@ packages: map-stream@0.1.0: resolution: {integrity: sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==} - marked-terminal@5.2.0: - resolution: {integrity: sha512-Piv6yNwAQXGFjZSaiNljyNFw7jKDdGrw70FSbtxEyldLsyeuV5ZHm/1wW++kWbrOF1VPnUgYOhB2oLL0ZpnekA==} - engines: {node: '>=14.13.1 || >=16.0.0'} + marked-terminal@6.2.0: + resolution: {integrity: sha512-ubWhwcBFHnXsjYNsu+Wndpg0zhY4CahSpPlA70PlO0rR9r2sZpkyU+rkCsOWH+KMEkx847UpALON+HWgxowFtw==} + engines: {node: '>=16.0.0'} peerDependencies: - marked: ^1.0.0 || ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 + marked: '>=1 <12' - marked@5.1.2: - resolution: {integrity: sha512-ahRPGXJpjMjwSOlBoTMZAK7ATXkli5qCPxZ21TG44rx1KEo44bii4ekgTDQPNRQ4Kh7JMb9Ub1PVk1NxRSsorg==} + marked@9.1.6: + resolution: {integrity: sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q==} engines: {node: '>= 16'} hasBin: true @@ -5653,61 +6539,61 @@ packages: resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} engines: {node: '>= 0.6'} - metro-babel-transformer@0.80.8: - resolution: {integrity: sha512-TTzNwRZb2xxyv4J/+yqgtDAP2qVqH3sahsnFu6Xv4SkLqzrivtlnyUbaeTdJ9JjtADJUEjCbgbFgUVafrXdR9Q==} + metro-babel-transformer@0.80.9: + resolution: {integrity: 
sha512-d76BSm64KZam1nifRZlNJmtwIgAeZhZG3fi3K+EmPOlrR8rDtBxQHDSN3fSGeNB9CirdTyabTMQCkCup6BXFSQ==} engines: {node: '>=18'} - metro-cache-key@0.80.8: - resolution: {integrity: sha512-qWKzxrLsRQK5m3oH8ePecqCc+7PEhR03cJE6Z6AxAj0idi99dHOSitTmY0dclXVB9vP2tQIAE8uTd8xkYGk8fA==} + metro-cache-key@0.80.9: + resolution: {integrity: sha512-hRcYGhEiWIdM87hU0fBlcGr+tHDEAT+7LYNCW89p5JhErFt/QaAkVx4fb5bW3YtXGv5BTV7AspWPERoIb99CXg==} engines: {node: '>=18'} - metro-cache@0.80.8: - resolution: {integrity: sha512-5svz+89wSyLo7BxdiPDlwDTgcB9kwhNMfNhiBZPNQQs1vLFXxOkILwQiV5F2EwYT9DEr6OPZ0hnJkZfRQ8lDYQ==} + metro-cache@0.80.9: + resolution: {integrity: sha512-ujEdSI43QwI+Dj2xuNax8LMo8UgKuXJEdxJkzGPU6iIx42nYa1byQ+aADv/iPh5sh5a//h5FopraW5voXSgm2w==} engines: {node: '>=18'} - metro-config@0.80.8: - resolution: {integrity: sha512-VGQJpfJawtwRzGzGXVUoohpIkB0iPom4DmSbAppKfumdhtLA8uVeEPp2GM61kL9hRvdbMhdWA7T+hZFDlo4mJA==} + metro-config@0.80.9: + resolution: {integrity: sha512-28wW7CqS3eJrunRGnsibWldqgwRP9ywBEf7kg+uzUHkSFJNKPM1K3UNSngHmH0EZjomizqQA2Zi6/y6VdZMolg==} engines: {node: '>=18'} - metro-core@0.80.8: - resolution: {integrity: sha512-g6lud55TXeISRTleW6SHuPFZHtYrpwNqbyFIVd9j9Ofrb5IReiHp9Zl8xkAfZQp8v6ZVgyXD7c130QTsCz+vBw==} + metro-core@0.80.9: + resolution: {integrity: sha512-tbltWQn+XTdULkGdzHIxlxk4SdnKxttvQQV3wpqqFbHDteR4gwCyTR2RyYJvxgU7HELfHtrVbqgqAdlPByUSbg==} engines: {node: '>=18'} - metro-file-map@0.80.8: - resolution: {integrity: sha512-eQXMFM9ogTfDs2POq7DT2dnG7rayZcoEgRbHPXvhUWkVwiKkro2ngcBE++ck/7A36Cj5Ljo79SOkYwHaWUDYDw==} + metro-file-map@0.80.9: + resolution: {integrity: sha512-sBUjVtQMHagItJH/wGU9sn3k2u0nrCl0CdR4SFMO1tksXLKbkigyQx4cbpcyPVOAmGTVuy3jyvBlELaGCAhplQ==} engines: {node: '>=18'} - metro-minify-terser@0.80.8: - resolution: {integrity: sha512-y8sUFjVvdeUIINDuW1sejnIjkZfEF+7SmQo0EIpYbWmwh+kq/WMj74yVaBWuqNjirmUp1YNfi3alT67wlbBWBQ==} + metro-minify-terser@0.80.9: + resolution: {integrity: 
sha512-FEeCeFbkvvPuhjixZ1FYrXtO0araTpV6UbcnGgDUpH7s7eR5FG/PiJz3TsuuPP/HwCK19cZtQydcA2QrCw446A==} engines: {node: '>=18'} - metro-resolver@0.80.8: - resolution: {integrity: sha512-JdtoJkP27GGoZ2HJlEsxs+zO7jnDUCRrmwXJozTlIuzLHMRrxgIRRby9fTCbMhaxq+iA9c+wzm3iFb4NhPmLbQ==} + metro-resolver@0.80.9: + resolution: {integrity: sha512-wAPIjkN59BQN6gocVsAvvpZ1+LQkkqUaswlT++cJafE/e54GoVkMNCmrR4BsgQHr9DknZ5Um/nKueeN7kaEz9w==} engines: {node: '>=18'} - metro-runtime@0.80.8: - resolution: {integrity: sha512-2oScjfv6Yb79PelU1+p8SVrCMW9ZjgEiipxq7jMRn8mbbtWzyv3g8Mkwr+KwOoDFI/61hYPUbY8cUnu278+x1g==} + metro-runtime@0.80.9: + resolution: {integrity: sha512-8PTVIgrVcyU+X/rVCy/9yxNlvXsBCk5JwwkbAm/Dm+Abo6NBGtNjWF0M1Xo/NWCb4phamNWcD7cHdR91HhbJvg==} engines: {node: '>=18'} - metro-source-map@0.80.8: - resolution: {integrity: sha512-+OVISBkPNxjD4eEKhblRpBf463nTMk3KMEeYS8Z4xM/z3qujGJGSsWUGRtH27+c6zElaSGtZFiDMshEb8mMKQg==} + metro-source-map@0.80.9: + resolution: {integrity: sha512-RMn+XS4VTJIwMPOUSj61xlxgBvPeY4G6s5uIn6kt6HB6A/k9ekhr65UkkDD7WzHYs3a9o869qU8tvOZvqeQzgw==} engines: {node: '>=18'} - metro-symbolicate@0.80.8: - resolution: {integrity: sha512-nwhYySk79jQhwjL9QmOUo4wS+/0Au9joEryDWw7uj4kz2yvw1uBjwmlql3BprQCBzRdB3fcqOP8kO8Es+vE31g==} + metro-symbolicate@0.80.9: + resolution: {integrity: sha512-Ykae12rdqSs98hg41RKEToojuIW85wNdmSe/eHUgMkzbvCFNVgcC0w3dKZEhSsqQOXapXRlLtHkaHLil0UD/EA==} engines: {node: '>=18'} hasBin: true - metro-transform-plugins@0.80.8: - resolution: {integrity: sha512-sSu8VPL9Od7w98MftCOkQ1UDeySWbsIAS5I54rW22BVpPnI3fQ42srvqMLaJUQPjLehUanq8St6OMBCBgH/UWw==} + metro-transform-plugins@0.80.9: + resolution: {integrity: sha512-UlDk/uc8UdfLNJhPbF3tvwajyuuygBcyp+yBuS/q0z3QSuN/EbLllY3rK8OTD9n4h00qZ/qgxGv/lMFJkwP4vg==} engines: {node: '>=18'} - metro-transform-worker@0.80.8: - resolution: {integrity: sha512-+4FG3TQk3BTbNqGkFb2uCaxYTfsbuFOCKMMURbwu0ehCP8ZJuTUramkaNZoATS49NSAkRgUltgmBa4YaKZ5mqw==} + metro-transform-worker@0.80.9: + resolution: {integrity: 
sha512-c/IrzMUVnI0hSVVit4TXzt3A1GiUltGVlzCmLJWxNrBGHGrJhvgePj38+GXl1Xf4Fd4vx6qLUkKMQ3ux73bFLQ==} engines: {node: '>=18'} - metro@0.80.8: - resolution: {integrity: sha512-in7S0W11mg+RNmcXw+2d9S3zBGmCARDxIwoXJAmLUQOQoYsRP3cpGzyJtc7WOw8+FXfpgXvceD0u+PZIHXEL7g==} + metro@0.80.9: + resolution: {integrity: sha512-Bc57Xf3GO2Xe4UWQsBj/oW6YfLPABEu8jfDVDiNmJvoQW4CO34oDPuYKe4KlXzXhcuNsqOtSxpbjCRRVjhhREg==} engines: {node: '>=18'} hasBin: true @@ -5715,6 +6601,10 @@ packages: resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} engines: {node: '>=8.6'} + micromatch@4.0.7: + resolution: {integrity: sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==} + engines: {node: '>=8.6'} + mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} @@ -5768,6 +6658,10 @@ packages: resolution: {integrity: sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==} engines: {node: '>=16 || 14 >=14.17'} + minimatch@9.0.4: + resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} + engines: {node: '>=16 || 14 >=14.17'} + minimist@1.2.8: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} @@ -5775,6 +6669,10 @@ packages: resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==} engines: {node: '>= 8'} + minipass-collect@2.0.1: + resolution: {integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==} + engines: {node: '>=16 || 14 >=14.17'} + minipass-fetch@1.4.1: resolution: {integrity: sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==} engines: {node: '>=8'} 
@@ -5795,14 +6693,14 @@ packages: resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} engines: {node: '>=8'} - minipass@4.2.5: - resolution: {integrity: sha512-+yQl7SX3bIT83Lhb4BVorMAHVuqsskxRdlmO9kTpyukp8vsm2Sn/fUOV9xlnG8/a5JsypJzap21lz/y3FBMJ8Q==} - engines: {node: '>=8'} - minipass@5.0.0: resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==} engines: {node: '>=8'} + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + minizlib@2.1.2: resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} engines: {node: '>= 8'} @@ -5819,18 +6717,15 @@ packages: engines: {node: '>=10'} hasBin: true - mlly@1.3.0: - resolution: {integrity: sha512-HT5mcgIQKkOrZecOjOX3DJorTikWXwsBfpcr/MGBkhfWcjiqvnaL/9ppxvIUXfjT6xt4DVIAsN9fMUz1ev4bIw==} - - mlly@1.4.2: - resolution: {integrity: sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg==} + mlly@1.7.0: + resolution: {integrity: sha512-U9SDaXGEREBYQgfejV97coK0UL1r+qnF2SyO9A3qcI8MzKnsIFKHNVEkrDyNncQTKQQumsasmeq84eNMdBfsNQ==} mri@1.2.0: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} - mrmime@1.0.1: - resolution: {integrity: sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==} + mrmime@2.0.0: + resolution: {integrity: sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==} engines: {node: '>=10'} ms@2.0.0: @@ -5850,6 +6745,10 @@ packages: resolution: {integrity: sha512-MxDQJztArk4JFX1PKVjDhIXRzAmVJfuqZrVU+my6NeYBAA/XZRaDw5q7vga8TNvgyy3Lv3rivBFBBuJFbsdjaw==} engines: {node: '>= 8.0'} + mysql2@3.9.8: + 
resolution: {integrity: sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==} + engines: {node: '>= 8.0'} + mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -5860,11 +6759,6 @@ packages: nan@2.19.0: resolution: {integrity: sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==} - nanoid@3.3.6: - resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -5903,15 +6797,16 @@ packages: resolution: {integrity: sha512-WDD0bdg9mbq6F4mRxEYcPWwfA1vxd0mrvKOyxI7Xj/atfRHVeutzuWByG//jfm4uPzp0y4Kj051EORCBSQMycw==} engines: {node: '>=12.0.0'} - node-abi@3.40.0: - resolution: {integrity: sha512-zNy02qivjjRosswoYmPi8hIKJRr8MpQyeKT6qlcq/OnOgA3Rhoae+IYOqsM9V5+JnHWmxKnWOT2GxvtqdtOCXA==} + node-abi@3.62.0: + resolution: {integrity: sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g==} engines: {node: '>=10'} node-abort-controller@3.1.1: resolution: {integrity: sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==} - node-addon-api@4.3.0: - resolution: {integrity: sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==} + node-addon-api@7.1.0: + resolution: {integrity: sha512-mNcltoe1R8o7STTegSOHdnJNN7s5EUvhoS7ShnTHDyOSd+8H+UdWODq6qSv67PjC8Zc5JRT8+oLAMCr0SIXw7g==} + engines: {node: ^16 || ^18 || >= 20} node-dir@0.1.17: resolution: {integrity: sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg==} @@ -5921,26 +6816,9 @@ packages: 
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} - node-emoji@1.11.0: - resolution: {integrity: sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==} - - node-fetch@2.6.11: - resolution: {integrity: sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - - node-fetch@2.6.9: - resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true + node-emoji@2.1.3: + resolution: {integrity: sha512-E2WEOVsgs7O16zsURJ/eH8BqhF029wGpEOnv7Urwdo2wmQanOACwJQh0devF9D9RhoZru0+9JXIS0dBXIAz+lA==} + engines: {node: '>=18'} node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} @@ -5963,8 +6841,8 @@ packages: resolution: {integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==} engines: {node: '>= 6.13.0'} - node-gyp-build@4.6.0: - resolution: {integrity: sha512-NTZVKn9IylLwUzaKjkas1e4u2DLNcV4rdYagA4PWdPwW87Bi7z+BznyKSRwS/761tV/lzCGXplWsiaMjLqP2zQ==} + node-gyp-build@4.8.1: + resolution: {integrity: sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw==} hasBin: true node-gyp@8.4.1: @@ -6012,13 +6890,10 @@ packages: resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} engines: {node: '>=8'} - npm-run-path@5.1.0: - resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==} + npm-run-path@5.3.0: + 
resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - npmlog@5.0.1: - resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==} - npmlog@6.0.2: resolution: {integrity: sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -6029,8 +6904,8 @@ packages: nullthrows@1.1.1: resolution: {integrity: sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==} - ob1@0.80.8: - resolution: {integrity: sha512-QHJQk/lXMmAW8I7AIM3in1MSlwe1umR72Chhi8B7Xnq6mzjhBKkA6Fy/zAhQnGkA4S912EPCEvTij5yh+EQTAA==} + ob1@0.80.9: + resolution: {integrity: sha512-v9yOxowkZbxWhKOaaTyLjIm1aLy4ebMNcSn4NYJKOAI/Qv+SkfEfszpLr2GIxsccmb2Y2HA9qtsqiIJ80ucpVA==} engines: {node: '>=18'} object-assign@4.1.1: @@ -6044,9 +6919,8 @@ packages: object-inspect@1.12.3: resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==} - object-is@1.1.5: - resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} - engines: {node: '>= 0.4'} + object-inspect@1.13.1: + resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} object-keys@1.1.1: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} @@ -6056,6 +6930,10 @@ packages: resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} engines: {node: '>= 0.4'} + object.assign@4.1.5: + resolution: {integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==} + engines: {node: '>= 0.4'} + 
object.fromentries@2.0.6: resolution: {integrity: sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==} engines: {node: '>= 0.4'} @@ -6113,8 +6991,8 @@ packages: resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} engines: {node: '>=12'} - openid-client@5.6.5: - resolution: {integrity: sha512-5P4qO9nGJzB5PI0LFlhj4Dzg3m4odt0qsJTfyEtZyOlkgpILwEioOhVVJOrS1iVH494S4Ee5OCjjg6Bf5WOj3w==} + openid-client@5.6.4: + resolution: {integrity: sha512-T1h3B10BRPKfcObdBklX639tVz+xh34O7GjofqrqiAQdm7eHsQ00ih18x6wuJ/E6FxdtS2u3FmUGPDeEcMwzNA==} optionator@0.9.3: resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==} @@ -6168,6 +7046,10 @@ packages: resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + p-limit@5.0.0: + resolution: {integrity: sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==} + engines: {node: '>=18'} + p-locate@3.0.0: resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==} engines: {node: '>=6'} @@ -6204,9 +7086,6 @@ packages: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} - packet-reader@1.0.0: - resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} - parent-module@1.0.1: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} @@ -6272,6 +7151,10 @@ packages: resolution: {integrity: sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==} engines: {node: '>=16 || 14 >=14.17'} + 
path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + path-scurry@1.7.0: resolution: {integrity: sha512-UkZUeDjczjYRE495+9thsgcVgsaCPkaw80slmfVFgllxY+IO8ubTsOpFVjDPROBqJdHfVPUFRHPBV/WciOVfWg==} engines: {node: '>=16 || 14 >=14.17'} @@ -6283,8 +7166,8 @@ packages: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} - pathe@1.1.1: - resolution: {integrity: sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q==} + pathe@1.1.2: + resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} pathval@1.1.1: resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==} @@ -6295,11 +7178,14 @@ packages: pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} - pg-connection-string@2.5.0: - resolution: {integrity: sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==} + pg-connection-string@2.6.1: + resolution: {integrity: sha512-w6ZzNu6oMmIzEAYVw+RLK0+nqHPt8K3ZnknKi+g48Ak2pr3dtljJW3o+D/n2zzCG07Zoe9VOX3aiKpj+BN0pjg==} - pg-connection-string@2.6.0: - resolution: {integrity: sha512-x14ibktcwlHKoHxx9X3uTVW9zIGR41ZB6QNhHb21OPNdCCO3NaRnpJuwKIQSR4u+Yqjx4HCvy7Hh7VSy1U4dGg==} + pg-connection-string@2.6.2: + resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + + pg-connection-string@2.6.4: + resolution: {integrity: sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==} pg-int8@1.0.1: resolution: {integrity: 
sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} @@ -6309,24 +7195,24 @@ packages: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - pg-pool@3.6.0: - resolution: {integrity: sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ==} + pg-pool@3.6.2: + resolution: {integrity: sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==} peerDependencies: pg: '>=8.0' - pg-protocol@1.6.0: - resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + pg-protocol@1.6.1: + resolution: {integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==} pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} engines: {node: '>=4'} - pg-types@4.0.1: - resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} + pg-types@4.0.2: + resolution: {integrity: sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==} engines: {node: '>=10'} - pg@8.11.0: - resolution: {integrity: sha512-meLUVPn2TWgJyLmy7el3fQQVwft4gU5NGyvV0XbD41iU9Jbg8lCH4zexhIkihDzVHJStlt6r088G6/fWeNjhXA==} + pg@8.11.5: + resolution: {integrity: sha512-jqgNHSKL5cbDjFlHyYsCXmQDrfIX/3RsNwYqpd4N0Kt8niLuNoRNH+aazv6cOd43gPh9Y4DjQCtb+X0MH0Hvnw==} engines: {node: '>= 8.0.0'} peerDependencies: pg-native: '>=3.0.1' @@ -6340,6 +7226,9 @@ packages: picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} + picocolors@1.0.1: + resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + 
picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} @@ -6364,8 +7253,8 @@ packages: resolution: {integrity: sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==} engines: {node: '>=6'} - pkg-types@1.0.3: - resolution: {integrity: sha512-nN7pYi0AQqJnoLPC9eHFQ8AcyaixBUOwvqc5TDnIKCMEE6I0y8P7OKA7fPexsXGCGxQDl/cmrLAp26LhcwxZ4A==} + pkg-types@1.1.0: + resolution: {integrity: sha512-/RpmvKdxKf8uILTtoOhAgf30wYbP2Qw+L9p3Rvshx1JZVX+XQNZQFjlbmGHEGIm4CkVPlSn+NXmIM8+9oWQaSA==} plist@3.1.0: resolution: {integrity: sha512-uysumyrvkUX0rX/dEVqt8gC3sTBzd4zoWfLeS29nb53imdaXVvLINYXTI2GNqzaMuvacNx4uJQ8+b3zXR0pkgQ==} @@ -6383,6 +7272,10 @@ packages: resolution: {integrity: sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w==} engines: {node: '>=4.0.0'} + possible-typed-array-names@1.0.0: + resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==} + engines: {node: '>= 0.4'} + postcss-load-config@4.0.1: resolution: {integrity: sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==} engines: {node: '>= 14'} @@ -6395,14 +7288,14 @@ packages: ts-node: optional: true - postcss@8.4.24: - resolution: {integrity: sha512-M0RzbcI0sO/XJNucsGjvWU9ERWxb/ytp1w6dKtxTKgixdtQDq4rmx/g8W1hnaheq9jgwL/oyEdH5Bc4WwJKMqg==} - engines: {node: ^10 || ^12 || >=14} - postcss@8.4.38: resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} engines: {node: ^10 || ^12 || >=14} + postcss@8.4.39: + resolution: {integrity: sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==} + engines: {node: ^10 || ^12 || >=14} + postgres-array@2.0.0: resolution: {integrity: 
sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} @@ -6423,8 +7316,8 @@ packages: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} engines: {node: '>=0.10.0'} - postgres-date@2.0.1: - resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} + postgres-date@2.1.0: + resolution: {integrity: sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==} engines: {node: '>=12'} postgres-interval@1.2.0: @@ -6435,17 +7328,18 @@ packages: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} - postgres-range@1.1.3: - resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} + postgres-range@1.1.4: + resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==} - postgres@3.3.5: - resolution: {integrity: sha512-+JD93VELV9gHkqpV5gdL5/70HdGtEw4/XE1S4BC8f1mcPmdib3K5XsKVbnR1XcAyC41zOnifJ+9YRKxdIsXiUw==} + postgres@3.4.4: + resolution: {integrity: sha512-IbyN+9KslkqcXa8AO9fxpk97PA4pzewvpi2B3Dwy9u4zpV32QicaEdgmF3eSQUzdRk7ttDHQejNgAEr4XoeH4A==} + engines: {node: '>=12'} pouchdb-collections@1.0.1: resolution: {integrity: sha512-31db6JRg4+4D5Yzc2nqsRqsA2oOkZS8DpFav3jf/qVNBxusKa2ClkEIZ2bJNpaDbMfWtnuSq59p6Bn+CipPMdg==} - prebuild-install@7.1.1: - resolution: {integrity: sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==} + prebuild-install@7.1.2: + resolution: {integrity: sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==} engines: {node: '>=10'} hasBin: true @@ -6466,10 +7360,6 @@ packages: resolution: {integrity: 
sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==} engines: {node: '>= 10'} - pretty-format@27.5.1: - resolution: {integrity: sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - pretty-format@29.7.0: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -6478,6 +7368,11 @@ packages: resolution: {integrity: sha512-ASJqOugUF1bbzI35STMBUpZqdfYKlJugy6JBziGi2EE+AL5JPJGSzvpeVXojxrr0ViUYoToUjb5kjSEGf7Y83Q==} engines: {node: '>=14.16'} + prisma@5.14.0: + resolution: {integrity: sha512-gCNZco7y5XtjrnQYeDJTiVZmT/ncqCr5RY1/Cf8X2wgLRmyh9ayPAGBNziI4qEE4S6SxCH5omQLVo9lmURaJ/Q==} + engines: {node: '>=16.13'} + hasBin: true + process-nextick-args@2.0.1: resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} @@ -6541,6 +7436,11 @@ packages: resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==} engines: {node: '>=0.6'} + querystring@0.2.1: + resolution: {integrity: sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==} + engines: {node: '>=0.4.x'} + deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. 
+ queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -6554,16 +7454,16 @@ packages: resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} engines: {node: '>= 0.6'} - raw-body@2.5.1: - resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==} + raw-body@2.5.2: + resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} engines: {node: '>= 0.8'} rc@1.2.8: resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} hasBin: true - react-devtools-core@4.28.5: - resolution: {integrity: sha512-cq/o30z9W2Wb4rzBefjv5fBalHU0rJGZCHAkf/RHSBWSSYwh8PlQTqqOJmgIIbBtpj27T6FIPXeomIjZtCNVqA==} + react-devtools-core@5.2.0: + resolution: {integrity: sha512-vZK+/gvxxsieAoAyYaiRIVFxlajb7KXhgBDV7OsoMzaAE+IqGpoxusBjIgq5ibqA2IloKu0p9n7tE68z1xs18A==} react-is@16.13.1: resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} @@ -6574,15 +7474,22 @@ packages: react-is@18.2.0: resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} - react-native@0.73.6: - resolution: {integrity: sha512-oqmZe8D2/VolIzSPZw+oUd6j/bEmeRHwsLn1xLA5wllEYsZ5zNuMsDus235ONOnCRwexqof/J3aztyQswSmiaA==} + react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + + react-native@0.74.1: + resolution: {integrity: sha512-0H2XpmghwOtfPpM2LKqHIN7gxy+7G/r1hwJHKLV6uoyXGC/gCojRtoo5NqyKrWpFC8cqyT6wTYCLuG7CxEKilg==} engines: {node: '>=18'} hasBin: true peerDependencies: + '@types/react': ^18.2.6 react: 18.2.0 + peerDependenciesMeta: + '@types/react': + optional: true - 
react-refresh@0.14.0: - resolution: {integrity: sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==} + react-refresh@0.14.2: + resolution: {integrity: sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==} engines: {node: '>=0.10.0'} react-shallow-renderer@16.15.0: @@ -6590,8 +7497,8 @@ packages: peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 - react@18.2.0: - resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==} + react@18.3.1: + resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} engines: {node: '>=0.10.0'} read-pkg-up@7.0.1: @@ -6620,8 +7527,8 @@ packages: resolution: {integrity: sha512-hjMmLaUXAm1hIuTqOdeYObMslq/q+Xff6QE3Y2P+uoHAg2nmVlLBps2hzh1UJDdMtDTMXOFewK6ky51JQIeECg==} engines: {node: '>= 4'} - recast@0.23.4: - resolution: {integrity: sha512-qtEDqIZGVcSZCHniWwZWbRy79Dc6Wp3kT/UmDA2RJKBPg7+7k51aQBZirHmUGn5uvHf2rg8DkjizrN26k61ATw==} + recast@0.23.9: + resolution: {integrity: sha512-Hx/BGIbwj+Des3+xy5uAtAbdCyqK9y9wbBcDFDYanLS9JnMqf7OeF87HQwUimE87OEc72mr6tkKUKMBBL+hF9Q==} engines: {node: '>= 4'} rechoir@0.8.0: @@ -6658,6 +7565,10 @@ packages: resolution: {integrity: sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==} engines: {node: '>= 0.4'} + regexp.prototype.flags@1.5.2: + resolution: {integrity: sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==} + engines: {node: '>= 0.4'} + regexpu-core@5.3.2: resolution: {integrity: sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==} engines: {node: '>=4'} @@ -6704,6 +7615,9 @@ packages: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} + resolve-pkg-maps@1.0.0: + 
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + resolve-tspaths@0.8.16: resolution: {integrity: sha512-5c90plgcKFcCk66Ve1vFh6tm0fLKmSz6vaW4CezP6i69Q8fgWX3YGPYmKPEughem+nPHT1358P+rXrhw5pibwg==} hasBin: true @@ -6745,6 +7659,10 @@ packages: resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} engines: {node: '>= 4'} + retry@0.13.1: + resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==} + engines: {node: '>= 4'} + reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -6783,6 +7701,11 @@ packages: engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true + rollup@4.18.0: + resolution: {integrity: sha512-QmJz14PX3rzbJCN1SG4Xe/bAAX2a6NpCP8ab2vfu2GiUr8AQcr2nCV/oEO3yneFarB67zk8ShlIyWb2LGTb3Sg==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -6797,6 +7720,10 @@ packages: resolution: {integrity: sha512-9dVEFruWIsnie89yym+xWTAYASdpw3CJV7Li/6zBewGf9z2i1j31rP6jnY0pHEO4QZh6N0K11bFjWmdR8UGdPQ==} engines: {node: '>=0.4'} + safe-array-concat@1.1.2: + resolution: {integrity: sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==} + engines: {node: '>=0.4'} + safe-buffer@5.1.2: resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} @@ -6809,15 +7736,23 @@ packages: safe-regex-test@1.0.0: resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} + safe-regex-test@1.0.3: + resolution: {integrity: 
sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==} + engines: {node: '>= 0.4'} + safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - sax@1.3.0: - resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} + sax@1.4.1: + resolution: {integrity: sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==} scheduler@0.24.0-canary-efb381bbf-20230505: resolution: {integrity: sha512-ABvovCDe/k9IluqSh4/ISoq8tIJnW8euVAWYt5j/bg6dRnqwQwiGO1F/V4AyK96NGF/FB04FhOUDuWj8IKfABA==} + selfsigned@2.4.1: + resolution: {integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==} + engines: {node: '>=10'} + semver@5.7.2: resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} hasBin: true @@ -6826,28 +7761,23 @@ packages: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true - semver@7.3.2: - resolution: {integrity: sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==} - engines: {node: '>=10'} - hasBin: true - semver@7.5.1: resolution: {integrity: sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==} engines: {node: '>=10'} hasBin: true - semver@7.5.3: - resolution: {integrity: sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==} + semver@7.5.4: + resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} engines: {node: '>=10'} hasBin: true - semver@7.5.4: - resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} 
+ semver@7.6.1: + resolution: {integrity: sha512-f/vbBsu+fOiYt+lmwZV0rVwJScl46HppnOA1ZvIuBWKOTlllpyJ3bfVax76/OrhCH38dyxoDIA8K7uB963IYgA==} engines: {node: '>=10'} hasBin: true - semver@7.6.0: - resolution: {integrity: sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==} + semver@7.6.2: + resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} engines: {node: '>=10'} hasBin: true @@ -6879,6 +7809,14 @@ packages: set-cookie-parser@2.6.0: resolution: {integrity: sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==} + set-function-length@1.2.2: + resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} + engines: {node: '>= 0.4'} + + set-function-name@2.0.2: + resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==} + engines: {node: '>= 0.4'} + setimmediate@1.0.5: resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==} @@ -6911,6 +7849,10 @@ packages: side-channel@1.0.4: resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} + side-channel@1.0.6: + resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==} + engines: {node: '>= 0.4'} + siginfo@2.0.0: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} @@ -6921,6 +7863,10 @@ packages: resolution: {integrity: sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q==} engines: {node: '>=14'} + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: 
'>=14'} + simple-concat@1.0.1: resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} @@ -6930,13 +7876,17 @@ packages: simple-plist@1.3.1: resolution: {integrity: sha512-iMSw5i0XseMnrhtIzRb7XpQEXepa9xhWxGUojHBL43SIpQuDQkh3Wpy67ZbDzZVr6EKxvwVChnVpdl8hEVLDiw==} - sirv@2.0.3: - resolution: {integrity: sha512-O9jm9BsID1P+0HOi81VpXPoDxYP374pkOLzACAoyUQ/3OUVndNpsz6wMnY2z+yOxzbllCKZrM+9QrWsv4THnyA==} + sirv@2.0.4: + resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==} engines: {node: '>= 10'} sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + skin-tone@2.0.0: + resolution: {integrity: sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==} + engines: {node: '>=8'} + slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -6968,13 +7918,9 @@ packages: resolution: {integrity: sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==} engines: {node: '>= 10'} - socks@2.7.1: - resolution: {integrity: sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==} - engines: {node: '>= 10.13.0', npm: '>= 3.0.0'} - - source-map-js@1.0.2: - resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} - engines: {node: '>=0.10.0'} + socks@2.8.3: + resolution: {integrity: sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==} + engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} source-map-js@1.2.0: resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} @@ -7033,26 +7979,33 @@ packages: 
sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - sql.js@1.8.0: - resolution: {integrity: sha512-3HD8pSkZL+5YvYUI8nlvNILs61ALqq34xgmF+BHpqxe68yZIJ1H+sIVIODvni25+CcxHUxDyrTJUL0lE/m7afw==} + sprintf-js@1.1.3: + resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==} - sqlite3@5.1.6: - resolution: {integrity: sha512-olYkWoKFVNSSSQNvxVUfjiVbz3YtBwTJj+mfV5zpHmqW3sELx2Cf4QCdirMelhM5Zh+KDVaKgQHqCxrqiWHybw==} + sql.js@1.10.3: + resolution: {integrity: sha512-H46aWtQkdyjZwFQgraUruy5h/DyJBbAK3EA/WEMqiqF6PGPfKBSKBj/er3dVyYqVIoYfRf5TFM/loEjtQIrqJg==} + + sqlite3@5.1.7: + resolution: {integrity: sha512-GGIyOiFaG+TUra3JIfkI/zGP8yZYLPQ0pl1bH+ODjiX57sPhrLU5sQJn1y9bDKZUFYkX1crlrPfSYt0BKKdkog==} sqlstring@2.3.3: resolution: {integrity: sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==} engines: {node: '>= 0.6'} - ssh2@1.11.0: - resolution: {integrity: sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw==} + ssh2@1.15.0: + resolution: {integrity: sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==} engines: {node: '>=10.16.0'} + ssri@10.0.6: + resolution: {integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + ssri@8.0.1: resolution: {integrity: sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==} engines: {node: '>= 8'} - sst@3.0.4: - resolution: {integrity: sha512-tbFv2dlPHyGQSV8admS3TMDxtR/Iv09+afjneJIkr/x4M1jKgH039uBf91LEmRYxRAuGALG4rIqOONeAU/oarg==} + sst@3.0.14: + resolution: {integrity: sha512-MC93uHwMxM1uwDg9Old8qo8LsmhvrMD3YFkS5Me8ThozwFIKzwqXicJWTE3iL+0DkPSPhdiSxafRdKhu/Qk5DA==} stack-utils@2.0.6: resolution: {integrity: 
sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} @@ -7076,8 +8029,8 @@ packages: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} - std-env@3.3.3: - resolution: {integrity: sha512-Rz6yejtVyWnVjC1RFvNmYL10kgjC49EOghxWn0RFqlCHGFpQx+Xe7yW3I4ceK1SGrWIGMjD5Kbue8W/udkbMJg==} + std-env@3.7.0: + resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} stream-buffers@2.2.0: resolution: {integrity: sha512-uyQK/mx5QjHun80FLJTfaWE7JtwfRMKBLkMne6udYOmvH0CawotVa7TfgYHzAnpphn4+TweIx1QKMnRIbipmUg==} @@ -7102,12 +8055,23 @@ packages: resolution: {integrity: sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==} engines: {node: '>= 0.4'} + string.prototype.trim@1.2.9: + resolution: {integrity: sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==} + engines: {node: '>= 0.4'} + string.prototype.trimend@1.0.6: resolution: {integrity: sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==} + string.prototype.trimend@1.0.8: + resolution: {integrity: sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==} + string.prototype.trimstart@1.0.6: resolution: {integrity: sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==} + string.prototype.trimstart@1.0.8: + resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} + engines: {node: '>= 0.4'} + string_decoder@1.1.1: resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} @@ -7122,10 +8086,6 @@ packages: resolution: {integrity: 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} - strip-ansi@7.0.1: - resolution: {integrity: sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==} - engines: {node: '>=12'} - strip-ansi@7.1.0: resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} engines: {node: '>=12'} @@ -7158,8 +8118,8 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} - strip-literal@1.0.1: - resolution: {integrity: sha512-QZTsipNpa2Ppr6v1AmJHESqJ3Uz247MUS0OjrnnZjFAvEoWqxuyFuXn2xLgMtRnijJShAa1HL0gtJyUs7u7n3Q==} + strip-literal@2.1.0: + resolution: {integrity: sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==} strnum@1.0.5: resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==} @@ -7201,6 +8161,10 @@ packages: resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} engines: {node: '>=8'} + supports-hyperlinks@3.0.0: + resolution: {integrity: sha512-QBDPHyPQDRTy9ku4URNGY5Lah8PAaXs6tAAwp55sL5WCsSW7GIfdf6W5ixfziW+t7wh3GVvHyHHyQ1ESsoRvaA==} + engines: {node: '>=14.18'} + supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} @@ -7215,10 +8179,6 @@ packages: resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} engines: {node: '>=6'} - tar@6.1.13: - resolution: {integrity: sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==} - engines: {node: '>=10'} - tar@6.2.1: resolution: {integrity: 
sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} @@ -7260,8 +8220,8 @@ packages: engines: {node: '>=10'} hasBin: true - terser@5.30.3: - resolution: {integrity: sha512-STdUgOUx8rLbMGO9IOwHLpCqolkDITFFQSMYYwKE1N2lY6MVSaeoi10z/EhWxRc6ybqoVmKSkhKYH/XUpl7vSA==} + terser@5.31.0: + resolution: {integrity: sha512-Q1JFAoUKE5IMfI4Z/lkE/E6+SwgzO+x4tq4v1AyBLRj8VSYvRO6A/rQrPg1yud4g0En9EKI1TvFRF2tQFcoUkg==} engines: {node: '>=10'} hasBin: true @@ -7298,22 +8258,21 @@ packages: timers-ext@0.1.7: resolution: {integrity: sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ==} + tiny-invariant@1.3.3: + resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} + tiny-queue@0.2.1: resolution: {integrity: sha512-EijGsv7kzd9I9g0ByCl6h42BWNGUZrlCSejfrb3AKeHC33SGbASu1VDf5O3rRiiUOhAC9CHdZxFPbZu0HmR70A==} - tinybench@2.5.0: - resolution: {integrity: sha512-kRwSG8Zx4tjF9ZiyH4bhaebu+EDz1BOx9hOigYHlUW4xxI/wKIUQUqo018UlU4ar6ATPBsaMrdbKZ+tmPdohFA==} + tinybench@2.8.0: + resolution: {integrity: sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==} - tinypool@0.5.0: - resolution: {integrity: sha512-paHQtnrlS1QZYKF/GnLoOM/DN9fqaGOFbCbxzAhwniySnzl9Ebk8w73/dd34DAhe/obUbPAOldTyYXQZxnPBPQ==} + tinypool@0.8.4: + resolution: {integrity: sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==} engines: {node: '>=14.0.0'} - tinypool@0.7.0: - resolution: {integrity: sha512-zSYNUlYSMhJ6Zdou4cJwo/p7w5nmAH17GRfU/ui3ctvjXFErXXkruT4MWW6poDeXgCaIBlGLrfU6TbTXxyGMww==} - engines: {node: '>=14.0.0'} - - tinyspy@2.1.1: - resolution: {integrity: sha512-XPJL2uSzcOyBMky6OFrusqWlzfFrXtE0hPuMgW8A2HmaqrPo4ZQHRN/V0QXN3FSjKxpsbRrFc5LI7KOwBsT1/w==} + tinyspy@2.2.1: + resolution: {integrity: 
sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==} engines: {node: '>=14.0.0'} tmp@0.0.33: @@ -7345,8 +8304,8 @@ packages: tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} - traverse@0.6.8: - resolution: {integrity: sha512-aXJDbk6SnumuaZSANd21XAo15ucCDE38H4fkqiGsc3MhCK+wOlZvLP9cB/TvpHT0mOyWgC4Z8EwRlzqYSUzdsA==} + traverse@0.6.9: + resolution: {integrity: sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==} engines: {node: '>= 0.4'} tree-kill@1.2.2: @@ -7363,15 +8322,32 @@ packages: peerDependencies: typescript: '>=4.2.0' + ts-expose-internals-conditionally@1.0.0-empty.0: + resolution: {integrity: sha512-F8m9NOF6ZhdOClDVdlM8gj3fDCav4ZIFSs/EI3ksQbAAXVSCN/Jh5OCJDDZWBuBy9psFc6jULGDlPwjMYMhJDw==} + ts-interface-checker@0.1.13: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} - tsconfck@2.1.1: - resolution: {integrity: sha512-ZPCkJBKASZBmBUNqGHmRhdhM8pJYDdOXp4nRgj/O0JwUwsMq50lCDRQP/M5GBNAA0elPrq4gAeu4dkaVCuKWww==} - engines: {node: ^14.13.1 || ^16 || >=18} + ts-node@10.9.2: + resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + + tsconfck@3.0.3: + resolution: {integrity: sha512-4t0noZX9t6GcPTfBAbIbbIU4pfpCwh0ueq3S4O/5qXI1VwK1outmxhe9dOiEWqMz3MW2LKgDTpqWV+37IWuVbA==} + engines: {node: ^18 || >=20} hasBin: true peerDependencies: - typescript: ^4.3.5 || ^5.0.0 + typescript: ^5.0.0 peerDependenciesMeta: typescript: optional: true @@ -7382,12 +8358,6 @@ packages: tslib@1.14.1: resolution: {integrity: 
sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} - tslib@2.5.2: - resolution: {integrity: sha512-5svOrSA2w3iGFDs1HibEVBGbDrAY82bFQ3HZ3ixB+88nsbsWQoKqDRb5UBYAUPEzbBn6dAp5gRNXglySbx1MlA==} - - tslib@2.5.3: - resolution: {integrity: sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==} - tslib@2.6.2: resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} @@ -7413,12 +8383,18 @@ packages: peerDependencies: typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' - tsx@3.12.6: - resolution: {integrity: sha512-q93WgS3lBdHlPgS0h1i+87Pt6n9K/qULIMNYZo07nSeu2z5QE2CellcAZfofVXBo2tQg9av2ZcRMQ2S2i5oadQ==} + tsx@3.14.0: + resolution: {integrity: sha512-xHtFaKtHxM9LOklMmJdI3BEnQq/D5F73Of2E1GDrITi9sgoVkvIsrQUTY1G8FlmGtA+awCI4EBlTRRYxkL2sRg==} hasBin: true - tsx@3.12.7: - resolution: {integrity: sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} + tsx@4.10.5: + resolution: {integrity: sha512-twDSbf7Gtea4I2copqovUiNTEDrT8XNFXsuHpfGbdpW/z9ZW4fTghzzhAG0WfrCuJmJiOEY1nLIjq4u3oujRWQ==} + engines: {node: '>=18.0.0'} + hasBin: true + + tsx@4.16.2: + resolution: {integrity: sha512-C1uWweJDgdtX2x600HjaFaucXTilT7tgUZHbOE4+ypskZ1OP8CRCSDkCxG6Vya9EwaFIVagWwpaVAn5wzypaqQ==} + engines: {node: '>=18.0.0'} hasBin: true tunnel-agent@0.6.0: @@ -7519,37 +8495,61 @@ packages: resolution: {integrity: sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==} engines: {node: '>= 0.4'} + typed-array-buffer@1.0.2: + resolution: {integrity: sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==} + engines: {node: '>= 0.4'} + typed-array-byte-length@1.0.0: resolution: {integrity: 
sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==} engines: {node: '>= 0.4'} + typed-array-byte-length@1.0.1: + resolution: {integrity: sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==} + engines: {node: '>= 0.4'} + typed-array-byte-offset@1.0.0: resolution: {integrity: sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==} engines: {node: '>= 0.4'} + typed-array-byte-offset@1.0.2: + resolution: {integrity: sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==} + engines: {node: '>= 0.4'} + typed-array-length@1.0.4: resolution: {integrity: sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==} + typed-array-length@1.0.6: + resolution: {integrity: sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==} + engines: {node: '>= 0.4'} + + typedarray.prototype.slice@1.0.3: + resolution: {integrity: sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==} + engines: {node: '>= 0.4'} + typescript@5.2.2: resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} engines: {node: '>=14.17'} hasBin: true - ua-parser-js@1.0.37: - resolution: {integrity: sha512-bhTyI94tZofjo+Dn8SN6Zv8nBDvyXTymAdM3LDI/0IboIUwTu1rEhW7v2TfiVsoYWgkQ4kOVqnI8APUFbIQIFQ==} + typescript@5.3.3: + resolution: {integrity: sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==} + engines: {node: '>=14.17'} + hasBin: true + + typescript@5.4.5: + resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} + engines: {node: '>=14.17'} + hasBin: true - ufo@1.1.2: - resolution: {integrity: 
sha512-TrY6DsjTQQgyS3E3dBaOXf0TpPD8u9FVrVYmKVegJuFw51n/YB9XPt+U6ydzFG5ZIN7+DIjPbNmXoBj9esYhgQ==} + ua-parser-js@1.0.38: + resolution: {integrity: sha512-Aq5ppTOfvrCMgAPneW1HfWj66Xi7XL+/mIy996R1/CLS/rcyJQm6QZdsKrUeivDFQ+Oc9Wyuwor8Ze8peEoUoQ==} - ufo@1.3.1: - resolution: {integrity: sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw==} + ufo@1.5.3: + resolution: {integrity: sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==} unbox-primitive@1.0.2: resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} - undici-types@5.25.3: - resolution: {integrity: sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==} - undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} @@ -7561,6 +8561,10 @@ packages: resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} engines: {node: '>=4'} + unicode-emoji-modifier-base@1.0.0: + resolution: {integrity: sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==} + engines: {node: '>=4'} + unicode-match-property-ecmascript@2.0.0: resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==} engines: {node: '>=4'} @@ -7576,9 +8580,17 @@ packages: unique-filename@1.1.1: resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==} + unique-filename@3.0.0: + resolution: {integrity: sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + unique-slug@2.0.2: resolution: {integrity: 
sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==} + unique-slug@4.0.0: + resolution: {integrity: sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + unique-string@1.0.0: resolution: {integrity: sha512-ODgiYu03y5g76A1I9Gt0/chLCzQjvzDy7DsZGsLOE/1MrF6wriEskSncj1+/C58Xk/kPZDppSctDybCwOSaGAg==} engines: {node: '>=4'} @@ -7607,8 +8619,8 @@ packages: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} - update-browserslist-db@1.0.13: - resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} + update-browserslist-db@1.0.16: + resolution: {integrity: sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==} hasBin: true peerDependencies: browserslist: '>= 4.21.0' @@ -7629,9 +8641,6 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - util@0.12.5: - resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} - utils-merge@1.0.1: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} @@ -7644,10 +8653,6 @@ packages: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true - uuid@9.0.0: - resolution: {integrity: sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==} - hasBin: true - uuid@9.0.1: resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} hasBin: true @@ -7657,6 +8662,9 @@ packages: engines: 
{node: '>=8'} hasBin: true + v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + valibot@0.30.0: resolution: {integrity: sha512-5POBdbSkM+3nvJ6ZlyQHsggisfRtyT4tVTo1EIIShs6qCdXJnyWU5TJ68vr8iTg5zpOLjXLRiBqNx+9zwZz/rA==} @@ -7681,31 +8689,27 @@ packages: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} - vite-node@0.31.4: - resolution: {integrity: sha512-uzL377GjJtTbuc5KQxVbDu2xfU/x0wVjUtXQR2ihS21q/NK6ROr4oG0rsSkBBddZUVCwzfx22in76/0ZZHXgkQ==} - engines: {node: '>=v14.18.0'} + vite-node@1.6.0: + resolution: {integrity: sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==} + engines: {node: ^18.0.0 || >=20.0.0} hasBin: true - vite-node@0.34.6: - resolution: {integrity: sha512-nlBMJ9x6n7/Amaz6F3zJ97EBwR2FkzhBRxF5e+jE6LA3yi6Wtc2lyTij1OnDMIr34v5g/tVQtsVAzhT0jc5ygA==} - engines: {node: '>=v14.18.0'} - hasBin: true - - vite-tsconfig-paths@4.2.0: - resolution: {integrity: sha512-jGpus0eUy5qbbMVGiTxCL1iB9ZGN6Bd37VGLJU39kTDD6ZfULTTb1bcc5IeTWqWJKiWV5YihCaibeASPiGi8kw==} + vite-tsconfig-paths@4.3.2: + resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: vite: '*' peerDependenciesMeta: vite: optional: true - vite@4.3.9: - resolution: {integrity: sha512-qsTNZjO9NoJNW7KnOrgYwczm0WctJ8m/yqYAMAK9Lxt4SoySUfS5S8ia9K7JHpa3KEeMfyF8LoJ3c5NeBJy6pg==} - engines: {node: ^14.18.0 || >=16.0.0} + vite@5.2.12: + resolution: {integrity: sha512-/gC8GxzxMK5ntBwb48pR32GGhENnjtY30G4A0jemunsBkiEZFw60s8InGpN8gkhHEkjnRK1aSAxeQgwvFhUHAA==} + engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: - '@types/node': '>= 14' + '@types/node': ^18.0.0 || >=20.0.0 less: '*' + lightningcss: ^1.21.0 sass: '*' stylus: '*' sugarss: '*' @@ -7715,6 +8719,8 @@ packages: optional: true 
less: optional: true + lightningcss: + optional: true sass: optional: true stylus: @@ -7724,53 +8730,50 @@ packages: terser: optional: true - vitest@0.31.4: - resolution: {integrity: sha512-GoV0VQPmWrUFOZSg3RpQAPN+LPmHg2/gxlMNJlyxJihkz6qReHDV6b0pPDcqFLNEPya4tWJ1pgwUNP9MLmUfvQ==} - engines: {node: '>=v14.18.0'} + vite@5.3.3: + resolution: {integrity: sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==} + engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: - '@edge-runtime/vm': '*' - '@vitest/browser': '*' - '@vitest/ui': '*' - happy-dom: '*' - jsdom: '*' - playwright: '*' - safaridriver: '*' - webdriverio: '*' + '@types/node': ^18.0.0 || >=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@vitest/browser': + '@types/node': optional: true - '@vitest/ui': + less: optional: true - happy-dom: + lightningcss: optional: true - jsdom: + sass: optional: true - playwright: + stylus: optional: true - safaridriver: + sugarss: optional: true - webdriverio: + terser: optional: true - vitest@0.34.6: - resolution: {integrity: sha512-+5CALsOvbNKnS+ZHMXtuUC7nL8/7F1F2DnHGjSsszX8zCjWSSviphCb/NuS9Nzf4Q03KyyDRBAXhF/8lffME4Q==} - engines: {node: '>=v14.18.0'} + vitest@1.6.0: + resolution: {integrity: sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==} + engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' - '@vitest/browser': '*' - '@vitest/ui': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 1.6.0 + '@vitest/ui': 1.6.0 happy-dom: '*' jsdom: '*' - playwright: '*' - safaridriver: '*' - webdriverio: '*' peerDependenciesMeta: '@edge-runtime/vm': optional: true + '@types/node': + optional: true '@vitest/browser': optional: true '@vitest/ui': @@ -7779,12 +8782,6 @@ packages: optional: true jsdom: optional: true - playwright: 
- optional: true - safaridriver: - optional: true - webdriverio: - optional: true vlq@1.0.1: resolution: {integrity: sha512-gQpnTgkubC6hQgdIcRdYGDSDc+SaujOdyesZQMv6JlfQee/9Mp0Qhnys6WxDWvQnL5WZdT7o2Ul187aSt0Rq+w==} @@ -7840,6 +8837,10 @@ packages: resolution: {integrity: sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew==} engines: {node: '>= 0.4'} + which-typed-array@1.1.15: + resolution: {integrity: sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==} + engines: {node: '>= 0.4'} + which@1.3.1: resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} hasBin: true @@ -7886,10 +8887,6 @@ packages: write-file-atomic@2.4.3: resolution: {integrity: sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==} - write-file-atomic@5.0.0: - resolution: {integrity: sha512-R7NYMnHSlV42K54lwY9lvW6MnSm1HSJqZL3xiSgi9E7//FYaI74r2G0rd+/X6VAMkHEdzxQaU5HUOXWUz5kA/w==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - write-file-atomic@5.0.1: resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -7917,18 +8914,6 @@ packages: utf-8-validate: optional: true - ws@8.13.0: - resolution: {integrity: sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: '>=5.0.2' - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - ws@8.14.2: resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} engines: {node: '>=10.0.0'} @@ -7941,8 +8926,8 @@ packages: utf-8-validate: optional: true - ws@8.16.0: - resolution: {integrity: 
sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==} + ws@8.17.0: + resolution: {integrity: sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 @@ -7994,8 +8979,8 @@ packages: resolution: {integrity: sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==} engines: {node: '>= 14'} - yaml@2.4.1: - resolution: {integrity: sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==} + yaml@2.4.2: + resolution: {integrity: sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==} engines: {node: '>= 14'} hasBin: true @@ -8019,14 +9004,14 @@ packages: resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} engines: {node: '>=10'} - yargs@17.7.1: - resolution: {integrity: sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==} - engines: {node: '>=12'} - yargs@17.7.2: resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} + yn@3.1.1: + resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} @@ -8038,8 +9023,8 @@ packages: zod@3.21.4: resolution: {integrity: sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==} - zod@3.22.2: - resolution: {integrity: sha512-wvWkphh5WQsJbVk1tbx1l1Ly4yg+XecD+Mq280uBGt9wa5BKSWf4Mhp6GmrkPixhMxmabYY7RbzlwVP32pbGCg==} + zod@3.23.7: + resolution: {integrity: 
sha512-NBeIoqbtOiUMomACV/y+V3Qfs9+Okr18vR5c/5pHClPpufWOrsx8TENboDPe265lFdfewX2yBtNTLPvnmCxwog==} zx@7.2.2: resolution: {integrity: sha512-50Gjicd6ijTt7Zcz5fNX+rHrmE0uVqC+X6lYKhf2Cu8wIxDpNIzXwTmzchNdW+JY3LFsRcU43B1lHE4HBMmKgQ==} @@ -8055,36 +9040,37 @@ snapshots: '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 - '@andrewbranch/untar.js@1.0.2': {} + '@andrewbranch/untar.js@1.0.3': {} - '@arethetypeswrong/cli@0.12.1(encoding@0.1.13)': + '@arethetypeswrong/cli@0.15.3': dependencies: - '@arethetypeswrong/core': 0.12.1(encoding@0.1.13) + '@arethetypeswrong/core': 0.15.1 chalk: 4.1.2 cli-table3: 0.6.3 commander: 10.0.1 - marked: 5.1.2 - marked-terminal: 5.2.0(marked@5.1.2) - node-fetch: 2.6.11(encoding@0.1.13) - semver: 7.5.4 - transitivePeerDependencies: - - encoding + marked: 9.1.6 + marked-terminal: 6.2.0(marked@9.1.6) + semver: 7.6.2 - '@arethetypeswrong/core@0.12.1(encoding@0.1.13)': + '@arethetypeswrong/core@0.15.1': dependencies: - '@andrewbranch/untar.js': 1.0.2 - fetch-ponyfill: 7.1.0(encoding@0.1.13) - fflate: 0.7.4 - semver: 7.5.4 - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + '@andrewbranch/untar.js': 1.0.3 + fflate: 0.8.2 + semver: 7.6.2 + ts-expose-internals-conditionally: 1.0.0-empty.0 + typescript: 5.3.3 validate-npm-package-name: 5.0.0 - transitivePeerDependencies: - - encoding + + '@ava/typescript@5.0.0': + dependencies: + escape-string-regexp: 5.0.0 + execa: 8.0.1 + optional: true '@aws-crypto/crc32@3.0.0': dependencies: '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.342.0 + '@aws-sdk/types': 3.577.0 tslib: 1.14.1 '@aws-crypto/ie11-detection@3.0.0': @@ -8097,15 +9083,15 @@ snapshots: '@aws-crypto/sha256-js': 3.0.0 '@aws-crypto/supports-web-crypto': 3.0.0 '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-locate-window': 3.535.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-locate-window': 3.568.0 '@aws-sdk/util-utf8-browser': 3.259.0 tslib: 1.14.1 '@aws-crypto/sha256-js@3.0.0': dependencies: 
'@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.577.0 tslib: 1.14.1 '@aws-crypto/supports-web-crypto@3.0.0': @@ -8114,26 +9100,27 @@ snapshots: '@aws-crypto/util@3.0.0': dependencies: - '@aws-sdk/types': 3.468.0 + '@aws-sdk/types': 3.577.0 '@aws-sdk/util-utf8-browser': 3.259.0 tslib: 1.14.1 - '@aws-sdk/client-cognito-identity@3.549.0': + '@aws-sdk/client-cognito-identity@3.569.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/core': 3.549.0 - '@aws-sdk/credential-provider-node': 3.549.0 - '@aws-sdk/middleware-host-header': 3.535.0 - '@aws-sdk/middleware-logger': 3.535.0 - '@aws-sdk/middleware-recursion-detection': 3.535.0 - '@aws-sdk/middleware-user-agent': 3.540.0 - '@aws-sdk/region-config-resolver': 3.535.0 - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-endpoints': 3.540.0 - '@aws-sdk/util-user-agent-browser': 3.535.0 - '@aws-sdk/util-user-agent-node': 3.535.0 + '@aws-sdk/client-sso-oidc': 3.569.0 + '@aws-sdk/client-sts': 3.569.0 + '@aws-sdk/core': 3.567.0 + '@aws-sdk/credential-provider-node': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0) + '@aws-sdk/middleware-host-header': 3.567.0 + '@aws-sdk/middleware-logger': 3.568.0 + '@aws-sdk/middleware-recursion-detection': 3.567.0 + '@aws-sdk/middleware-user-agent': 3.567.0 + '@aws-sdk/region-config-resolver': 3.567.0 + '@aws-sdk/types': 3.567.0 + '@aws-sdk/util-endpoints': 3.567.0 + '@aws-sdk/util-user-agent-browser': 3.567.0 + '@aws-sdk/util-user-agent-node': 3.568.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8213,22 +9200,68 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-rds-data@3.549.0': + '@aws-sdk/client-rds-data@3.583.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/client-sso-oidc': 
3.583.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/core': 3.582.0 + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.583.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.583.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sso-oidc@3.569.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/core': 3.549.0 - '@aws-sdk/credential-provider-node': 3.549.0 - '@aws-sdk/middleware-host-header': 3.535.0 - '@aws-sdk/middleware-logger': 3.535.0 - '@aws-sdk/middleware-recursion-detection': 3.535.0 - '@aws-sdk/middleware-user-agent': 3.540.0 - 
'@aws-sdk/region-config-resolver': 3.535.0 - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-endpoints': 3.540.0 - '@aws-sdk/util-user-agent-browser': 3.535.0 - '@aws-sdk/util-user-agent-node': 3.535.0 + '@aws-sdk/client-sts': 3.569.0 + '@aws-sdk/core': 3.567.0 + '@aws-sdk/credential-provider-node': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0) + '@aws-sdk/middleware-host-header': 3.567.0 + '@aws-sdk/middleware-logger': 3.568.0 + '@aws-sdk/middleware-recursion-detection': 3.567.0 + '@aws-sdk/middleware-user-agent': 3.567.0 + '@aws-sdk/region-config-resolver': 3.567.0 + '@aws-sdk/types': 3.567.0 + '@aws-sdk/util-endpoints': 3.567.0 + '@aws-sdk/util-user-agent-browser': 3.567.0 + '@aws-sdk/util-user-agent-node': 3.568.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8258,22 +9291,66 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso-oidc@3.549.0(@aws-sdk/credential-provider-node@3.549.0)': + '@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/core': 3.549.0 - '@aws-sdk/credential-provider-node': 3.549.0 - '@aws-sdk/middleware-host-header': 3.535.0 - '@aws-sdk/middleware-logger': 3.535.0 - '@aws-sdk/middleware-recursion-detection': 3.535.0 - '@aws-sdk/middleware-user-agent': 3.540.0 - '@aws-sdk/region-config-resolver': 3.535.0 - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-endpoints': 3.540.0 - '@aws-sdk/util-user-agent-browser': 3.535.0 - '@aws-sdk/util-user-agent-node': 3.535.0 + '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/core': 3.582.0 + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + 
'@aws-sdk/middleware-user-agent': 3.583.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.583.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/client-sso@3.478.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/core': 3.477.0 + '@aws-sdk/middleware-host-header': 3.468.0 + '@aws-sdk/middleware-logger': 3.468.0 + '@aws-sdk/middleware-recursion-detection': 3.468.0 + '@aws-sdk/middleware-user-agent': 3.478.0 + '@aws-sdk/region-config-resolver': 3.470.0 + '@aws-sdk/types': 3.468.0 + '@aws-sdk/util-endpoints': 3.478.0 + '@aws-sdk/util-user-agent-browser': 3.468.0 + '@aws-sdk/util-user-agent-node': 3.470.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8296,27 +9373,26 @@ snapshots: '@smithy/util-defaults-mode-browser': 2.2.1 '@smithy/util-defaults-mode-node': 2.3.1 '@smithy/util-endpoints': 1.2.0 - 
'@smithy/util-middleware': 2.2.0 '@smithy/util-retry': 2.2.0 '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso@3.478.0': + '@aws-sdk/client-sso@3.568.0': dependencies: - '@aws-crypto/sha256-browser': 3.0.0 - '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/core': 3.477.0 - '@aws-sdk/middleware-host-header': 3.468.0 - '@aws-sdk/middleware-logger': 3.468.0 - '@aws-sdk/middleware-recursion-detection': 3.468.0 - '@aws-sdk/middleware-user-agent': 3.478.0 - '@aws-sdk/region-config-resolver': 3.470.0 - '@aws-sdk/types': 3.468.0 - '@aws-sdk/util-endpoints': 3.478.0 - '@aws-sdk/util-user-agent-browser': 3.468.0 - '@aws-sdk/util-user-agent-node': 3.470.0 + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/core': 3.567.0 + '@aws-sdk/middleware-host-header': 3.567.0 + '@aws-sdk/middleware-logger': 3.568.0 + '@aws-sdk/middleware-recursion-detection': 3.567.0 + '@aws-sdk/middleware-user-agent': 3.567.0 + '@aws-sdk/region-config-resolver': 3.567.0 + '@aws-sdk/types': 3.567.0 + '@aws-sdk/util-endpoints': 3.567.0 + '@aws-sdk/util-user-agent-browser': 3.567.0 + '@aws-sdk/util-user-agent-node': 3.568.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8339,26 +9415,71 @@ snapshots: '@smithy/util-defaults-mode-browser': 2.2.1 '@smithy/util-defaults-mode-node': 2.3.1 '@smithy/util-endpoints': 1.2.0 + '@smithy/util-middleware': 2.2.0 '@smithy/util-retry': 2.2.0 '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso@3.549.0': + '@aws-sdk/client-sso@3.583.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/core': 3.582.0 + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.583.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + 
'@aws-sdk/util-endpoints': 3.583.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sts@3.478.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/core': 3.549.0 - '@aws-sdk/middleware-host-header': 3.535.0 - '@aws-sdk/middleware-logger': 3.535.0 - '@aws-sdk/middleware-recursion-detection': 3.535.0 - '@aws-sdk/middleware-user-agent': 3.540.0 - '@aws-sdk/region-config-resolver': 3.535.0 - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-endpoints': 3.540.0 - '@aws-sdk/util-user-agent-browser': 3.535.0 - '@aws-sdk/util-user-agent-node': 3.535.0 + '@aws-sdk/core': 3.477.0 + '@aws-sdk/credential-provider-node': 3.478.0 + '@aws-sdk/middleware-host-header': 3.468.0 + '@aws-sdk/middleware-logger': 3.468.0 + '@aws-sdk/middleware-recursion-detection': 3.468.0 + '@aws-sdk/middleware-user-agent': 3.478.0 + '@aws-sdk/region-config-resolver': 3.470.0 + '@aws-sdk/types': 3.468.0 + '@aws-sdk/util-endpoints': 3.478.0 + '@aws-sdk/util-user-agent-browser': 
3.468.0 + '@aws-sdk/util-user-agent-node': 3.470.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8384,25 +9505,27 @@ snapshots: '@smithy/util-middleware': 2.2.0 '@smithy/util-retry': 2.2.0 '@smithy/util-utf8': 2.3.0 + fast-xml-parser: 4.2.5 tslib: 2.6.2 transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.478.0': + '@aws-sdk/client-sts@3.569.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/core': 3.477.0 - '@aws-sdk/credential-provider-node': 3.478.0 - '@aws-sdk/middleware-host-header': 3.468.0 - '@aws-sdk/middleware-logger': 3.468.0 - '@aws-sdk/middleware-recursion-detection': 3.468.0 - '@aws-sdk/middleware-user-agent': 3.478.0 - '@aws-sdk/region-config-resolver': 3.470.0 - '@aws-sdk/types': 3.468.0 - '@aws-sdk/util-endpoints': 3.478.0 - '@aws-sdk/util-user-agent-browser': 3.468.0 - '@aws-sdk/util-user-agent-node': 3.470.0 + '@aws-sdk/client-sso-oidc': 3.569.0 + '@aws-sdk/core': 3.567.0 + '@aws-sdk/credential-provider-node': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0) + '@aws-sdk/middleware-host-header': 3.567.0 + '@aws-sdk/middleware-logger': 3.568.0 + '@aws-sdk/middleware-recursion-detection': 3.567.0 + '@aws-sdk/middleware-user-agent': 3.567.0 + '@aws-sdk/region-config-resolver': 3.567.0 + '@aws-sdk/types': 3.567.0 + '@aws-sdk/util-endpoints': 3.567.0 + '@aws-sdk/util-user-agent-browser': 3.567.0 + '@aws-sdk/util-user-agent-node': 3.568.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8428,26 +9551,26 @@ snapshots: '@smithy/util-middleware': 2.2.0 '@smithy/util-retry': 2.2.0 '@smithy/util-utf8': 2.3.0 - fast-xml-parser: 4.2.5 tslib: 2.6.2 transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.549.0(@aws-sdk/credential-provider-node@3.549.0)': + '@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)': dependencies: '@aws-crypto/sha256-browser': 3.0.0 
'@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/core': 3.549.0 - '@aws-sdk/credential-provider-node': 3.549.0 - '@aws-sdk/middleware-host-header': 3.535.0 - '@aws-sdk/middleware-logger': 3.535.0 - '@aws-sdk/middleware-recursion-detection': 3.535.0 - '@aws-sdk/middleware-user-agent': 3.540.0 - '@aws-sdk/region-config-resolver': 3.535.0 - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-endpoints': 3.540.0 - '@aws-sdk/util-user-agent-browser': 3.535.0 - '@aws-sdk/util-user-agent-node': 3.535.0 + '@aws-sdk/client-sso-oidc': 3.569.0 + '@aws-sdk/core': 3.567.0 + '@aws-sdk/credential-provider-node': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/middleware-host-header': 3.567.0 + '@aws-sdk/middleware-logger': 3.568.0 + '@aws-sdk/middleware-recursion-detection': 3.567.0 + '@aws-sdk/middleware-user-agent': 3.567.0 + '@aws-sdk/region-config-resolver': 3.567.0 + '@aws-sdk/types': 3.567.0 + '@aws-sdk/util-endpoints': 3.567.0 + '@aws-sdk/util-user-agent-browser': 3.567.0 + '@aws-sdk/util-user-agent-node': 3.568.0 '@smithy/config-resolver': 2.2.0 '@smithy/core': 1.4.2 '@smithy/fetch-http-handler': 2.5.0 @@ -8474,6 +9597,52 @@ snapshots: '@smithy/util-retry': 2.2.0 '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/client-sts@3.583.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/core': 3.582.0 + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.583.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.583.0 + '@aws-sdk/util-user-agent-browser': 
3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 transitivePeerDependencies: - aws-crt @@ -8481,25 +9650,35 @@ snapshots: dependencies: '@smithy/core': 1.4.2 '@smithy/protocol-http': 3.3.0 - '@smithy/signature-v4': 2.2.1 + '@smithy/signature-v4': 2.3.0 '@smithy/smithy-client': 2.5.1 '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/core@3.549.0': + '@aws-sdk/core@3.567.0': dependencies: '@smithy/core': 1.4.2 '@smithy/protocol-http': 3.3.0 - '@smithy/signature-v4': 2.2.1 + '@smithy/signature-v4': 2.3.0 '@smithy/smithy-client': 2.5.1 '@smithy/types': 2.12.0 fast-xml-parser: 4.2.5 tslib: 2.6.2 - '@aws-sdk/credential-provider-cognito-identity@3.549.0': + '@aws-sdk/core@3.582.0': + dependencies: + '@smithy/core': 2.0.1 + '@smithy/protocol-http': 4.0.0 + '@smithy/signature-v4': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + fast-xml-parser: 4.2.5 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-cognito-identity@3.569.0': dependencies: - '@aws-sdk/client-cognito-identity': 3.549.0 - '@aws-sdk/types': 3.535.0 + '@aws-sdk/client-cognito-identity': 3.569.0 + 
'@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/types': 2.12.0 tslib: 2.6.2 @@ -8513,16 +9692,23 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/credential-provider-env@3.535.0': + '@aws-sdk/credential-provider-env@3.568.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/credential-provider-http@3.535.0': + '@aws-sdk/credential-provider-env@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-http@3.568.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/fetch-http-handler': 2.5.0 '@smithy/node-http-handler': 2.5.0 '@smithy/property-provider': 2.2.0 @@ -8532,6 +9718,18 @@ snapshots: '@smithy/util-stream': 2.2.0 tslib: 2.6.2 + '@aws-sdk/credential-provider-http@3.582.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/node-http-handler': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/util-stream': 3.0.1 + tslib: 2.6.2 + '@aws-sdk/credential-provider-ini@3.478.0': dependencies: '@aws-sdk/credential-provider-env': 3.468.0 @@ -8547,21 +9745,72 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/credential-provider-ini@3.549.0(@aws-sdk/credential-provider-node@3.549.0)': + '@aws-sdk/credential-provider-ini@3.568.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': + dependencies: + '@aws-sdk/client-sts': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-web-identity': 
3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/types': 3.567.0 + '@smithy/credential-provider-imds': 2.3.0 + '@smithy/property-provider': 2.2.0 + '@smithy/shared-ini-file-loader': 2.4.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-ini@3.568.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0)': + dependencies: + '@aws-sdk/client-sts': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0) + '@aws-sdk/types': 3.567.0 + '@smithy/credential-provider-imds': 2.3.0 + '@smithy/property-provider': 2.2.0 + '@smithy/shared-ini-file-loader': 2.4.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-ini@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': dependencies: - '@aws-sdk/client-sts': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-env': 3.535.0 - '@aws-sdk/credential-provider-process': 3.535.0 - '@aws-sdk/credential-provider-sso': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-web-identity': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/types': 3.535.0 + '@aws-sdk/client-sts': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/types': 3.567.0 
'@smithy/credential-provider-imds': 2.3.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 '@smithy/types': 2.12.0 tslib: 2.6.2 transitivePeerDependencies: - - '@aws-sdk/credential-provider-node' + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0)': + dependencies: + '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/credential-provider-env': 3.577.0 + '@aws-sdk/credential-provider-process': 3.577.0 + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/credential-provider-node@3.478.0': @@ -8580,21 +9829,80 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/credential-provider-node@3.549.0': + '@aws-sdk/credential-provider-node@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': + dependencies: + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-http': 3.568.0 + '@aws-sdk/credential-provider-ini': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/types': 3.567.0 + '@smithy/credential-provider-imds': 2.3.0 + '@smithy/property-provider': 2.2.0 + '@smithy/shared-ini-file-loader': 2.4.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + transitivePeerDependencies: + - 
'@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/credential-provider-node@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0)': + dependencies: + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-http': 3.568.0 + '@aws-sdk/credential-provider-ini': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0)(@aws-sdk/client-sts@3.569.0) + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0) + '@aws-sdk/types': 3.567.0 + '@smithy/credential-provider-imds': 2.3.0 + '@smithy/property-provider': 2.2.0 + '@smithy/shared-ini-file-loader': 2.4.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/credential-provider-node@3.569.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': dependencies: - '@aws-sdk/credential-provider-env': 3.535.0 - '@aws-sdk/credential-provider-http': 3.535.0 - '@aws-sdk/credential-provider-ini': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-process': 3.535.0 - '@aws-sdk/credential-provider-sso': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-web-identity': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/types': 3.535.0 + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-http': 3.568.0 + '@aws-sdk/credential-provider-ini': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + 
'@aws-sdk/types': 3.567.0 '@smithy/credential-provider-imds': 2.3.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 '@smithy/types': 2.12.0 tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0)': + dependencies: + '@aws-sdk/credential-provider-env': 3.577.0 + '@aws-sdk/credential-provider-http': 3.582.0 + '@aws-sdk/credential-provider-ini': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-process': 3.577.0 + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' - aws-crt '@aws-sdk/credential-provider-process@3.468.0': @@ -8605,14 +9913,22 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/credential-provider-process@3.535.0': + '@aws-sdk/credential-provider-process@3.568.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/credential-provider-process@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/credential-provider-sso@3.478.0': dependencies: '@aws-sdk/client-sso': 3.478.0 @@ -8625,17 +9941,43 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/credential-provider-sso@3.549.0(@aws-sdk/credential-provider-node@3.549.0)': + 
'@aws-sdk/credential-provider-sso@3.568.0(@aws-sdk/client-sso-oidc@3.569.0)': + dependencies: + '@aws-sdk/client-sso': 3.568.0 + '@aws-sdk/token-providers': 3.568.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/types': 3.567.0 + '@smithy/property-provider': 2.2.0 + '@smithy/shared-ini-file-loader': 2.4.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-sso@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: - '@aws-sdk/client-sso': 3.549.0 - '@aws-sdk/token-providers': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/types': 3.535.0 + '@aws-sdk/client-sso': 3.568.0 + '@aws-sdk/token-providers': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 '@smithy/types': 2.12.0 tslib: 2.6.2 transitivePeerDependencies: - - '@aws-sdk/credential-provider-node' + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': + dependencies: + '@aws-sdk/client-sso': 3.583.0 + '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/credential-provider-web-identity@3.468.0': @@ -8645,36 +9987,50 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/credential-provider-web-identity@3.549.0(@aws-sdk/credential-provider-node@3.549.0)': + '@aws-sdk/credential-provider-web-identity@3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': dependencies: - '@aws-sdk/client-sts': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/types': 3.535.0 + '@aws-sdk/client-sts': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/types': 3.567.0 + 
'@smithy/property-provider': 2.2.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-web-identity@3.568.0(@aws-sdk/client-sts@3.569.0)': + dependencies: + '@aws-sdk/client-sts': 3.569.0 + '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/types': 2.12.0 tslib: 2.6.2 - transitivePeerDependencies: - - '@aws-sdk/credential-provider-node' - - aws-crt - '@aws-sdk/credential-providers@3.549.0': - dependencies: - '@aws-sdk/client-cognito-identity': 3.549.0 - '@aws-sdk/client-sso': 3.549.0 - '@aws-sdk/client-sts': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-cognito-identity': 3.549.0 - '@aws-sdk/credential-provider-env': 3.535.0 - '@aws-sdk/credential-provider-http': 3.535.0 - '@aws-sdk/credential-provider-ini': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-node': 3.549.0 - '@aws-sdk/credential-provider-process': 3.535.0 - '@aws-sdk/credential-provider-sso': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/credential-provider-web-identity': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/types': 3.535.0 + '@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.583.0)': + dependencies: + '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/credential-providers@3.569.0(@aws-sdk/client-sso-oidc@3.583.0)': + dependencies: + '@aws-sdk/client-cognito-identity': 3.569.0 + '@aws-sdk/client-sso': 3.568.0 + '@aws-sdk/client-sts': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0) + '@aws-sdk/credential-provider-cognito-identity': 3.569.0 + '@aws-sdk/credential-provider-env': 3.568.0 + '@aws-sdk/credential-provider-http': 3.568.0 + '@aws-sdk/credential-provider-ini': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-node': 
3.569.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-process': 3.568.0 + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/types': 3.567.0 '@smithy/credential-provider-imds': 2.3.0 '@smithy/property-provider': 2.2.0 '@smithy/types': 2.12.0 tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/middleware-host-header@3.468.0': @@ -8684,25 +10040,38 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/middleware-host-header@3.535.0': + '@aws-sdk/middleware-host-header@3.567.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/protocol-http': 3.3.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/middleware-host-header@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/middleware-logger@3.468.0': dependencies: '@aws-sdk/types': 3.468.0 '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/middleware-logger@3.535.0': + '@aws-sdk/middleware-logger@3.568.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/middleware-logger@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/middleware-recursion-detection@3.468.0': dependencies: '@aws-sdk/types': 3.468.0 @@ -8710,19 +10079,26 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/middleware-recursion-detection@3.535.0': + '@aws-sdk/middleware-recursion-detection@3.567.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/protocol-http': 3.3.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/middleware-recursion-detection@3.577.0': + dependencies: + '@aws-sdk/types': 
3.577.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/middleware-signing@3.468.0': dependencies: '@aws-sdk/types': 3.468.0 '@smithy/property-provider': 2.2.0 '@smithy/protocol-http': 3.3.0 - '@smithy/signature-v4': 2.2.1 + '@smithy/signature-v4': 2.3.0 '@smithy/types': 2.12.0 '@smithy/util-middleware': 2.2.0 tslib: 2.6.2 @@ -8735,14 +10111,22 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/middleware-user-agent@3.540.0': + '@aws-sdk/middleware-user-agent@3.567.0': dependencies: - '@aws-sdk/types': 3.535.0 - '@aws-sdk/util-endpoints': 3.540.0 + '@aws-sdk/types': 3.567.0 + '@aws-sdk/util-endpoints': 3.567.0 '@smithy/protocol-http': 3.3.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/middleware-user-agent@3.583.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.583.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/region-config-resolver@3.470.0': dependencies: '@smithy/node-config-provider': 2.3.0 @@ -8751,15 +10135,24 @@ snapshots: '@smithy/util-middleware': 2.2.0 tslib: 2.6.2 - '@aws-sdk/region-config-resolver@3.535.0': + '@aws-sdk/region-config-resolver@3.567.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/node-config-provider': 2.3.0 '@smithy/types': 2.12.0 '@smithy/util-config-provider': 2.3.0 '@smithy/util-middleware': 2.2.0 tslib: 2.6.2 + '@aws-sdk/region-config-resolver@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/token-providers@3.478.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 @@ -8802,46 +10195,69 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/token-providers@3.549.0(@aws-sdk/credential-provider-node@3.549.0)': + '@aws-sdk/token-providers@3.568.0(@aws-sdk/client-sso-oidc@3.569.0)': dependencies: - 
'@aws-sdk/client-sso-oidc': 3.549.0(@aws-sdk/credential-provider-node@3.549.0) - '@aws-sdk/types': 3.535.0 + '@aws-sdk/client-sso-oidc': 3.569.0 + '@aws-sdk/types': 3.567.0 + '@smithy/property-provider': 2.2.0 + '@smithy/shared-ini-file-loader': 2.4.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + + '@aws-sdk/token-providers@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)': + dependencies: + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 '@smithy/types': 2.12.0 tslib: 2.6.2 - transitivePeerDependencies: - - '@aws-sdk/credential-provider-node' - - aws-crt - '@aws-sdk/types@3.342.0': + '@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: - tslib: 2.5.3 + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 '@aws-sdk/types@3.468.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/types@3.535.0': + '@aws-sdk/types@3.567.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/types@3.577.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/util-endpoints@3.478.0': dependencies: '@aws-sdk/types': 3.468.0 '@smithy/util-endpoints': 1.2.0 tslib: 2.6.2 - '@aws-sdk/util-endpoints@3.540.0': + '@aws-sdk/util-endpoints@3.567.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/types': 2.12.0 '@smithy/util-endpoints': 1.2.0 tslib: 2.6.2 - '@aws-sdk/util-locate-window@3.535.0': + '@aws-sdk/util-endpoints@3.583.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/types': 3.0.0 + '@smithy/util-endpoints': 2.0.0 + tslib: 2.6.2 + + '@aws-sdk/util-locate-window@3.568.0': dependencies: tslib: 2.6.2 @@ -8852,13 +10268,20 @@ snapshots: bowser: 2.11.0 tslib: 2.6.2 - '@aws-sdk/util-user-agent-browser@3.535.0': + 
'@aws-sdk/util-user-agent-browser@3.567.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/types': 2.12.0 bowser: 2.11.0 tslib: 2.6.2 + '@aws-sdk/util-user-agent-browser@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/types': 3.0.0 + bowser: 2.11.0 + tslib: 2.6.2 + '@aws-sdk/util-user-agent-node@3.470.0': dependencies: '@aws-sdk/types': 3.468.0 @@ -8866,20 +10289,27 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/util-user-agent-node@3.535.0': + '@aws-sdk/util-user-agent-node@3.568.0': dependencies: - '@aws-sdk/types': 3.535.0 + '@aws-sdk/types': 3.567.0 '@smithy/node-config-provider': 2.3.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@aws-sdk/util-user-agent-node@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@aws-sdk/util-utf8-browser@3.259.0': dependencies: tslib: 2.6.2 '@babel/code-frame@7.10.4': dependencies: - '@babel/highlight': 7.24.2 + '@babel/highlight': 7.24.6 '@babel/code-frame@7.22.10': dependencies: @@ -8891,25 +10321,25 @@ snapshots: '@babel/highlight': 7.22.20 chalk: 2.4.2 - '@babel/code-frame@7.24.2': + '@babel/code-frame@7.24.6': dependencies: - '@babel/highlight': 7.24.2 - picocolors: 1.0.0 + '@babel/highlight': 7.24.6 + picocolors: 1.0.1 - '@babel/compat-data@7.24.4': {} + '@babel/compat-data@7.24.6': {} - '@babel/core@7.24.4': + '@babel/core@7.24.6': dependencies: '@ampproject/remapping': 2.3.0 - '@babel/code-frame': 7.24.2 - '@babel/generator': 7.24.4 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-module-transforms': 7.23.3(@babel/core@7.24.4) - '@babel/helpers': 7.24.4 - '@babel/parser': 7.24.4 - '@babel/template': 7.24.0 - '@babel/traverse': 7.24.1 - '@babel/types': 7.24.0 + '@babel/code-frame': 7.24.6 + '@babel/generator': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-module-transforms': 7.24.6(@babel/core@7.24.6) + '@babel/helpers': 7.24.6 + 
'@babel/parser': 7.24.6 + '@babel/template': 7.24.6 + '@babel/traverse': 7.24.6 + '@babel/types': 7.24.6 convert-source-map: 2.0.0 debug: 4.3.4 gensync: 1.0.0-beta.2 @@ -8924,152 +10354,159 @@ snapshots: jsesc: 2.5.2 source-map: 0.5.7 - '@babel/generator@7.24.4': + '@babel/generator@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 jsesc: 2.5.2 - '@babel/helper-annotate-as-pure@7.22.5': + '@babel/helper-annotate-as-pure@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/helper-builder-binary-assignment-operator-visitor@7.22.15': + '@babel/helper-builder-binary-assignment-operator-visitor@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/helper-compilation-targets@7.23.6': + '@babel/helper-compilation-targets@7.24.6': dependencies: - '@babel/compat-data': 7.24.4 - '@babel/helper-validator-option': 7.23.5 + '@babel/compat-data': 7.24.6 + '@babel/helper-validator-option': 7.24.6 browserslist: 4.23.0 lru-cache: 5.1.1 semver: 6.3.1 - '@babel/helper-create-class-features-plugin@7.24.4(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-function-name': 7.23.0 - '@babel/helper-member-expression-to-functions': 7.23.0 - '@babel/helper-optimise-call-expression': 7.22.5 - '@babel/helper-replace-supers': 7.24.1(@babel/core@7.24.4) - '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 - '@babel/helper-split-export-declaration': 7.22.6 + '@babel/helper-create-class-features-plugin@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-function-name': 7.24.6 + '@babel/helper-member-expression-to-functions': 7.24.6 + '@babel/helper-optimise-call-expression': 7.24.6 + 
'@babel/helper-replace-supers': 7.24.6(@babel/core@7.24.6) + '@babel/helper-skip-transparent-expression-wrappers': 7.24.6 + '@babel/helper-split-export-declaration': 7.24.6 semver: 6.3.1 - '@babel/helper-create-regexp-features-plugin@7.22.15(@babel/core@7.24.4)': + '@babel/helper-create-regexp-features-plugin@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 regexpu-core: 5.3.2 semver: 6.3.1 - '@babel/helper-define-polyfill-provider@0.6.1(@babel/core@7.24.4)': + '@babel/helper-define-polyfill-provider@0.6.2(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 debug: 4.3.4 lodash.debounce: 4.0.8 resolve: 1.22.8 transitivePeerDependencies: - supports-color - '@babel/helper-environment-visitor@7.22.20': {} - '@babel/helper-environment-visitor@7.22.5': {} + '@babel/helper-environment-visitor@7.24.6': {} + '@babel/helper-function-name@7.22.5': dependencies: '@babel/template': 7.22.5 '@babel/types': 7.22.10 - '@babel/helper-function-name@7.23.0': + '@babel/helper-function-name@7.24.6': dependencies: - '@babel/template': 7.24.0 - '@babel/types': 7.24.0 + '@babel/template': 7.24.6 + '@babel/types': 7.24.6 '@babel/helper-hoist-variables@7.22.5': dependencies: '@babel/types': 7.23.6 - '@babel/helper-member-expression-to-functions@7.23.0': + '@babel/helper-hoist-variables@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/helper-module-imports@7.24.3': + '@babel/helper-member-expression-to-functions@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/helper-module-transforms@7.23.3(@babel/core@7.24.4)': + '@babel/helper-module-imports@7.24.6': dependencies: - '@babel/core': 7.24.4 - 
'@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-module-imports': 7.24.3 - '@babel/helper-simple-access': 7.22.5 - '@babel/helper-split-export-declaration': 7.22.6 - '@babel/helper-validator-identifier': 7.22.20 + '@babel/types': 7.24.6 + + '@babel/helper-module-transforms@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-module-imports': 7.24.6 + '@babel/helper-simple-access': 7.24.6 + '@babel/helper-split-export-declaration': 7.24.6 + '@babel/helper-validator-identifier': 7.24.6 - '@babel/helper-optimise-call-expression@7.22.5': + '@babel/helper-optimise-call-expression@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/helper-plugin-utils@7.24.0': {} + '@babel/helper-plugin-utils@7.24.6': {} - '@babel/helper-remap-async-to-generator@7.22.20(@babel/core@7.24.4)': + '@babel/helper-remap-async-to-generator@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-wrap-function': 7.22.20 + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-wrap-function': 7.24.6 - '@babel/helper-replace-supers@7.24.1(@babel/core@7.24.4)': + '@babel/helper-replace-supers@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-member-expression-to-functions': 7.23.0 - '@babel/helper-optimise-call-expression': 7.22.5 + '@babel/core': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-member-expression-to-functions': 7.24.6 + '@babel/helper-optimise-call-expression': 7.24.6 - '@babel/helper-simple-access@7.22.5': + '@babel/helper-simple-access@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/helper-skip-transparent-expression-wrappers@7.22.5': + 
'@babel/helper-skip-transparent-expression-wrappers@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 '@babel/helper-split-export-declaration@7.22.6': dependencies: '@babel/types': 7.23.6 + '@babel/helper-split-export-declaration@7.24.6': + dependencies: + '@babel/types': 7.24.6 + '@babel/helper-string-parser@7.22.5': {} '@babel/helper-string-parser@7.23.4': {} - '@babel/helper-string-parser@7.24.1': {} + '@babel/helper-string-parser@7.24.6': {} '@babel/helper-validator-identifier@7.22.20': {} '@babel/helper-validator-identifier@7.22.5': {} - '@babel/helper-validator-option@7.23.5': {} + '@babel/helper-validator-identifier@7.24.6': {} + + '@babel/helper-validator-option@7.24.6': {} - '@babel/helper-wrap-function@7.22.20': + '@babel/helper-wrap-function@7.24.6': dependencies: - '@babel/helper-function-name': 7.23.0 - '@babel/template': 7.24.0 - '@babel/types': 7.24.0 + '@babel/helper-function-name': 7.24.6 + '@babel/template': 7.24.6 + '@babel/types': 7.24.6 - '@babel/helpers@7.24.4': + '@babel/helpers@7.24.6': dependencies: - '@babel/template': 7.24.0 - '@babel/traverse': 7.24.1 - '@babel/types': 7.24.0 - transitivePeerDependencies: - - supports-color + '@babel/template': 7.24.6 + '@babel/types': 7.24.6 '@babel/highlight@7.22.10': dependencies: @@ -9083,701 +10520,707 @@ snapshots: chalk: 2.4.2 js-tokens: 4.0.0 - '@babel/highlight@7.24.2': + '@babel/highlight@7.24.6': dependencies: - '@babel/helper-validator-identifier': 7.22.20 + '@babel/helper-validator-identifier': 7.24.6 chalk: 2.4.2 js-tokens: 4.0.0 - picocolors: 1.0.0 + picocolors: 1.0.1 '@babel/parser@7.22.10': dependencies: '@babel/types': 7.17.0 - '@babel/parser@7.24.4': + '@babel/parser@7.24.6': dependencies: - '@babel/types': 7.24.0 + '@babel/types': 7.24.6 - '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.24.4(@babel/core@7.24.4)': + '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - 
'@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 - '@babel/plugin-transform-optional-chaining': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.24.6 + '@babel/plugin-transform-optional-chaining': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-proposal-async-generator-functions@7.20.7(@babel/core@7.24.4)': + '@babel/plugin-proposal-async-generator-functions@7.20.7(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-remap-async-to-generator': 7.22.20(@babel/core@7.24.4) - '@babel/plugin-syntax-async-generators': 
7.8.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-remap-async-to-generator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.24.6) - '@babel/plugin-proposal-class-properties@7.18.6(@babel/core@7.24.4)': + '@babel/plugin-proposal-class-properties@7.18.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-proposal-decorators@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-proposal-decorators@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-decorators': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-decorators': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-proposal-export-default-from@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-proposal-export-default-from@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-export-default-from': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-export-default-from': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-proposal-nullish-coalescing-operator@7.18.6(@babel/core@7.24.4)': + '@babel/plugin-proposal-logical-assignment-operators@7.20.7(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-nullish-coalescing-operator': 
7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.24.6) - '@babel/plugin-proposal-numeric-separator@7.18.6(@babel/core@7.24.4)': + '@babel/plugin-proposal-nullish-coalescing-operator@7.18.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-proposal-object-rest-spread@7.20.7(@babel/core@7.24.4)': + '@babel/plugin-proposal-numeric-separator@7.18.6(@babel/core@7.24.6)': dependencies: - '@babel/compat-data': 7.24.4 - '@babel/core': 7.24.4 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-transform-parameters': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.24.6) - '@babel/plugin-proposal-optional-catch-binding@7.18.6(@babel/core@7.24.4)': + '@babel/plugin-proposal-object-rest-spread@7.20.7(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.24.4) + '@babel/compat-data': 7.24.6 + '@babel/core': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-transform-parameters': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-proposal-optional-chaining@7.21.0(@babel/core@7.24.4)': + '@babel/plugin-proposal-optional-catch-binding@7.18.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 
7.24.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 - '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2(@babel/core@7.24.4)': + '@babel/plugin-proposal-optional-chaining@7.21.0(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.24.6 + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.24.4)': + '@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 - '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.24.4)': + '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.24.4)': + '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-decorators@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-dynamic-import@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-decorators@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - 
'@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-export-default-from@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-dynamic-import@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-export-namespace-from@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-export-default-from@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-flow@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-export-namespace-from@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-import-assertions@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-flow@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-import-attributes@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-import-assertions@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.24.4)': + '@babel/plugin-syntax-import-attributes@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + 
'@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-jsx@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.24.4)': + '@babel/plugin-syntax-jsx@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.24.4)': + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.24.4)': + '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + 
'@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.24.4)': + '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.24.4)': + '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-typescript@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-syntax-unicode-sets-regex@7.18.6(@babel/core@7.24.4)': + '@babel/plugin-syntax-typescript@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-regexp-features-plugin': 7.22.15(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-arrow-functions@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-syntax-unicode-sets-regex@7.18.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-create-regexp-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-async-generator-functions@7.24.3(@babel/core@7.24.4)': + '@babel/plugin-transform-arrow-functions@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-remap-async-to-generator': 7.22.20(@babel/core@7.24.4) - '@babel/plugin-syntax-async-generators': 
7.8.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-async-to-generator@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-async-generator-functions@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-module-imports': 7.24.3 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-remap-async-to-generator': 7.22.20(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-remap-async-to-generator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.24.6) - '@babel/plugin-transform-block-scoped-functions@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-async-to-generator@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-module-imports': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-remap-async-to-generator': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-transform-block-scoping@7.24.4(@babel/core@7.24.4)': + '@babel/plugin-transform-block-scoped-functions@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-class-properties@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-block-scoping@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-class-static-block@7.24.4(@babel/core@7.24.4)': + '@babel/plugin-transform-class-properties@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) 
- '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-classes@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-class-static-block@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-function-name': 7.23.0 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-replace-supers': 7.24.1(@babel/core@7.24.4) - '@babel/helper-split-export-declaration': 7.22.6 + '@babel/core': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.24.6) + + '@babel/plugin-transform-classes@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-function-name': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-replace-supers': 7.24.6(@babel/core@7.24.6) + '@babel/helper-split-export-declaration': 7.24.6 globals: 11.12.0 - '@babel/plugin-transform-computed-properties@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-computed-properties@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/template': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/template': 7.24.6 - '@babel/plugin-transform-destructuring@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-destructuring@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 
7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-dotall-regex@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-dotall-regex@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-regexp-features-plugin': 7.22.15(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-create-regexp-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-duplicate-keys@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-duplicate-keys@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-dynamic-import@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-dynamic-import@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-transform-exponentiation-operator@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-exponentiation-operator@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-builder-binary-assignment-operator-visitor': 7.22.15 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-builder-binary-assignment-operator-visitor': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-export-namespace-from@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-export-namespace-from@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + 
'@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-transform-flow-strip-types@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-flow-strip-types@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-flow': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-flow': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-transform-for-of@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-for-of@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.24.6 - '@babel/plugin-transform-function-name@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-function-name@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-function-name': 7.23.0 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-function-name': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-json-strings@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-json-strings@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-transform-literals@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-literals@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - 
'@babel/plugin-transform-logical-assignment-operators@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-logical-assignment-operators@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.24.6) - '@babel/plugin-transform-member-expression-literals@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-member-expression-literals@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-modules-amd@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-modules-amd@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-module-transforms': 7.23.3(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-module-transforms': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-modules-commonjs@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-modules-commonjs@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-module-transforms': 7.23.3(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-simple-access': 7.22.5 + '@babel/core': 7.24.6 + '@babel/helper-module-transforms': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-simple-access': 7.24.6 - '@babel/plugin-transform-modules-systemjs@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-modules-systemjs@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-hoist-variables': 7.22.5 - '@babel/helper-module-transforms': 7.23.3(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 
7.24.0 - '@babel/helper-validator-identifier': 7.22.20 + '@babel/core': 7.24.6 + '@babel/helper-hoist-variables': 7.24.6 + '@babel/helper-module-transforms': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-validator-identifier': 7.24.6 - '@babel/plugin-transform-modules-umd@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-modules-umd@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-module-transforms': 7.23.3(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-module-transforms': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-named-capturing-groups-regex@7.22.5(@babel/core@7.24.4)': + '@babel/plugin-transform-named-capturing-groups-regex@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-regexp-features-plugin': 7.22.15(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-create-regexp-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-new-target@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-new-target@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-nullish-coalescing-operator@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-nullish-coalescing-operator@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-transform-numeric-separator@7.24.1(@babel/core@7.24.4)': + 
'@babel/plugin-transform-numeric-separator@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.24.6) - '@babel/plugin-transform-object-rest-spread@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-object-rest-spread@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-transform-parameters': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-transform-parameters': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-transform-object-super@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-object-super@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-replace-supers': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-replace-supers': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-transform-optional-catch-binding@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-optional-catch-binding@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-transform-optional-chaining@7.24.1(@babel/core@7.24.4)': + 
'@babel/plugin-transform-optional-chaining@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 - '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.24.6 + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.6) - '@babel/plugin-transform-parameters@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-parameters@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-private-methods@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-private-methods@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-private-property-in-object@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-private-property-in-object@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.24.6) - '@babel/plugin-transform-property-literals@7.24.1(@babel/core@7.24.4)': + 
'@babel/plugin-transform-property-literals@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-react-display-name@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-react-display-name@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-react-jsx-development@7.22.5(@babel/core@7.24.4)': + '@babel/plugin-transform-react-jsx-development@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/plugin-transform-react-jsx': 7.23.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/plugin-transform-react-jsx': 7.24.6(@babel/core@7.24.6) - '@babel/plugin-transform-react-jsx-self@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-react-jsx-self@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-react-jsx-source@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-react-jsx-source@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-react-jsx@7.23.4(@babel/core@7.24.4)': + '@babel/plugin-transform-react-jsx@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-module-imports': 7.24.3 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-jsx': 7.24.1(@babel/core@7.24.4) - '@babel/types': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-module-imports': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-jsx': 7.24.6(@babel/core@7.24.6) 
+ '@babel/types': 7.24.6 - '@babel/plugin-transform-react-pure-annotations@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-react-pure-annotations@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-regenerator@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-regenerator@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 regenerator-transform: 0.15.2 - '@babel/plugin-transform-reserved-words@7.24.1(@babel/core@7.24.4)': + '@babel/plugin-transform-reserved-words@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 - '@babel/plugin-transform-runtime@7.24.3(@babel/core@7.24.4)': + '@babel/plugin-transform-runtime@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-module-imports': 7.24.3 - '@babel/helper-plugin-utils': 7.24.0 - babel-plugin-polyfill-corejs2: 0.4.10(@babel/core@7.24.4) - babel-plugin-polyfill-corejs3: 0.10.4(@babel/core@7.24.4) - babel-plugin-polyfill-regenerator: 0.6.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-module-imports': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + babel-plugin-polyfill-corejs2: 0.4.11(@babel/core@7.24.6) + babel-plugin-polyfill-corejs3: 0.10.4(@babel/core@7.24.6) + babel-plugin-polyfill-regenerator: 0.6.2(@babel/core@7.24.6) semver: 6.3.1 transitivePeerDependencies: - supports-color - '@babel/plugin-transform-shorthand-properties@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-spread@7.24.1(@babel/core@7.24.4)': - 
dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 - - '@babel/plugin-transform-sticky-regex@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-template-literals@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-typeof-symbol@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-typescript@7.24.4(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-annotate-as-pure': 7.22.5 - '@babel/helper-create-class-features-plugin': 7.24.4(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - '@babel/plugin-syntax-typescript': 7.24.1(@babel/core@7.24.4) - - '@babel/plugin-transform-unicode-escapes@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-unicode-property-regex@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-regexp-features-plugin': 7.22.15(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-unicode-regex@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-regexp-features-plugin': 7.22.15(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/plugin-transform-unicode-sets-regex@7.24.1(@babel/core@7.24.4)': - dependencies: - '@babel/core': 7.24.4 - '@babel/helper-create-regexp-features-plugin': 7.22.15(@babel/core@7.24.4) - '@babel/helper-plugin-utils': 7.24.0 - - '@babel/preset-env@7.24.4(@babel/core@7.24.4)': - dependencies: - '@babel/compat-data': 7.24.4 - '@babel/core': 7.24.4 - '@babel/helper-compilation-targets': 7.23.6 - '@babel/helper-plugin-utils': 7.24.0 - 
'@babel/helper-validator-option': 7.23.5 - '@babel/plugin-bugfix-firefox-class-in-computed-class-key': 7.24.4(@babel/core@7.24.4) - '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-proposal-private-property-in-object': 7.21.0-placeholder-for-preset-env.2(@babel/core@7.24.4) - '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.24.4) - '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.24.4) - '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.24.4) - '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-import-assertions': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-syntax-import-attributes': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.24.4) - '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.24.4) - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.24.4) - '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.24.4) - '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.24.4) - '@babel/plugin-syntax-unicode-sets-regex': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-transform-arrow-functions': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-async-generator-functions': 7.24.3(@babel/core@7.24.4) - 
'@babel/plugin-transform-async-to-generator': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-block-scoped-functions': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-block-scoping': 7.24.4(@babel/core@7.24.4) - '@babel/plugin-transform-class-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-class-static-block': 7.24.4(@babel/core@7.24.4) - '@babel/plugin-transform-classes': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-computed-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-destructuring': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-dotall-regex': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-duplicate-keys': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-dynamic-import': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-exponentiation-operator': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-export-namespace-from': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-for-of': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-function-name': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-json-strings': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-logical-assignment-operators': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-member-expression-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-amd': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-commonjs': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-systemjs': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-umd': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-named-capturing-groups-regex': 7.22.5(@babel/core@7.24.4) - '@babel/plugin-transform-new-target': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-nullish-coalescing-operator': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-numeric-separator': 7.24.1(@babel/core@7.24.4) - 
'@babel/plugin-transform-object-rest-spread': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-object-super': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-optional-catch-binding': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-optional-chaining': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-parameters': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-private-methods': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-private-property-in-object': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-property-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-regenerator': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-reserved-words': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-shorthand-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-spread': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-sticky-regex': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-template-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-typeof-symbol': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-unicode-escapes': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-unicode-property-regex': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-unicode-regex': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-unicode-sets-regex': 7.24.1(@babel/core@7.24.4) - '@babel/preset-modules': 0.1.6-no-external-plugins(@babel/core@7.24.4) - babel-plugin-polyfill-corejs2: 0.4.10(@babel/core@7.24.4) - babel-plugin-polyfill-corejs3: 0.10.4(@babel/core@7.24.4) - babel-plugin-polyfill-regenerator: 0.6.1(@babel/core@7.24.4) - core-js-compat: 3.36.1 + '@babel/plugin-transform-shorthand-properties@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-spread@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + 
'@babel/helper-skip-transparent-expression-wrappers': 7.24.6 + + '@babel/plugin-transform-sticky-regex@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-template-literals@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-typeof-symbol@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-typescript@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-annotate-as-pure': 7.24.6 + '@babel/helper-create-class-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + '@babel/plugin-syntax-typescript': 7.24.6(@babel/core@7.24.6) + + '@babel/plugin-transform-unicode-escapes@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-unicode-property-regex@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-create-regexp-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-unicode-regex@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-create-regexp-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/plugin-transform-unicode-sets-regex@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/core': 7.24.6 + '@babel/helper-create-regexp-features-plugin': 7.24.6(@babel/core@7.24.6) + '@babel/helper-plugin-utils': 7.24.6 + + '@babel/preset-env@7.24.6(@babel/core@7.24.6)': + dependencies: + '@babel/compat-data': 7.24.6 + '@babel/core': 7.24.6 + '@babel/helper-compilation-targets': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-validator-option': 7.24.6 + 
'@babel/plugin-bugfix-firefox-class-in-computed-class-key': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-proposal-private-property-in-object': 7.21.0-placeholder-for-preset-env.2(@babel/core@7.24.6) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.24.6) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.24.6) + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.24.6) + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-import-assertions': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-syntax-import-attributes': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.24.6) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.24.6) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.24.6) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.24.6) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.24.6) + '@babel/plugin-syntax-unicode-sets-regex': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-transform-arrow-functions': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-async-generator-functions': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-async-to-generator': 
7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-block-scoped-functions': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-block-scoping': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-class-properties': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-class-static-block': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-classes': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-computed-properties': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-destructuring': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-dotall-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-duplicate-keys': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-dynamic-import': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-exponentiation-operator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-export-namespace-from': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-for-of': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-function-name': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-json-strings': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-literals': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-logical-assignment-operators': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-member-expression-literals': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-modules-amd': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-modules-commonjs': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-modules-systemjs': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-modules-umd': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-named-capturing-groups-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-new-target': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-nullish-coalescing-operator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-numeric-separator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-object-rest-spread': 
7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-object-super': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-optional-catch-binding': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-optional-chaining': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-parameters': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-private-methods': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-private-property-in-object': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-property-literals': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-regenerator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-reserved-words': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-shorthand-properties': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-spread': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-sticky-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-template-literals': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-typeof-symbol': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-unicode-escapes': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-unicode-property-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-unicode-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-unicode-sets-regex': 7.24.6(@babel/core@7.24.6) + '@babel/preset-modules': 0.1.6-no-external-plugins(@babel/core@7.24.6) + babel-plugin-polyfill-corejs2: 0.4.11(@babel/core@7.24.6) + babel-plugin-polyfill-corejs3: 0.10.4(@babel/core@7.24.6) + babel-plugin-polyfill-regenerator: 0.6.2(@babel/core@7.24.6) + core-js-compat: 3.37.1 semver: 6.3.1 transitivePeerDependencies: - supports-color - '@babel/preset-flow@7.24.1(@babel/core@7.24.4)': + '@babel/preset-flow@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-validator-option': 7.23.5 - '@babel/plugin-transform-flow-strip-types': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + 
'@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-validator-option': 7.24.6 + '@babel/plugin-transform-flow-strip-types': 7.24.6(@babel/core@7.24.6) - '@babel/preset-modules@0.1.6-no-external-plugins(@babel/core@7.24.4)': + '@babel/preset-modules@0.1.6-no-external-plugins(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/types': 7.24.0 + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/types': 7.24.6 esutils: 2.0.3 - '@babel/preset-react@7.24.1(@babel/core@7.24.4)': + '@babel/preset-react@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-validator-option': 7.23.5 - '@babel/plugin-transform-react-display-name': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-react-jsx': 7.23.4(@babel/core@7.24.4) - '@babel/plugin-transform-react-jsx-development': 7.22.5(@babel/core@7.24.4) - '@babel/plugin-transform-react-pure-annotations': 7.24.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-validator-option': 7.24.6 + '@babel/plugin-transform-react-display-name': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-jsx': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-jsx-development': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-pure-annotations': 7.24.6(@babel/core@7.24.6) - '@babel/preset-typescript@7.24.1(@babel/core@7.24.4)': + '@babel/preset-typescript@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 - '@babel/helper-plugin-utils': 7.24.0 - '@babel/helper-validator-option': 7.23.5 - '@babel/plugin-syntax-jsx': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-commonjs': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-typescript': 7.24.4(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-plugin-utils': 7.24.6 + '@babel/helper-validator-option': 7.24.6 + '@babel/plugin-syntax-jsx': 
7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-modules-commonjs': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-typescript': 7.24.6(@babel/core@7.24.6) - '@babel/register@7.23.7(@babel/core@7.24.4)': + '@babel/register@7.24.6(@babel/core@7.24.6)': dependencies: - '@babel/core': 7.24.4 + '@babel/core': 7.24.6 clone-deep: 4.0.1 find-cache-dir: 2.1.0 make-dir: 2.1.0 @@ -9790,7 +11233,7 @@ snapshots: dependencies: regenerator-runtime: 0.14.0 - '@babel/runtime@7.24.4': + '@babel/runtime@7.24.6': dependencies: regenerator-runtime: 0.14.1 @@ -9800,11 +11243,11 @@ snapshots: '@babel/parser': 7.22.10 '@babel/types': 7.22.10 - '@babel/template@7.24.0': + '@babel/template@7.24.6': dependencies: - '@babel/code-frame': 7.24.2 - '@babel/parser': 7.24.4 - '@babel/types': 7.24.0 + '@babel/code-frame': 7.24.6 + '@babel/parser': 7.24.6 + '@babel/types': 7.24.6 '@babel/traverse@7.17.3': dependencies: @@ -9821,16 +11264,16 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/traverse@7.24.1': + '@babel/traverse@7.24.6': dependencies: - '@babel/code-frame': 7.24.2 - '@babel/generator': 7.24.4 - '@babel/helper-environment-visitor': 7.22.20 - '@babel/helper-function-name': 7.23.0 - '@babel/helper-hoist-variables': 7.22.5 - '@babel/helper-split-export-declaration': 7.22.6 - '@babel/parser': 7.24.4 - '@babel/types': 7.24.0 + '@babel/code-frame': 7.24.6 + '@babel/generator': 7.24.6 + '@babel/helper-environment-visitor': 7.24.6 + '@babel/helper-function-name': 7.24.6 + '@babel/helper-hoist-variables': 7.24.6 + '@babel/helper-split-export-declaration': 7.24.6 + '@babel/parser': 7.24.6 + '@babel/types': 7.24.6 debug: 4.3.4 globals: 11.12.0 transitivePeerDependencies: @@ -9853,48 +11296,50 @@ snapshots: '@babel/helper-validator-identifier': 7.22.20 to-fast-properties: 2.0.0 - '@babel/types@7.24.0': + '@babel/types@7.24.6': dependencies: - '@babel/helper-string-parser': 7.24.1 - '@babel/helper-validator-identifier': 7.22.20 + '@babel/helper-string-parser': 7.24.6 + 
'@babel/helper-validator-identifier': 7.24.6 to-fast-properties: 2.0.0 '@balena/dockerignore@1.0.2': {} - '@cloudflare/workers-types@4.20230904.0': {} + '@cloudflare/workers-types@4.20240512.0': {} + + '@cloudflare/workers-types@4.20240524.0': + optional: true '@colors/colors@1.5.0': optional: true - '@dprint/darwin-arm64@0.45.0': + '@cspotcode/source-map-support@0.8.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + + '@dprint/darwin-arm64@0.46.3': optional: true - '@dprint/darwin-x64@0.45.0': + '@dprint/darwin-x64@0.46.3': optional: true - '@dprint/linux-arm64-glibc@0.45.0': + '@dprint/linux-arm64-glibc@0.46.3': optional: true - '@dprint/linux-arm64-musl@0.45.0': + '@dprint/linux-arm64-musl@0.46.3': optional: true - '@dprint/linux-x64-glibc@0.45.0': + '@dprint/linux-x64-glibc@0.46.3': optional: true - '@dprint/linux-x64-musl@0.45.0': + '@dprint/linux-x64-musl@0.46.3': optional: true - '@dprint/win32-x64@0.45.0': + '@dprint/win32-x64@0.46.3': optional: true '@drizzle-team/studio@0.0.5': {} - '@electric-sql/pglite@0.1.1': {} - - '@esbuild-kit/cjs-loader@2.4.2': - dependencies: - '@esbuild-kit/core-utils': 3.1.0 - get-tsconfig: 4.5.0 + '@electric-sql/pglite@0.1.5': {} '@esbuild-kit/core-utils@3.1.0': dependencies: @@ -9904,7 +11349,13 @@ snapshots: '@esbuild-kit/esm-loader@2.5.5': dependencies: '@esbuild-kit/core-utils': 3.1.0 - get-tsconfig: 4.5.0 + get-tsconfig: 4.7.5 + + '@esbuild/aix-ppc64@0.20.2': + optional: true + + '@esbuild/aix-ppc64@0.21.5': + optional: true '@esbuild/android-arm64@0.17.19': optional: true @@ -9912,60 +11363,120 @@ snapshots: '@esbuild/android-arm64@0.18.20': optional: true + '@esbuild/android-arm64@0.20.2': + optional: true + + '@esbuild/android-arm64@0.21.5': + optional: true + '@esbuild/android-arm@0.17.19': optional: true '@esbuild/android-arm@0.18.20': optional: true + '@esbuild/android-arm@0.20.2': + optional: true + + '@esbuild/android-arm@0.21.5': + optional: true + '@esbuild/android-x64@0.17.19': optional: true 
'@esbuild/android-x64@0.18.20': optional: true + '@esbuild/android-x64@0.20.2': + optional: true + + '@esbuild/android-x64@0.21.5': + optional: true + '@esbuild/darwin-arm64@0.17.19': optional: true '@esbuild/darwin-arm64@0.18.20': optional: true + '@esbuild/darwin-arm64@0.20.2': + optional: true + + '@esbuild/darwin-arm64@0.21.5': + optional: true + '@esbuild/darwin-x64@0.17.19': optional: true '@esbuild/darwin-x64@0.18.20': optional: true + '@esbuild/darwin-x64@0.20.2': + optional: true + + '@esbuild/darwin-x64@0.21.5': + optional: true + '@esbuild/freebsd-arm64@0.17.19': optional: true '@esbuild/freebsd-arm64@0.18.20': optional: true + '@esbuild/freebsd-arm64@0.20.2': + optional: true + + '@esbuild/freebsd-arm64@0.21.5': + optional: true + '@esbuild/freebsd-x64@0.17.19': optional: true '@esbuild/freebsd-x64@0.18.20': optional: true + '@esbuild/freebsd-x64@0.20.2': + optional: true + + '@esbuild/freebsd-x64@0.21.5': + optional: true + '@esbuild/linux-arm64@0.17.19': optional: true '@esbuild/linux-arm64@0.18.20': optional: true + '@esbuild/linux-arm64@0.20.2': + optional: true + + '@esbuild/linux-arm64@0.21.5': + optional: true + '@esbuild/linux-arm@0.17.19': optional: true '@esbuild/linux-arm@0.18.20': optional: true + '@esbuild/linux-arm@0.20.2': + optional: true + + '@esbuild/linux-arm@0.21.5': + optional: true + '@esbuild/linux-ia32@0.17.19': optional: true '@esbuild/linux-ia32@0.18.20': optional: true + '@esbuild/linux-ia32@0.20.2': + optional: true + + '@esbuild/linux-ia32@0.21.5': + optional: true + '@esbuild/linux-loong64@0.14.54': optional: true @@ -9975,28 +11486,58 @@ snapshots: '@esbuild/linux-loong64@0.18.20': optional: true + '@esbuild/linux-loong64@0.20.2': + optional: true + + '@esbuild/linux-loong64@0.21.5': + optional: true + '@esbuild/linux-mips64el@0.17.19': optional: true '@esbuild/linux-mips64el@0.18.20': optional: true + '@esbuild/linux-mips64el@0.20.2': + optional: true + + '@esbuild/linux-mips64el@0.21.5': + optional: true + 
'@esbuild/linux-ppc64@0.17.19': optional: true '@esbuild/linux-ppc64@0.18.20': optional: true + '@esbuild/linux-ppc64@0.20.2': + optional: true + + '@esbuild/linux-ppc64@0.21.5': + optional: true + '@esbuild/linux-riscv64@0.17.19': optional: true '@esbuild/linux-riscv64@0.18.20': optional: true + '@esbuild/linux-riscv64@0.20.2': + optional: true + + '@esbuild/linux-riscv64@0.21.5': + optional: true + '@esbuild/linux-s390x@0.17.19': optional: true - '@esbuild/linux-s390x@0.18.20': + '@esbuild/linux-s390x@0.18.20': + optional: true + + '@esbuild/linux-s390x@0.20.2': + optional: true + + '@esbuild/linux-s390x@0.21.5': optional: true '@esbuild/linux-x64@0.17.19': @@ -10005,42 +11546,84 @@ snapshots: '@esbuild/linux-x64@0.18.20': optional: true + '@esbuild/linux-x64@0.20.2': + optional: true + + '@esbuild/linux-x64@0.21.5': + optional: true + '@esbuild/netbsd-x64@0.17.19': optional: true '@esbuild/netbsd-x64@0.18.20': optional: true + '@esbuild/netbsd-x64@0.20.2': + optional: true + + '@esbuild/netbsd-x64@0.21.5': + optional: true + '@esbuild/openbsd-x64@0.17.19': optional: true '@esbuild/openbsd-x64@0.18.20': optional: true + '@esbuild/openbsd-x64@0.20.2': + optional: true + + '@esbuild/openbsd-x64@0.21.5': + optional: true + '@esbuild/sunos-x64@0.17.19': optional: true '@esbuild/sunos-x64@0.18.20': optional: true + '@esbuild/sunos-x64@0.20.2': + optional: true + + '@esbuild/sunos-x64@0.21.5': + optional: true + '@esbuild/win32-arm64@0.17.19': optional: true '@esbuild/win32-arm64@0.18.20': optional: true + '@esbuild/win32-arm64@0.20.2': + optional: true + + '@esbuild/win32-arm64@0.21.5': + optional: true + '@esbuild/win32-ia32@0.17.19': optional: true '@esbuild/win32-ia32@0.18.20': optional: true + '@esbuild/win32-ia32@0.20.2': + optional: true + + '@esbuild/win32-ia32@0.21.5': + optional: true + '@esbuild/win32-x64@0.17.19': optional: true '@esbuild/win32-x64@0.18.20': optional: true + '@esbuild/win32-x64@0.20.2': + optional: true + + '@esbuild/win32-x64@0.21.5': + 
optional: true + '@eslint-community/eslint-utils@4.4.0(eslint@8.50.0)': dependencies: eslint: 8.50.0 @@ -10081,7 +11664,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/eslintrc@3.0.2': + '@eslint/eslintrc@3.1.0': dependencies: ajv: 6.12.6 debug: 4.3.4 @@ -10106,37 +11689,38 @@ snapshots: mv: 2.1.1 safe-json-stringify: 1.2.0 - '@expo/cli@0.17.8(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.10.3)(utf-8-validate@6.0.3)': + '@expo/cli@0.18.13(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.11.1)(utf-8-validate@6.0.3)': dependencies: - '@babel/runtime': 7.24.4 + '@babel/runtime': 7.24.6 '@expo/code-signing-certificates': 0.0.5 - '@expo/config': 8.5.4 - '@expo/config-plugins': 7.8.4 - '@expo/devcert': 1.1.0 - '@expo/env': 0.2.2 - '@expo/image-utils': 0.4.1(encoding@0.1.13) - '@expo/json-file': 8.3.0 - '@expo/metro-config': 0.17.6(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))) - '@expo/osascript': 2.1.0 - '@expo/package-manager': 1.4.2 - '@expo/plist': 0.1.0 - '@expo/prebuild-config': 6.7.4(encoding@0.1.13)(expo-modules-autolinking@1.10.3) + '@expo/config': 9.0.2 + '@expo/config-plugins': 8.0.4 + '@expo/devcert': 1.1.2 + '@expo/env': 0.3.0 + '@expo/image-utils': 0.5.1(encoding@0.1.13) + '@expo/json-file': 8.3.3 + '@expo/metro-config': 0.18.4 + '@expo/osascript': 2.1.2 + '@expo/package-manager': 1.5.2 + '@expo/plist': 0.1.3 + '@expo/prebuild-config': 7.0.4(encoding@0.1.13)(expo-modules-autolinking@1.11.1) '@expo/rudder-sdk-node': 1.1.1(encoding@0.1.13) - '@expo/spawn-async': 1.5.0 + '@expo/spawn-async': 1.7.2 '@expo/xcpretty': 4.3.1 - '@react-native/dev-middleware': 0.73.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native/dev-middleware': 0.74.83(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) '@urql/core': 2.3.6(graphql@15.8.0) 
'@urql/exchange-retry': 0.3.0(graphql@15.8.0) accepts: 1.3.8 arg: 5.0.2 better-opn: 3.0.2 bplist-parser: 0.3.2 - cacache: 15.3.0 + cacache: 18.0.3 chalk: 4.1.2 ci-info: 3.9.0 connect: 3.7.0 debug: 4.3.4 env-editor: 0.4.2 + fast-glob: 3.3.2 find-yarn-workspace-root: 2.0.0 form-data: 3.0.1 freeport-async: 2.0.0 @@ -10154,7 +11738,6 @@ snapshots: lodash.debounce: 4.0.8 md5hex: 1.0.0 minimatch: 3.1.2 - minipass: 3.3.6 node-fetch: 2.7.0(encoding@0.1.13) node-forge: 1.3.1 npm-package-arg: 7.0.0 @@ -10170,7 +11753,7 @@ snapshots: resolve: 1.22.8 resolve-from: 5.0.0 resolve.exports: 2.0.2 - semver: 7.6.0 + semver: 7.6.2 send: 0.18.0 slugify: 1.6.6 source-map-support: 0.5.21 @@ -10183,10 +11766,8 @@ snapshots: text-table: 0.2.0 url-join: 4.0.0 wrap-ansi: 7.0.0 - ws: 8.16.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) transitivePeerDependencies: - - '@react-native/babel-preset' - - bluebird - bufferutil - encoding - expo-modules-autolinking @@ -10198,21 +11779,19 @@ snapshots: node-forge: 1.3.1 nullthrows: 1.1.1 - '@expo/config-plugins@7.8.4': + '@expo/config-plugins@8.0.4': dependencies: - '@expo/config-types': 50.0.0 - '@expo/fingerprint': 0.6.0 - '@expo/json-file': 8.3.0 - '@expo/plist': 0.1.0 + '@expo/config-types': 51.0.0 + '@expo/json-file': 8.3.3 + '@expo/plist': 0.1.3 '@expo/sdk-runtime-versions': 1.0.0 - '@react-native/normalize-color': 2.1.0 chalk: 4.1.2 debug: 4.3.4 find-up: 5.0.0 getenv: 1.0.0 glob: 7.1.6 resolve-from: 5.0.0 - semver: 7.6.0 + semver: 7.6.2 slash: 3.0.0 slugify: 1.6.6 xcode: 3.0.1 @@ -10220,25 +11799,25 @@ snapshots: transitivePeerDependencies: - supports-color - '@expo/config-types@50.0.0': {} + '@expo/config-types@51.0.0': {} - '@expo/config@8.5.4': + '@expo/config@9.0.2': dependencies: '@babel/code-frame': 7.10.4 - '@expo/config-plugins': 7.8.4 - '@expo/config-types': 50.0.0 - '@expo/json-file': 8.3.0 + '@expo/config-plugins': 8.0.4 + '@expo/config-types': 51.0.0 + '@expo/json-file': 8.3.3 getenv: 
1.0.0 glob: 7.1.6 require-from-string: 2.0.2 resolve-from: 5.0.0 - semver: 7.5.3 + semver: 7.6.2 slugify: 1.6.6 sucrase: 3.34.0 transitivePeerDependencies: - supports-color - '@expo/devcert@1.1.0': + '@expo/devcert@1.1.2': dependencies: application-config-path: 0.1.1 command-exists: 1.2.9 @@ -10256,61 +11835,47 @@ snapshots: transitivePeerDependencies: - supports-color - '@expo/env@0.2.2': + '@expo/env@0.3.0': dependencies: chalk: 4.1.2 debug: 4.3.4 - dotenv: 16.0.3 - dotenv-expand: 10.0.0 + dotenv: 16.4.5 + dotenv-expand: 11.0.6 getenv: 1.0.0 transitivePeerDependencies: - supports-color - '@expo/fingerprint@0.6.0': + '@expo/image-utils@0.5.1(encoding@0.1.13)': dependencies: '@expo/spawn-async': 1.7.2 chalk: 4.1.2 - debug: 4.3.4 - find-up: 5.0.0 - minimatch: 3.1.2 - p-limit: 3.1.0 - resolve-from: 5.0.0 - transitivePeerDependencies: - - supports-color - - '@expo/image-utils@0.4.1(encoding@0.1.13)': - dependencies: - '@expo/spawn-async': 1.5.0 - chalk: 4.1.2 fs-extra: 9.0.0 getenv: 1.0.0 jimp-compact: 0.16.1 node-fetch: 2.7.0(encoding@0.1.13) parse-png: 2.1.0 resolve-from: 5.0.0 - semver: 7.3.2 + semver: 7.6.2 tempy: 0.3.0 transitivePeerDependencies: - encoding - '@expo/json-file@8.3.0': + '@expo/json-file@8.3.3': dependencies: '@babel/code-frame': 7.10.4 json5: 2.2.3 write-file-atomic: 2.4.3 - '@expo/metro-config@0.17.6(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))': + '@expo/metro-config@0.18.4': dependencies: - '@babel/core': 7.24.4 - '@babel/generator': 7.24.4 - '@babel/parser': 7.24.4 - '@babel/types': 7.24.0 - '@expo/config': 8.5.4 - '@expo/env': 0.2.2 - '@expo/json-file': 8.3.0 + '@babel/core': 7.24.6 + '@babel/generator': 7.24.6 + '@babel/parser': 7.24.6 + '@babel/types': 7.24.6 + '@expo/config': 9.0.2 + '@expo/env': 0.3.0 + '@expo/json-file': 8.3.3 '@expo/spawn-async': 1.7.2 - '@react-native/babel-preset': 0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)) - babel-preset-fbjs: 
3.4.0(@babel/core@7.24.4) chalk: 4.1.2 debug: 4.3.4 find-yarn-workspace-root: 2.0.0 @@ -10319,50 +11884,50 @@ snapshots: glob: 7.2.3 jsc-safe-url: 0.2.4 lightningcss: 1.19.0 - postcss: 8.4.38 + postcss: 8.4.39 resolve-from: 5.0.0 - sucrase: 3.34.0 transitivePeerDependencies: - supports-color - '@expo/osascript@2.1.0': + '@expo/osascript@2.1.2': dependencies: '@expo/spawn-async': 1.7.2 exec-async: 2.2.0 - '@expo/package-manager@1.4.2': + '@expo/package-manager@1.5.2': dependencies: - '@expo/json-file': 8.3.0 + '@expo/json-file': 8.3.3 '@expo/spawn-async': 1.7.2 ansi-regex: 5.0.1 chalk: 4.1.2 find-up: 5.0.0 find-yarn-workspace-root: 2.0.0 js-yaml: 3.14.1 - micromatch: 4.0.5 + micromatch: 4.0.7 npm-package-arg: 7.0.0 ora: 3.4.0 split: 1.0.1 sudo-prompt: 9.1.1 - '@expo/plist@0.1.0': + '@expo/plist@0.1.3': dependencies: '@xmldom/xmldom': 0.7.13 base64-js: 1.5.1 xmlbuilder: 14.0.0 - '@expo/prebuild-config@6.7.4(encoding@0.1.13)(expo-modules-autolinking@1.10.3)': + '@expo/prebuild-config@7.0.4(encoding@0.1.13)(expo-modules-autolinking@1.11.1)': dependencies: - '@expo/config': 8.5.4 - '@expo/config-plugins': 7.8.4 - '@expo/config-types': 50.0.0 - '@expo/image-utils': 0.4.1(encoding@0.1.13) - '@expo/json-file': 8.3.0 + '@expo/config': 9.0.2 + '@expo/config-plugins': 8.0.4 + '@expo/config-types': 51.0.0 + '@expo/image-utils': 0.5.1(encoding@0.1.13) + '@expo/json-file': 8.3.3 + '@react-native/normalize-colors': 0.74.83 debug: 4.3.4 - expo-modules-autolinking: 1.10.3 + expo-modules-autolinking: 1.11.1 fs-extra: 9.1.0 resolve-from: 5.0.0 - semver: 7.5.3 + semver: 7.6.2 xml2js: 0.6.0 transitivePeerDependencies: - encoding @@ -10382,15 +11947,13 @@ snapshots: '@expo/sdk-runtime-versions@1.0.0': {} - '@expo/spawn-async@1.5.0': - dependencies: - cross-spawn: 6.0.5 - '@expo/spawn-async@1.7.2': dependencies: cross-spawn: 7.0.3 - '@expo/vector-icons@14.0.0': {} + '@expo/vector-icons@14.0.2': + dependencies: + prop-types: 15.8.1 '@expo/websql@1.0.1': dependencies: @@ -10409,7 +11972,8 
@@ snapshots: '@fastify/busboy@2.1.1': {} - '@gar/promisify@1.1.3': {} + '@gar/promisify@1.1.3': + optional: true '@graphql-typed-document-node/core@3.2.0(graphql@15.8.0)': dependencies: @@ -10464,14 +12028,14 @@ snapshots: dependencies: '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.12.4 + '@types/node': 20.12.12 jest-mock: 29.7.0 '@jest/fake-timers@29.7.0': dependencies: '@jest/types': 29.6.3 '@sinonjs/fake-timers': 10.3.0 - '@types/node': 20.12.4 + '@types/node': 20.12.12 jest-message-util: 29.7.0 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -10484,7 +12048,7 @@ snapshots: dependencies: '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 20.12.4 + '@types/node': 20.12.12 '@types/yargs': 15.0.19 chalk: 4.1.2 @@ -10493,7 +12057,7 @@ snapshots: '@jest/schemas': 29.6.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 20.12.4 + '@types/node': 20.12.12 '@types/yargs': 17.0.32 chalk: 4.1.2 @@ -10541,81 +12105,67 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@libsql/client@0.5.6(bufferutil@4.0.7)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@jridgewell/trace-mapping@0.3.9': dependencies: - '@libsql/core': 0.5.6 - '@libsql/hrana-client': 0.5.6(bufferutil@4.0.7)(encoding@0.1.13)(utf-8-validate@6.0.3) - js-base64: 3.7.5 - libsql: 0.3.10 - transitivePeerDependencies: - - bufferutil - - encoding - - utf-8-validate + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 '@libsql/client@0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: '@libsql/core': 0.5.6 '@libsql/hrana-client': 0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - js-base64: 3.7.5 - libsql: 0.3.10 + js-base64: 3.7.7 + libsql: 0.3.18 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate - '@libsql/client@0.5.6(encoding@0.1.13)': + '@libsql/client@0.6.0': dependencies: - '@libsql/core': 0.5.6 - 
'@libsql/hrana-client': 0.5.6(encoding@0.1.13) - js-base64: 3.7.5 - libsql: 0.3.10 + '@libsql/core': 0.6.0 + '@libsql/hrana-client': 0.6.0 + js-base64: 3.7.7 + libsql: 0.3.18 transitivePeerDependencies: - bufferutil - - encoding - utf-8-validate optional: true '@libsql/core@0.5.6': dependencies: - js-base64: 3.7.5 + js-base64: 3.7.7 - '@libsql/darwin-arm64@0.3.10': + '@libsql/core@0.6.0': + dependencies: + js-base64: 3.7.7 optional: true - '@libsql/darwin-x64@0.3.10': + '@libsql/darwin-arm64@0.3.18': optional: true - '@libsql/hrana-client@0.5.6(bufferutil@4.0.7)(encoding@0.1.13)(utf-8-validate@6.0.3)': - dependencies: - '@libsql/isomorphic-fetch': 0.1.12(encoding@0.1.13) - '@libsql/isomorphic-ws': 0.1.5(bufferutil@4.0.7)(utf-8-validate@6.0.3) - js-base64: 3.7.5 - node-fetch: 3.3.2 - transitivePeerDependencies: - - bufferutil - - encoding - - utf-8-validate + '@libsql/darwin-x64@0.3.18': + optional: true '@libsql/hrana-client@0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: '@libsql/isomorphic-fetch': 0.1.12(encoding@0.1.13) '@libsql/isomorphic-ws': 0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3) - js-base64: 3.7.5 + js-base64: 3.7.7 node-fetch: 3.3.2 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate - '@libsql/hrana-client@0.5.6(encoding@0.1.13)': + '@libsql/hrana-client@0.6.0': dependencies: - '@libsql/isomorphic-fetch': 0.1.12(encoding@0.1.13) - '@libsql/isomorphic-ws': 0.1.5 - js-base64: 3.7.5 + '@libsql/isomorphic-fetch': 0.2.1 + '@libsql/isomorphic-ws': 0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3) + js-base64: 3.7.7 node-fetch: 3.3.2 transitivePeerDependencies: - bufferutil - - encoding - utf-8-validate optional: true @@ -10626,61 +12176,32 @@ snapshots: transitivePeerDependencies: - encoding - '@libsql/isomorphic-ws@0.1.5': - dependencies: - '@types/ws': 8.5.4 - ws: 8.14.2 - transitivePeerDependencies: - - bufferutil - - utf-8-validate + '@libsql/isomorphic-fetch@0.2.1': optional: true - 
'@libsql/isomorphic-ws@0.1.5(bufferutil@4.0.7)(utf-8-validate@6.0.3)': - dependencies: - '@types/ws': 8.5.4 - ws: 8.14.2(bufferutil@4.0.7)(utf-8-validate@6.0.3) - transitivePeerDependencies: - - bufferutil - - utf-8-validate - '@libsql/isomorphic-ws@0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3)': dependencies: '@types/ws': 8.5.4 - ws: 8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) transitivePeerDependencies: - bufferutil - utf-8-validate - '@libsql/linux-arm64-gnu@0.3.10': + '@libsql/linux-arm64-gnu@0.3.18': optional: true - '@libsql/linux-arm64-musl@0.3.10': + '@libsql/linux-arm64-musl@0.3.18': optional: true - '@libsql/linux-x64-gnu@0.3.10': + '@libsql/linux-x64-gnu@0.3.18': optional: true - '@libsql/linux-x64-musl@0.3.10': + '@libsql/linux-x64-musl@0.3.18': optional: true - '@libsql/win32-x64-msvc@0.3.10': + '@libsql/win32-x64-msvc@0.3.18': optional: true - '@mapbox/node-pre-gyp@1.0.10(encoding@0.1.13)': - dependencies: - detect-libc: 2.0.1 - https-proxy-agent: 5.0.1 - make-dir: 3.1.0 - node-fetch: 2.6.9(encoding@0.1.13) - nopt: 5.0.0 - npmlog: 5.0.1 - rimraf: 3.0.2 - semver: 7.5.4 - tar: 6.1.13 - transitivePeerDependencies: - - encoding - - supports-color - '@miniflare/core@2.14.2': dependencies: '@iarna/toml': 2.2.5 @@ -10705,7 +12226,7 @@ snapshots: '@miniflare/shared@2.14.2': dependencies: - '@types/better-sqlite3': 7.6.4 + '@types/better-sqlite3': 7.6.10 kleur: 4.1.5 npx-import: 1.1.4 picomatch: 2.3.1 @@ -10716,10 +12237,6 @@ snapshots: '@neon-rs/load@0.0.4': {} - '@neondatabase/serverless@0.4.3': - dependencies: - '@types/pg': 8.10.1 - '@neondatabase/serverless@0.7.2': dependencies: '@types/pg': 8.6.6 @@ -10728,6 +12245,11 @@ snapshots: dependencies: '@types/pg': 8.6.6 + '@neondatabase/serverless@0.9.3': + dependencies: + '@types/pg': 8.11.6 + optional: true + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -10743,19 +12265,25 @@ snapshots: '@npmcli/fs@1.1.1': dependencies: 
'@gar/promisify': 1.1.3 - semver: 7.5.4 + semver: 7.6.2 + optional: true + + '@npmcli/fs@3.1.1': + dependencies: + semver: 7.6.2 '@npmcli/move-file@1.1.2': dependencies: mkdirp: 1.0.4 rimraf: 3.0.2 + optional: true - '@op-engineering/op-sqlite@2.0.16(react-native@0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3))(react@18.2.0)': + '@op-engineering/op-sqlite@2.0.22(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)': dependencies: - react: 18.2.0 - react-native: 0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3) + react: 18.3.1 + react-native: 0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3) - '@opentelemetry/api@1.4.1': {} + '@opentelemetry/api@1.8.0': {} '@originjs/vite-plugin-commonjs@1.0.3': dependencies: @@ -10764,136 +12292,173 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@planetscale/database@1.16.0': {} + '@planetscale/database@1.18.0': {} + + '@polka/url@1.0.0-next.25': {} + + '@prisma/client@5.14.0(prisma@5.14.0)': + optionalDependencies: + prisma: 5.14.0 + + '@prisma/debug@5.14.0': {} - '@polka/url@1.0.0-next.21': {} + '@prisma/debug@5.16.1': {} - '@react-native-community/cli-clean@12.3.6(encoding@0.1.13)': + '@prisma/engines-version@5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48': {} + + '@prisma/engines@5.14.0': + dependencies: + '@prisma/debug': 5.14.0 + '@prisma/engines-version': 5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48 + '@prisma/fetch-engine': 5.14.0 + '@prisma/get-platform': 5.14.0 + + '@prisma/fetch-engine@5.14.0': + dependencies: + '@prisma/debug': 5.14.0 + 
'@prisma/engines-version': 5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48 + '@prisma/get-platform': 5.14.0 + + '@prisma/generator-helper@5.16.1': + dependencies: + '@prisma/debug': 5.16.1 + + '@prisma/get-platform@5.14.0': + dependencies: + '@prisma/debug': 5.14.0 + + '@react-native-community/cli-clean@13.6.6(encoding@0.1.13)': dependencies: - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) chalk: 4.1.2 execa: 5.1.1 + fast-glob: 3.3.2 transitivePeerDependencies: - encoding - '@react-native-community/cli-config@12.3.6(encoding@0.1.13)': + '@react-native-community/cli-config@13.6.6(encoding@0.1.13)': dependencies: - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) chalk: 4.1.2 cosmiconfig: 5.2.1 deepmerge: 4.3.1 - glob: 7.2.3 - joi: 17.12.3 + fast-glob: 3.3.2 + joi: 17.13.1 transitivePeerDependencies: - encoding - '@react-native-community/cli-debugger-ui@12.3.6': + '@react-native-community/cli-debugger-ui@13.6.6': dependencies: serve-static: 1.15.0 transitivePeerDependencies: - supports-color - '@react-native-community/cli-doctor@12.3.6(encoding@0.1.13)': + '@react-native-community/cli-doctor@13.6.6(encoding@0.1.13)': dependencies: - '@react-native-community/cli-config': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-platform-android': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-platform-ios': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-config': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-platform-android': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-platform-apple': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-platform-ios': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) chalk: 4.1.2 command-exists: 1.2.9 deepmerge: 4.3.1 - envinfo: 7.11.1 + envinfo: 
7.13.0 execa: 5.1.1 hermes-profile-transformer: 0.0.6 node-stream-zip: 1.15.0 ora: 5.4.1 - semver: 7.6.0 + semver: 7.6.2 strip-ansi: 5.2.0 wcwidth: 1.0.1 - yaml: 2.4.1 + yaml: 2.4.2 transitivePeerDependencies: - encoding - '@react-native-community/cli-hermes@12.3.6(encoding@0.1.13)': + '@react-native-community/cli-hermes@13.6.6(encoding@0.1.13)': dependencies: - '@react-native-community/cli-platform-android': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-platform-android': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) chalk: 4.1.2 hermes-profile-transformer: 0.0.6 transitivePeerDependencies: - encoding - '@react-native-community/cli-platform-android@12.3.6(encoding@0.1.13)': + '@react-native-community/cli-platform-android@13.6.6(encoding@0.1.13)': dependencies: - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) chalk: 4.1.2 execa: 5.1.1 - fast-xml-parser: 4.3.6 - glob: 7.2.3 + fast-glob: 3.3.2 + fast-xml-parser: 4.4.0 logkitty: 0.7.1 transitivePeerDependencies: - encoding - '@react-native-community/cli-platform-ios@12.3.6(encoding@0.1.13)': + '@react-native-community/cli-platform-apple@13.6.6(encoding@0.1.13)': dependencies: - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) chalk: 4.1.2 execa: 5.1.1 - fast-xml-parser: 4.3.6 - glob: 7.2.3 + fast-glob: 3.3.2 + fast-xml-parser: 4.4.0 ora: 5.4.1 transitivePeerDependencies: - encoding - '@react-native-community/cli-plugin-metro@12.3.6': {} + '@react-native-community/cli-platform-ios@13.6.6(encoding@0.1.13)': + dependencies: + '@react-native-community/cli-platform-apple': 13.6.6(encoding@0.1.13) + transitivePeerDependencies: + - encoding - '@react-native-community/cli-server-api@12.3.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + 
'@react-native-community/cli-server-api@13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: - '@react-native-community/cli-debugger-ui': 12.3.6 - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) + '@react-native-community/cli-debugger-ui': 13.6.6 + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) compression: 1.7.4 connect: 3.7.0 errorhandler: 1.5.1 nocache: 3.0.4 pretty-format: 26.6.2 serve-static: 1.15.0 - ws: 7.5.9(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) transitivePeerDependencies: - bufferutil - encoding - supports-color - utf-8-validate - '@react-native-community/cli-tools@12.3.6(encoding@0.1.13)': + '@react-native-community/cli-tools@13.6.6(encoding@0.1.13)': dependencies: appdirsjs: 1.2.7 chalk: 4.1.2 + execa: 5.1.1 find-up: 5.0.0 mime: 2.6.0 node-fetch: 2.7.0(encoding@0.1.13) open: 6.4.0 ora: 5.4.1 - semver: 7.6.0 + semver: 7.6.2 shell-quote: 1.8.1 sudo-prompt: 9.2.1 transitivePeerDependencies: - encoding - '@react-native-community/cli-types@12.3.6': + '@react-native-community/cli-types@13.6.6': dependencies: - joi: 17.12.3 + joi: 17.13.1 - '@react-native-community/cli@12.3.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@react-native-community/cli@13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: - '@react-native-community/cli-clean': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-config': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-debugger-ui': 12.3.6 - '@react-native-community/cli-doctor': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-hermes': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-plugin-metro': 12.3.6 - '@react-native-community/cli-server-api': 12.3.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-types': 12.3.6 + '@react-native-community/cli-clean': 
13.6.6(encoding@0.1.13) + '@react-native-community/cli-config': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-debugger-ui': 13.6.6 + '@react-native-community/cli-doctor': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-hermes': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-server-api': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-types': 13.6.6 chalk: 4.1.2 commander: 9.5.0 deepmerge: 4.3.1 @@ -10902,95 +12467,97 @@ snapshots: fs-extra: 8.1.0 graceful-fs: 4.2.11 prompts: 2.4.2 - semver: 7.6.0 + semver: 7.6.2 transitivePeerDependencies: - bufferutil - encoding - supports-color - utf-8-validate - '@react-native/assets-registry@0.73.1': {} + '@react-native/assets-registry@0.74.83': {} - '@react-native/babel-plugin-codegen@0.73.4(@babel/preset-env@7.24.4(@babel/core@7.24.4))': + '@react-native/babel-plugin-codegen@0.74.83(@babel/preset-env@7.24.6(@babel/core@7.24.6))': dependencies: - '@react-native/codegen': 0.73.3(@babel/preset-env@7.24.4(@babel/core@7.24.4)) + '@react-native/codegen': 0.74.83(@babel/preset-env@7.24.6(@babel/core@7.24.6)) transitivePeerDependencies: - '@babel/preset-env' - supports-color - '@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))': - dependencies: - '@babel/core': 7.24.4 - '@babel/plugin-proposal-async-generator-functions': 7.20.7(@babel/core@7.24.4) - '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-export-default-from': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-numeric-separator': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/core@7.24.4) - '@babel/plugin-proposal-optional-catch-binding': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-optional-chaining': 
7.21.0(@babel/core@7.24.4) - '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-export-default-from': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-syntax-flow': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-transform-arrow-functions': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-async-to-generator': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-block-scoping': 7.24.4(@babel/core@7.24.4) - '@babel/plugin-transform-classes': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-computed-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-destructuring': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-flow-strip-types': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-function-name': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-commonjs': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-named-capturing-groups-regex': 7.22.5(@babel/core@7.24.4) - '@babel/plugin-transform-parameters': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-private-methods': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-private-property-in-object': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-react-display-name': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-react-jsx': 7.23.4(@babel/core@7.24.4) - '@babel/plugin-transform-react-jsx-self': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-react-jsx-source': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-runtime': 7.24.3(@babel/core@7.24.4) - '@babel/plugin-transform-shorthand-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-spread': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-sticky-regex': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-typescript': 
7.24.4(@babel/core@7.24.4) - '@babel/plugin-transform-unicode-regex': 7.24.1(@babel/core@7.24.4) - '@babel/template': 7.24.0 - '@react-native/babel-plugin-codegen': 0.73.4(@babel/preset-env@7.24.4(@babel/core@7.24.4)) - babel-plugin-transform-flow-enums: 0.0.2(@babel/core@7.24.4) - react-refresh: 0.14.0 + '@react-native/babel-preset@0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))': + dependencies: + '@babel/core': 7.24.6 + '@babel/plugin-proposal-async-generator-functions': 7.20.7(@babel/core@7.24.6) + '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-proposal-export-default-from': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-proposal-logical-assignment-operators': 7.20.7(@babel/core@7.24.6) + '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-proposal-numeric-separator': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/core@7.24.6) + '@babel/plugin-proposal-optional-catch-binding': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/core@7.24.6) + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-export-default-from': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-syntax-flow': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.24.6) + '@babel/plugin-transform-arrow-functions': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-async-to-generator': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-block-scoping': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-classes': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-computed-properties': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-destructuring': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-flow-strip-types': 7.24.6(@babel/core@7.24.6) + 
'@babel/plugin-transform-function-name': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-literals': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-modules-commonjs': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-named-capturing-groups-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-parameters': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-private-methods': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-private-property-in-object': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-display-name': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-jsx': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-jsx-self': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-react-jsx-source': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-runtime': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-shorthand-properties': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-spread': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-sticky-regex': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-typescript': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-unicode-regex': 7.24.6(@babel/core@7.24.6) + '@babel/template': 7.24.6 + '@react-native/babel-plugin-codegen': 0.74.83(@babel/preset-env@7.24.6(@babel/core@7.24.6)) + babel-plugin-transform-flow-enums: 0.0.2(@babel/core@7.24.6) + react-refresh: 0.14.2 transitivePeerDependencies: - '@babel/preset-env' - supports-color - '@react-native/codegen@0.73.3(@babel/preset-env@7.24.4(@babel/core@7.24.4))': + '@react-native/codegen@0.74.83(@babel/preset-env@7.24.6(@babel/core@7.24.6))': dependencies: - '@babel/parser': 7.24.4 - '@babel/preset-env': 7.24.4(@babel/core@7.24.4) - flow-parser: 0.206.0 + '@babel/parser': 7.24.6 + '@babel/preset-env': 7.24.6(@babel/core@7.24.6) glob: 7.2.3 + hermes-parser: 0.19.1 invariant: 2.2.4 - jscodeshift: 0.14.0(@babel/preset-env@7.24.4(@babel/core@7.24.4)) + jscodeshift: 
0.14.0(@babel/preset-env@7.24.6(@babel/core@7.24.6)) mkdirp: 0.5.6 nullthrows: 1.1.1 transitivePeerDependencies: - supports-color - '@react-native/community-cli-plugin@0.73.17(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@react-native/community-cli-plugin@0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: - '@react-native-community/cli-server-api': 12.3.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - '@react-native-community/cli-tools': 12.3.6(encoding@0.1.13) - '@react-native/dev-middleware': 0.73.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - '@react-native/metro-babel-transformer': 0.73.15(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)) + '@react-native-community/cli-server-api': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) + '@react-native/dev-middleware': 0.74.83(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native/metro-babel-transformer': 0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)) chalk: 4.1.2 execa: 5.1.1 - metro: 0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - metro-config: 0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - metro-core: 0.80.8 + metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-config: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-core: 0.80.9 node-fetch: 2.7.0(encoding@0.1.13) + querystring: 0.2.1 readline: 1.3.0 transitivePeerDependencies: - '@babel/core' @@ -11000,18 +12567,20 @@ snapshots: - supports-color - utf-8-validate - '@react-native/debugger-frontend@0.73.3': {} + '@react-native/debugger-frontend@0.74.83': {} - 
'@react-native/dev-middleware@0.73.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@react-native/dev-middleware@0.74.83(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: '@isaacs/ttlcache': 1.4.1 - '@react-native/debugger-frontend': 0.73.3 + '@react-native/debugger-frontend': 0.74.83 + '@rnx-kit/chromium-edge-launcher': 1.0.0 chrome-launcher: 0.15.2 - chromium-edge-launcher: 1.0.0 connect: 3.7.0 debug: 2.6.9 node-fetch: 2.7.0(encoding@0.1.13) + nullthrows: 1.1.1 open: 7.4.2 + selfsigned: 2.4.1 serve-static: 1.15.0 temp-dir: 2.0.0 ws: 6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) @@ -11021,29 +12590,41 @@ snapshots: - supports-color - utf-8-validate - '@react-native/gradle-plugin@0.73.4': {} + '@react-native/gradle-plugin@0.74.83': {} - '@react-native/js-polyfills@0.73.1': {} + '@react-native/js-polyfills@0.74.83': {} - '@react-native/metro-babel-transformer@0.73.15(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))': + '@react-native/metro-babel-transformer@0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))': dependencies: - '@babel/core': 7.24.4 - '@react-native/babel-preset': 0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)) - hermes-parser: 0.15.0 + '@babel/core': 7.24.6 + '@react-native/babel-preset': 0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)) + hermes-parser: 0.19.1 nullthrows: 1.1.1 transitivePeerDependencies: - '@babel/preset-env' - supports-color - '@react-native/normalize-color@2.1.0': {} - - '@react-native/normalize-colors@0.73.2': {} + '@react-native/normalize-colors@0.74.83': {} - '@react-native/virtualized-lists@0.73.4(react-native@0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3))': + 
'@react-native/virtualized-lists@0.74.83(@types/react@18.3.1)(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)': dependencies: invariant: 2.2.4 nullthrows: 1.1.1 - react-native: 0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3) + react: 18.3.1 + react-native: 0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3) + optionalDependencies: + '@types/react': 18.3.1 + + '@rnx-kit/chromium-edge-launcher@1.0.0': + dependencies: + '@types/node': 18.19.33 + escape-string-regexp: 4.0.0 + is-wsl: 2.2.0 + lighthouse-logger: 1.4.2 + mkdirp: 1.0.4 + rimraf: 3.0.2 + transitivePeerDependencies: + - supports-color '@rollup/plugin-terser@0.4.1(rollup@3.20.7)': dependencies: @@ -11061,20 +12642,20 @@ snapshots: optionalDependencies: rollup: 3.27.2 - '@rollup/plugin-typescript@11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@rollup/plugin-typescript@11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@rollup/pluginutils': 5.0.2(rollup@3.20.7) resolve: 1.22.1 - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) optionalDependencies: rollup: 3.20.7 tslib: 2.6.2 - '@rollup/plugin-typescript@11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@rollup/plugin-typescript@11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@rollup/pluginutils': 5.0.2(rollup@3.27.2) resolve: 1.22.2 - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 
5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) optionalDependencies: rollup: 3.27.2 tslib: 2.6.2 @@ -11095,6 +12676,54 @@ snapshots: optionalDependencies: rollup: 3.27.2 + '@rollup/rollup-android-arm-eabi@4.18.0': + optional: true + + '@rollup/rollup-android-arm64@4.18.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.18.0': + optional: true + + '@rollup/rollup-darwin-x64@4.18.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.18.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.18.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.18.0': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.18.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.18.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.18.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.18.0': + optional: true + '@segment/loosely-validate-event@2.0.0': dependencies: component-type: 1.2.2 @@ -11112,6 +12741,8 @@ snapshots: '@sinclair/typebox@0.29.6': {} + '@sindresorhus/is@4.6.0': {} + '@sinonjs/commons@3.0.1': dependencies: type-detect: 4.0.8 @@ -11125,6 +12756,11 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/abort-controller@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/config-resolver@2.2.0': dependencies: '@smithy/node-config-provider': 2.3.0 @@ -11133,6 +12769,14 @@ snapshots: '@smithy/util-middleware': 2.2.0 tslib: 2.6.2 + '@smithy/config-resolver@3.0.0': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + '@smithy/core@1.4.2': dependencies: 
'@smithy/middleware-endpoint': 2.5.1 @@ -11144,6 +12788,17 @@ snapshots: '@smithy/util-middleware': 2.2.0 tslib: 2.6.2 + '@smithy/core@2.0.1': + dependencies: + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + '@smithy/credential-provider-imds@2.3.0': dependencies: '@smithy/node-config-provider': 2.3.0 @@ -11152,6 +12807,14 @@ snapshots: '@smithy/url-parser': 2.2.0 tslib: 2.6.2 + '@smithy/credential-provider-imds@3.0.0': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + tslib: 2.6.2 + '@smithy/eventstream-codec@2.2.0': dependencies: '@aws-crypto/crc32': 3.0.0 @@ -11190,6 +12853,14 @@ snapshots: '@smithy/util-base64': 2.3.0 tslib: 2.6.2 + '@smithy/fetch-http-handler@3.0.1': + dependencies: + '@smithy/protocol-http': 4.0.0 + '@smithy/querystring-builder': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-base64': 3.0.0 + tslib: 2.6.2 + '@smithy/hash-node@2.2.0': dependencies: '@smithy/types': 2.12.0 @@ -11197,21 +12868,43 @@ snapshots: '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 + '@smithy/hash-node@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + '@smithy/invalid-dependency@2.2.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/invalid-dependency@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/is-array-buffer@2.2.0': dependencies: tslib: 2.6.2 + '@smithy/is-array-buffer@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/middleware-content-length@2.2.0': dependencies: '@smithy/protocol-http': 3.3.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/middleware-content-length@3.0.0': + dependencies: + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + 
tslib: 2.6.2 + '@smithy/middleware-endpoint@2.5.1': dependencies: '@smithy/middleware-serde': 2.3.0 @@ -11222,6 +12915,16 @@ snapshots: '@smithy/util-middleware': 2.2.0 tslib: 2.6.2 + '@smithy/middleware-endpoint@3.0.0': + dependencies: + '@smithy/middleware-serde': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + '@smithy/middleware-retry@2.3.1': dependencies: '@smithy/node-config-provider': 2.3.0 @@ -11234,16 +12937,38 @@ snapshots: tslib: 2.6.2 uuid: 9.0.1 + '@smithy/middleware-retry@3.0.1': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/service-error-classification': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + tslib: 2.6.2 + uuid: 9.0.1 + '@smithy/middleware-serde@2.3.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/middleware-serde@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/middleware-stack@2.2.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/middleware-stack@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/node-config-provider@2.3.0': dependencies: '@smithy/property-provider': 2.2.0 @@ -11251,6 +12976,13 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/node-config-provider@3.0.0': + dependencies: + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/node-http-handler@2.5.0': dependencies: '@smithy/abort-controller': 2.2.0 @@ -11259,37 +12991,75 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/node-http-handler@3.0.0': + dependencies: + '@smithy/abort-controller': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/querystring-builder': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + 
'@smithy/property-provider@2.2.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/property-provider@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/protocol-http@3.3.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/protocol-http@4.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/querystring-builder@2.2.0': dependencies: '@smithy/types': 2.12.0 '@smithy/util-uri-escape': 2.2.0 tslib: 2.6.2 + '@smithy/querystring-builder@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + '@smithy/util-uri-escape': 3.0.0 + tslib: 2.6.2 + '@smithy/querystring-parser@2.2.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/querystring-parser@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/service-error-classification@2.1.5': dependencies: '@smithy/types': 2.12.0 + '@smithy/service-error-classification@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + '@smithy/shared-ini-file-loader@2.4.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@smithy/signature-v4@2.2.1': + '@smithy/shared-ini-file-loader@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/signature-v4@2.3.0': dependencies: '@smithy/is-array-buffer': 2.2.0 '@smithy/types': 2.12.0 @@ -11299,6 +13069,16 @@ snapshots: '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 + '@smithy/signature-v4@3.0.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-uri-escape': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + '@smithy/smithy-client@2.5.1': dependencies: '@smithy/middleware-endpoint': 2.5.1 @@ -11308,14 +13088,33 @@ snapshots: '@smithy/util-stream': 2.2.0 tslib: 2.6.2 + '@smithy/smithy-client@3.0.1': + dependencies: + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-stream': 
3.0.1 + tslib: 2.6.2 + '@smithy/types@2.12.0': dependencies: tslib: 2.6.2 + '@smithy/types@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/url-parser@2.2.0': dependencies: - '@smithy/querystring-parser': 2.2.0 - '@smithy/types': 2.12.0 + '@smithy/querystring-parser': 2.2.0 + '@smithy/types': 2.12.0 + tslib: 2.6.2 + + '@smithy/url-parser@3.0.0': + dependencies: + '@smithy/querystring-parser': 3.0.0 + '@smithy/types': 3.0.0 tslib: 2.6.2 '@smithy/util-base64@2.3.0': @@ -11324,23 +13123,46 @@ snapshots: '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 + '@smithy/util-base64@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + '@smithy/util-body-length-browser@2.2.0': dependencies: tslib: 2.6.2 + '@smithy/util-body-length-browser@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/util-body-length-node@2.3.0': dependencies: tslib: 2.6.2 + '@smithy/util-body-length-node@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/util-buffer-from@2.2.0': dependencies: '@smithy/is-array-buffer': 2.2.0 tslib: 2.6.2 + '@smithy/util-buffer-from@3.0.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + tslib: 2.6.2 + '@smithy/util-config-provider@2.3.0': dependencies: tslib: 2.6.2 + '@smithy/util-config-provider@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/util-defaults-mode-browser@2.2.1': dependencies: '@smithy/property-provider': 2.2.0 @@ -11349,6 +13171,14 @@ snapshots: bowser: 2.11.0 tslib: 2.6.2 + '@smithy/util-defaults-mode-browser@3.0.1': + dependencies: + '@smithy/property-provider': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + bowser: 2.11.0 + tslib: 2.6.2 + '@smithy/util-defaults-mode-node@2.3.1': dependencies: '@smithy/config-resolver': 2.2.0 @@ -11359,27 +13189,58 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/util-defaults-mode-node@3.0.1': + dependencies: + '@smithy/config-resolver': 3.0.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + 
'@smithy/property-provider': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/util-endpoints@1.2.0': dependencies: '@smithy/node-config-provider': 2.3.0 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/util-endpoints@2.0.0': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/util-hex-encoding@2.2.0': dependencies: tslib: 2.6.2 + '@smithy/util-hex-encoding@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/util-middleware@2.2.0': dependencies: '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/util-middleware@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/util-retry@2.2.0': dependencies: '@smithy/service-error-classification': 2.1.5 '@smithy/types': 2.12.0 tslib: 2.6.2 + '@smithy/util-retry@3.0.0': + dependencies: + '@smithy/service-error-classification': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + '@smithy/util-stream@2.2.0': dependencies: '@smithy/fetch-http-handler': 2.5.0 @@ -11391,15 +13252,35 @@ snapshots: '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 + '@smithy/util-stream@3.0.1': + dependencies: + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/node-http-handler': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + '@smithy/util-uri-escape@2.2.0': dependencies: tslib: 2.6.2 + '@smithy/util-uri-escape@3.0.0': + dependencies: + tslib: 2.6.2 + '@smithy/util-utf8@2.3.0': dependencies: '@smithy/util-buffer-from': 2.2.0 tslib: 2.6.2 + '@smithy/util-utf8@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + tslib: 2.6.2 + '@smithy/util-waiter@2.2.0': dependencies: '@smithy/abort-controller': 2.2.0 @@ -11423,62 +13304,74 @@ snapshots: transitivePeerDependencies: - supports-color + '@tsconfig/node10@1.0.11': {} + + '@tsconfig/node12@1.0.11': {} + + '@tsconfig/node14@1.0.3': {} + + '@tsconfig/node16@1.0.4': {} + + 
'@types/async-retry@1.4.8': + dependencies: + '@types/retry': 0.12.5 + '@types/axios@0.14.0': dependencies: - axios: 1.4.0 + axios: 1.6.8 transitivePeerDependencies: - debug - '@types/better-sqlite3@7.6.4': - dependencies: - '@types/node': 20.8.7 - - '@types/body-parser@1.19.2': + '@types/better-sqlite3@7.6.10': dependencies: - '@types/connect': 3.4.35 - '@types/node': 20.8.7 + '@types/node': 20.12.12 - '@types/chai-subset@1.3.3': + '@types/body-parser@1.19.5': dependencies: - '@types/chai': 4.3.5 - - '@types/chai@4.3.5': {} + '@types/connect': 3.4.38 + '@types/node': 20.12.12 - '@types/connect@3.4.35': + '@types/connect@3.4.38': dependencies: - '@types/node': 20.8.7 + '@types/node': 20.12.12 - '@types/docker-modem@3.0.2': + '@types/docker-modem@3.0.6': dependencies: - '@types/node': 20.8.7 - '@types/ssh2': 1.11.11 + '@types/node': 20.12.12 + '@types/ssh2': 1.15.0 - '@types/dockerode@3.3.18': + '@types/dockerode@3.3.29': dependencies: - '@types/docker-modem': 3.0.2 - '@types/node': 20.8.7 + '@types/docker-modem': 3.0.6 + '@types/node': 20.12.12 + '@types/ssh2': 1.15.0 - '@types/emscripten@1.39.6': {} + '@types/emscripten@1.39.11': {} '@types/estree@1.0.1': {} - '@types/express-serve-static-core@4.17.33': + '@types/estree@1.0.5': {} + + '@types/express-serve-static-core@4.19.0': dependencies: - '@types/node': 20.8.7 - '@types/qs': 6.9.7 - '@types/range-parser': 1.2.4 + '@types/node': 20.12.12 + '@types/qs': 6.9.15 + '@types/range-parser': 1.2.7 + '@types/send': 0.17.4 - '@types/express@4.17.17': + '@types/express@4.17.21': dependencies: - '@types/body-parser': 1.19.2 - '@types/express-serve-static-core': 4.17.33 - '@types/qs': 6.9.7 - '@types/serve-static': 1.15.1 + '@types/body-parser': 1.19.5 + '@types/express-serve-static-core': 4.19.0 + '@types/qs': 6.9.15 + '@types/serve-static': 1.15.7 - '@types/fs-extra@11.0.1': + '@types/fs-extra@11.0.4': dependencies: - '@types/jsonfile': 6.1.1 - '@types/node': 20.8.7 + '@types/jsonfile': 6.1.4 + '@types/node': 20.12.12 + + 
'@types/http-errors@2.0.4': {} '@types/istanbul-lib-coverage@2.0.6': {} @@ -11494,92 +13387,97 @@ snapshots: '@types/json5@0.0.29': {} - '@types/jsonfile@6.1.1': + '@types/jsonfile@6.1.4': dependencies: - '@types/node': 20.8.7 + '@types/node': 20.12.12 - '@types/mime@3.0.1': {} + '@types/mime@1.3.5': {} '@types/minimist@1.2.2': {} '@types/node-fetch@2.6.11': dependencies: - '@types/node': 20.10.1 + '@types/node': 20.12.12 form-data: 4.0.0 - '@types/node@18.15.10': {} + '@types/node-forge@1.3.11': + dependencies: + '@types/node': 20.12.12 - '@types/node@18.16.16': {} + '@types/node@18.15.10': {} - '@types/node@20.10.1': + '@types/node@18.19.33': dependencies: undici-types: 5.26.5 - '@types/node@20.12.4': + '@types/node@20.10.1': dependencies: undici-types: 5.26.5 - '@types/node@20.2.5': {} - - '@types/node@20.8.7': + '@types/node@20.12.12': dependencies: - undici-types: 5.25.3 + undici-types: 5.26.5 '@types/normalize-package-data@2.4.1': {} - '@types/pg@8.10.1': + '@types/pg@8.11.6': dependencies: - '@types/node': 20.8.7 - pg-protocol: 1.6.0 - pg-types: 4.0.1 + '@types/node': 20.12.12 + pg-protocol: 1.6.1 + pg-types: 4.0.2 '@types/pg@8.6.6': dependencies: - '@types/node': 20.10.1 - pg-protocol: 1.6.0 + '@types/node': 20.12.12 + pg-protocol: 1.6.1 pg-types: 2.2.0 - '@types/prop-types@15.7.11': {} + '@types/prop-types@15.7.12': {} '@types/ps-tree@1.1.2': {} - '@types/qs@6.9.7': {} + '@types/qs@6.9.15': {} - '@types/range-parser@1.2.4': {} + '@types/range-parser@1.2.7': {} - '@types/react@18.2.45': + '@types/react@18.3.1': dependencies: - '@types/prop-types': 15.7.11 - '@types/scheduler': 0.16.8 + '@types/prop-types': 15.7.12 csstype: 3.1.3 - '@types/scheduler@0.16.8': {} + '@types/retry@0.12.5': {} '@types/semver@7.5.3': {} - '@types/serve-static@1.15.1': + '@types/send@0.17.4': + dependencies: + '@types/mime': 1.3.5 + '@types/node': 20.12.12 + + '@types/serve-static@1.15.7': dependencies: - '@types/mime': 3.0.1 - '@types/node': 20.8.7 + '@types/http-errors': 2.0.4 + 
'@types/node': 20.12.12 + '@types/send': 0.17.4 - '@types/sql.js@1.4.4': + '@types/sql.js@1.4.9': dependencies: - '@types/emscripten': 1.39.6 - '@types/node': 20.8.7 + '@types/emscripten': 1.39.11 + '@types/node': 20.12.12 - '@types/ssh2@1.11.11': + '@types/ssh2@1.15.0': dependencies: - '@types/node': 18.16.16 + '@types/node': 18.19.33 '@types/stack-utils@2.0.3': {} - '@types/uuid@9.0.1': {} + '@types/uuid@9.0.8': {} '@types/which@3.0.0': {} '@types/ws@8.5.4': dependencies: - '@types/node': 20.10.1 + '@types/node': 20.12.12 '@types/yargs-parser@21.0.3': {} @@ -11591,13 +13489,13 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 - '@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@eslint-community/regexpp': 4.9.0 - '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@typescript-eslint/scope-manager': 6.7.3 - '@typescript-eslint/type-utils': 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) - '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/type-utils': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@typescript-eslint/visitor-keys': 6.7.3 debug: 4.3.4 eslint: 8.50.0 @@ -11605,51 +13503,51 @@ snapshots: ignore: 5.2.4 natural-compare: 1.4.0 semver: 7.5.4 - 
ts-api-utils: 1.0.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color - '@typescript-eslint/experimental-utils@5.62.0(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/experimental-utils@5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: - '@typescript-eslint/utils': 5.62.0(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/utils': 5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint: 8.50.0 transitivePeerDependencies: - supports-color - typescript - '@typescript-eslint/parser@6.10.0(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/parser@6.10.0(eslint@8.53.0)(typescript@5.2.2)': dependencies: '@typescript-eslint/scope-manager': 6.10.0 '@typescript-eslint/types': 6.10.0 - '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2) '@typescript-eslint/visitor-keys': 6.10.0 debug: 4.3.4 eslint: 8.53.0 optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.2.2 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@typescript-eslint/scope-manager': 6.7.3 '@typescript-eslint/types': 6.7.3 - '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + 
'@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) '@typescript-eslint/visitor-keys': 6.7.3 debug: 4.3.4 eslint: 8.50.0 optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color - '@typescript-eslint/rule-tester@6.10.0(@eslint/eslintrc@3.0.2)(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/rule-tester@6.10.0(@eslint/eslintrc@3.1.0)(eslint@8.53.0)(typescript@5.2.2)': dependencies: - '@eslint/eslintrc': 3.0.2 - '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) - '@typescript-eslint/utils': 6.10.0(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@eslint/eslintrc': 3.1.0 + '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2) + '@typescript-eslint/utils': 6.10.0(eslint@8.53.0)(typescript@5.2.2) ajv: 6.12.6 eslint: 8.53.0 lodash.merge: 4.6.2 @@ -11673,15 +13571,15 @@ snapshots: '@typescript-eslint/types': 6.7.3 '@typescript-eslint/visitor-keys': 6.7.3 - '@typescript-eslint/type-utils@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/type-utils@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: - '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) - '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) debug: 4.3.4 eslint: 8.50.0 - ts-api-utils: 1.0.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + ts-api-utils: 
1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color @@ -11691,7 +13589,7 @@ snapshots: '@typescript-eslint/types@6.7.3': {} - '@typescript-eslint/typescript-estree@5.62.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@typescript-eslint/types': 5.62.0 '@typescript-eslint/visitor-keys': 5.62.0 @@ -11699,13 +13597,13 @@ snapshots: globby: 11.1.0 is-glob: 4.0.3 semver: 7.5.4 - tsutils: 3.21.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + tsutils: 3.21.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@6.10.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/typescript-estree@6.10.0(typescript@5.2.2)': dependencies: '@typescript-eslint/types': 6.10.0 '@typescript-eslint/visitor-keys': 6.10.0 @@ -11713,13 +13611,13 @@ snapshots: globby: 11.1.0 is-glob: 4.0.3 semver: 7.5.4 - ts-api-utils: 1.0.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + ts-api-utils: 1.0.3(typescript@5.2.2) optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.2.2 transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@6.7.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/typescript-estree@6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@typescript-eslint/types': 6.7.3 '@typescript-eslint/visitor-keys': 6.7.3 @@ -11727,20 +13625,20 @@ snapshots: globby: 11.1.0 
is-glob: 4.0.3 semver: 7.5.4 - ts-api-utils: 1.0.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@5.62.0(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/utils@5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) '@types/json-schema': 7.0.13 '@types/semver': 7.5.3 '@typescript-eslint/scope-manager': 5.62.0 '@typescript-eslint/types': 5.62.0 - '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint: 8.50.0 eslint-scope: 5.1.1 semver: 7.5.4 @@ -11748,28 +13646,28 @@ snapshots: - supports-color - typescript - '@typescript-eslint/utils@6.10.0(eslint@8.53.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/utils@6.10.0(eslint@8.53.0)(typescript@5.2.2)': dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.53.0) '@types/json-schema': 7.0.13 '@types/semver': 7.5.3 '@typescript-eslint/scope-manager': 6.10.0 '@typescript-eslint/types': 6.10.0 - '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2) eslint: 8.53.0 semver: 7.5.4 transitivePeerDependencies: - supports-color - typescript - '@typescript-eslint/utils@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@typescript-eslint/utils@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: 
'@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) '@types/json-schema': 7.0.13 '@types/semver': 7.5.3 '@typescript-eslint/scope-manager': 6.7.3 '@typescript-eslint/types': 6.7.3 - '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint: 8.50.0 semver: 7.5.4 transitivePeerDependencies: @@ -11791,7 +13689,7 @@ snapshots: '@typescript-eslint/types': 6.7.3 eslint-visitor-keys: 3.4.3 - '@typescript/analyze-trace@0.10.0': + '@typescript/analyze-trace@0.10.1': dependencies: chalk: 4.1.2 exit: 0.1.2 @@ -11816,13 +13714,6 @@ snapshots: graphql: 15.8.0 wonka: 4.0.15 - '@vercel/postgres@0.3.0': - dependencies: - '@neondatabase/serverless': 0.4.3 - bufferutil: 4.0.7 - utf-8-validate: 6.0.3 - ws: 8.13.0(bufferutil@4.0.7)(utf-8-validate@6.0.3) - '@vercel/postgres@0.8.0': dependencies: '@neondatabase/serverless': 0.7.2 @@ -11830,95 +13721,56 @@ snapshots: utf-8-validate: 6.0.3 ws: 8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) - '@vitest/expect@0.31.4': - dependencies: - '@vitest/spy': 0.31.4 - '@vitest/utils': 0.31.4 - chai: 4.3.7 - - '@vitest/expect@0.34.6': - dependencies: - '@vitest/spy': 0.34.6 - '@vitest/utils': 0.34.6 - chai: 4.3.10 - - '@vitest/runner@0.31.4': - dependencies: - '@vitest/utils': 0.31.4 - concordance: 5.0.4 - p-limit: 4.0.0 - pathe: 1.1.1 - - '@vitest/runner@0.34.6': + '@vitest/expect@1.6.0': dependencies: - '@vitest/utils': 0.34.6 - p-limit: 4.0.0 - pathe: 1.1.1 + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + chai: 4.4.1 - '@vitest/snapshot@0.31.4': + '@vitest/runner@1.6.0': dependencies: - magic-string: 0.30.0 - pathe: 1.1.1 - pretty-format: 27.5.1 + '@vitest/utils': 1.6.0 + p-limit: 5.0.0 + pathe: 1.1.2 - '@vitest/snapshot@0.34.6': + '@vitest/snapshot@1.6.0': dependencies: - magic-string: 0.30.5 - pathe: 1.1.1 + magic-string: 0.30.10 + pathe: 1.1.2 pretty-format: 29.7.0 - 
'@vitest/spy@0.31.4': + '@vitest/spy@1.6.0': dependencies: - tinyspy: 2.1.1 + tinyspy: 2.2.1 - '@vitest/spy@0.34.6': + '@vitest/ui@1.6.0(vitest@1.6.0)': dependencies: - tinyspy: 2.1.1 - - '@vitest/ui@0.31.4(vitest@0.31.4)': - dependencies: - '@vitest/utils': 0.31.4 - fast-glob: 3.2.12 - fflate: 0.7.4 - flatted: 3.2.7 - pathe: 1.1.1 - picocolors: 1.0.0 - sirv: 2.0.3 - vitest: 0.31.4(@vitest/ui@0.31.4)(terser@5.30.3) - - '@vitest/ui@0.31.4(vitest@0.34.6)': - dependencies: - '@vitest/utils': 0.31.4 - fast-glob: 3.2.12 - fflate: 0.7.4 - flatted: 3.2.7 - pathe: 1.1.1 - picocolors: 1.0.0 - sirv: 2.0.3 - vitest: 0.34.6(@vitest/ui@0.31.4)(terser@5.30.3) - optional: true - - '@vitest/utils@0.31.4': - dependencies: - concordance: 5.0.4 - loupe: 2.3.6 - pretty-format: 27.5.1 + '@vitest/utils': 1.6.0 + fast-glob: 3.3.2 + fflate: 0.8.2 + flatted: 3.3.1 + pathe: 1.1.2 + picocolors: 1.0.1 + sirv: 2.0.4 + vitest: 1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) - '@vitest/utils@0.34.6': + '@vitest/utils@1.6.0': dependencies: diff-sequences: 29.6.3 - loupe: 2.3.6 + estree-walker: 3.0.3 + loupe: 2.3.7 pretty-format: 29.7.0 - '@xata.io/client@0.29.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))': + '@xata.io/client@0.29.4(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) '@xmldom/xmldom@0.7.13': {} '@xmldom/xmldom@0.8.10': {} - abbrev@1.1.1: {} + abbrev@1.1.1: + optional: true abort-controller@3.0.0: dependencies: @@ -11937,7 +13789,7 @@ snapshots: dependencies: acorn: 8.11.3 - acorn-walk@8.2.0: {} + acorn-walk@8.3.2: {} acorn@8.10.0: {} @@ -11951,13 +13803,9 @@ snapshots: transitivePeerDependencies: - supports-color - agentkeepalive@4.3.0: + agentkeepalive@4.5.0: dependencies: - debug: 4.3.4 - depd: 2.0.0 humanize-ms: 1.2.1 - transitivePeerDependencies: - - supports-color optional: true 
aggregate-error@3.1.0: @@ -12026,12 +13874,8 @@ snapshots: application-config-path@0.1.1: {} - aproba@2.0.0: {} - - are-we-there-yet@2.0.0: - dependencies: - delegates: 1.0.0 - readable-stream: 3.6.2 + aproba@2.0.0: + optional: true are-we-there-yet@3.0.1: dependencies: @@ -12039,6 +13883,8 @@ snapshots: readable-stream: 3.6.2 optional: true + arg@4.1.3: {} + arg@5.0.2: {} argparse@1.0.10: @@ -12054,6 +13900,11 @@ snapshots: call-bind: 1.0.2 is-array-buffer: 3.0.2 + array-buffer-byte-length@1.0.1: + dependencies: + call-bind: 1.0.7 + is-array-buffer: 3.0.4 + array-find-index@1.0.2: {} array-flatten@1.1.1: {} @@ -12099,6 +13950,17 @@ snapshots: is-array-buffer: 3.0.2 is-shared-array-buffer: 1.0.2 + arraybuffer.prototype.slice@1.0.3: + dependencies: + array-buffer-byte-length: 1.0.1 + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + is-array-buffer: 3.0.4 + is-shared-array-buffer: 1.0.3 + arrgv@1.0.2: {} arrify@3.0.0: {} @@ -12109,14 +13971,6 @@ snapshots: dependencies: safer-buffer: 2.1.2 - assert@2.1.0: - dependencies: - call-bind: 1.0.2 - is-nan: 1.3.2 - object-is: 1.1.5 - object.assign: 4.1.4 - util: 0.12.5 - assertion-error@1.1.0: {} ast-types@0.15.2: @@ -12131,73 +13985,27 @@ snapshots: async-limiter@1.0.1: {} + async-retry@1.3.3: + dependencies: + retry: 0.13.1 + asynckit@0.4.0: {} at-least-node@1.0.0: {} - ava@5.2.0: - dependencies: - acorn: 8.8.2 - acorn-walk: 8.2.0 - ansi-styles: 6.2.1 - arrgv: 1.0.2 - arrify: 3.0.0 - callsites: 4.0.0 - cbor: 8.1.0 - chalk: 5.2.0 - chokidar: 3.5.3 - chunkd: 2.0.1 - ci-info: 3.8.0 - ci-parallel-vars: 1.0.1 - clean-yaml-object: 0.1.0 - cli-truncate: 3.1.0 - code-excerpt: 4.0.0 - common-path-prefix: 3.0.0 - concordance: 5.0.4 - currently-unhandled: 0.4.1 - debug: 4.3.4 - del: 7.0.0 - emittery: 1.0.1 - figures: 5.0.0 - globby: 13.1.3 - ignore-by-default: 2.1.0 - indent-string: 5.0.0 - is-error: 2.2.2 - is-plain-object: 5.0.0 - is-promise: 4.0.0 - matcher: 5.0.0 - mem: 
9.0.2 - ms: 2.1.3 - p-event: 5.0.1 - p-map: 5.5.0 - picomatch: 2.3.1 - pkg-conf: 4.0.0 - plur: 5.1.0 - pretty-ms: 8.0.0 - resolve-cwd: 3.0.0 - slash: 3.0.0 - stack-utils: 2.0.6 - strip-ansi: 7.0.1 - supertap: 3.0.1 - temp-dir: 3.0.0 - write-file-atomic: 5.0.0 - yargs: 17.7.1 - transitivePeerDependencies: - - supports-color - - ava@5.3.0: + ava@5.3.0(@ava/typescript@5.0.0): dependencies: - acorn: 8.8.2 - acorn-walk: 8.2.0 + acorn: 8.11.3 + acorn-walk: 8.3.2 ansi-styles: 6.2.1 arrgv: 1.0.2 arrify: 3.0.0 - callsites: 4.0.0 + callsites: 4.1.0 cbor: 8.1.0 - chalk: 5.2.0 + chalk: 5.3.0 chokidar: 3.5.3 chunkd: 2.0.1 - ci-info: 3.8.0 + ci-info: 3.9.0 ci-parallel-vars: 1.0.1 clean-yaml-object: 0.1.0 cli-truncate: 3.1.0 @@ -12206,9 +14014,9 @@ snapshots: concordance: 5.0.4 currently-unhandled: 0.4.1 debug: 4.3.4 - emittery: 1.0.1 + emittery: 1.0.3 figures: 5.0.0 - globby: 13.1.4 + globby: 13.2.2 ignore-by-default: 2.1.0 indent-string: 5.0.0 is-error: 2.2.2 @@ -12230,103 +14038,77 @@ snapshots: temp-dir: 3.0.0 write-file-atomic: 5.0.1 yargs: 17.7.2 + optionalDependencies: + '@ava/typescript': 5.0.0 transitivePeerDependencies: - supports-color available-typed-arrays@1.0.5: {} - axios@1.4.0: + available-typed-arrays@1.0.7: + dependencies: + possible-typed-array-names: 1.0.0 + + axios@1.6.8: dependencies: - follow-redirects: 1.15.2 + follow-redirects: 1.15.6 form-data: 4.0.0 proxy-from-env: 1.1.0 transitivePeerDependencies: - debug - babel-core@7.0.0-bridge.0(@babel/core@7.24.4): + babel-core@7.0.0-bridge.0(@babel/core@7.24.6): dependencies: - '@babel/core': 7.24.4 + '@babel/core': 7.24.6 - babel-plugin-polyfill-corejs2@0.4.10(@babel/core@7.24.4): + babel-plugin-polyfill-corejs2@0.4.11(@babel/core@7.24.6): dependencies: - '@babel/compat-data': 7.24.4 - '@babel/core': 7.24.4 - '@babel/helper-define-polyfill-provider': 0.6.1(@babel/core@7.24.4) + '@babel/compat-data': 7.24.6 + '@babel/core': 7.24.6 + '@babel/helper-define-polyfill-provider': 0.6.2(@babel/core@7.24.6) semver: 6.3.1 
transitivePeerDependencies: - supports-color - babel-plugin-polyfill-corejs3@0.10.4(@babel/core@7.24.4): + babel-plugin-polyfill-corejs3@0.10.4(@babel/core@7.24.6): dependencies: - '@babel/core': 7.24.4 - '@babel/helper-define-polyfill-provider': 0.6.1(@babel/core@7.24.4) - core-js-compat: 3.36.1 + '@babel/core': 7.24.6 + '@babel/helper-define-polyfill-provider': 0.6.2(@babel/core@7.24.6) + core-js-compat: 3.37.1 transitivePeerDependencies: - supports-color - babel-plugin-polyfill-regenerator@0.6.1(@babel/core@7.24.4): + babel-plugin-polyfill-regenerator@0.6.2(@babel/core@7.24.6): dependencies: - '@babel/core': 7.24.4 - '@babel/helper-define-polyfill-provider': 0.6.1(@babel/core@7.24.4) + '@babel/core': 7.24.6 + '@babel/helper-define-polyfill-provider': 0.6.2(@babel/core@7.24.6) transitivePeerDependencies: - supports-color - babel-plugin-react-native-web@0.18.12: {} - - babel-plugin-syntax-trailing-function-commas@7.0.0-beta.0: {} + babel-plugin-react-native-web@0.19.12: {} - babel-plugin-transform-flow-enums@0.0.2(@babel/core@7.24.4): + babel-plugin-transform-flow-enums@0.0.2(@babel/core@7.24.6): dependencies: - '@babel/plugin-syntax-flow': 7.24.1(@babel/core@7.24.4) + '@babel/plugin-syntax-flow': 7.24.6(@babel/core@7.24.6) transitivePeerDependencies: - '@babel/core' - babel-preset-expo@10.0.1(@babel/core@7.24.4): - dependencies: - '@babel/plugin-proposal-decorators': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-export-namespace-from': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-object-rest-spread': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-parameters': 7.24.1(@babel/core@7.24.4) - '@babel/preset-env': 7.24.4(@babel/core@7.24.4) - '@babel/preset-react': 7.24.1(@babel/core@7.24.4) - '@react-native/babel-preset': 0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)) - babel-plugin-react-native-web: 0.18.12 - react-refresh: 0.14.0 + 
babel-preset-expo@11.0.6(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)): + dependencies: + '@babel/plugin-proposal-decorators': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-export-namespace-from': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-object-rest-spread': 7.24.6(@babel/core@7.24.6) + '@babel/plugin-transform-parameters': 7.24.6(@babel/core@7.24.6) + '@babel/preset-react': 7.24.6(@babel/core@7.24.6) + '@babel/preset-typescript': 7.24.6(@babel/core@7.24.6) + '@react-native/babel-preset': 0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)) + babel-plugin-react-native-web: 0.19.12 + react-refresh: 0.14.2 transitivePeerDependencies: - '@babel/core' + - '@babel/preset-env' - supports-color - babel-preset-fbjs@3.4.0(@babel/core@7.24.4): - dependencies: - '@babel/core': 7.24.4 - '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/core@7.24.4) - '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.24.4) - '@babel/plugin-syntax-flow': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-syntax-jsx': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.4) - '@babel/plugin-transform-arrow-functions': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-block-scoped-functions': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-block-scoping': 7.24.4(@babel/core@7.24.4) - '@babel/plugin-transform-classes': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-computed-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-destructuring': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-flow-strip-types': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-for-of': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-function-name': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-literals': 7.24.1(@babel/core@7.24.4) - 
'@babel/plugin-transform-member-expression-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-modules-commonjs': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-object-super': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-parameters': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-property-literals': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-react-display-name': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-react-jsx': 7.23.4(@babel/core@7.24.4) - '@babel/plugin-transform-shorthand-properties': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-spread': 7.24.1(@babel/core@7.24.4) - '@babel/plugin-transform-template-literals': 7.24.1(@babel/core@7.24.4) - babel-plugin-syntax-trailing-function-commas: 7.0.0-beta.0 - balanced-match@1.0.2: {} base64-js@1.5.1: {} @@ -12339,10 +14121,16 @@ snapshots: dependencies: open: 8.4.2 - better-sqlite3@8.4.0: + better-sqlite3@10.0.0: + dependencies: + bindings: 1.5.0 + prebuild-install: 7.1.2 + optional: true + + better-sqlite3@8.7.0: dependencies: bindings: 1.5.0 - prebuild-install: 7.1.1 + prebuild-install: 7.1.2 big-integer@1.6.52: {} @@ -12360,7 +14148,7 @@ snapshots: blueimp-md5@2.19.0: {} - body-parser@1.20.1: + body-parser@1.20.2: dependencies: bytes: 3.1.2 content-type: 1.0.5 @@ -12371,7 +14159,7 @@ snapshots: iconv-lite: 0.4.24 on-finished: 2.4.1 qs: 6.11.0 - raw-body: 2.5.1 + raw-body: 2.5.2 type-is: 1.6.18 unpipe: 1.0.0 transitivePeerDependencies: @@ -12404,12 +14192,16 @@ snapshots: dependencies: fill-range: 7.0.1 + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + browserslist@4.23.0: dependencies: - caniuse-lite: 1.0.30001605 - electron-to-chromium: 1.4.727 + caniuse-lite: 1.0.30001624 + electron-to-chromium: 1.4.783 node-releases: 2.0.14 - update-browserslist-db: 1.0.13(browserslist@4.23.0) + update-browserslist-db: 1.0.16(browserslist@4.23.0) bser@2.1.1: dependencies: @@ -12426,20 +14218,14 @@ snapshots: buffer-from@1.1.2: {} - buffer-writer@2.0.0: {} - 
buffer@5.7.1: dependencies: base64-js: 1.5.1 ieee754: 1.2.1 - bufferutil@4.0.7: - dependencies: - node-gyp-build: 4.6.0 - bufferutil@4.0.8: dependencies: - node-gyp-build: 4.6.0 + node-gyp-build: 4.8.1 buildcheck@0.0.6: optional: true @@ -12448,11 +14234,11 @@ snapshots: builtins@1.0.3: {} - builtins@5.0.1: + builtins@5.1.0: dependencies: - semver: 7.5.4 + semver: 7.6.1 - bun-types@0.6.6: {} + bun-types@0.6.14: {} bun-types@1.0.3: {} @@ -12489,16 +14275,40 @@ snapshots: promise-inflight: 1.0.1 rimraf: 3.0.2 ssri: 8.0.1 - tar: 6.1.13 + tar: 6.2.1 unique-filename: 1.1.1 transitivePeerDependencies: - bluebird + optional: true + + cacache@18.0.3: + dependencies: + '@npmcli/fs': 3.1.1 + fs-minipass: 3.0.3 + glob: 10.4.1 + lru-cache: 10.2.2 + minipass: 7.1.2 + minipass-collect: 2.0.1 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + p-map: 4.0.0 + ssri: 10.0.6 + tar: 6.2.1 + unique-filename: 3.0.0 call-bind@1.0.2: dependencies: function-bind: 1.1.1 get-intrinsic: 1.2.1 + call-bind@1.0.7: + dependencies: + es-define-property: 1.0.0 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + set-function-length: 1.2.2 + caller-callsite@2.0.0: dependencies: callsites: 2.0.0 @@ -12511,7 +14321,7 @@ snapshots: callsites@3.1.0: {} - callsites@4.0.0: {} + callsites@4.1.0: {} camelcase@5.3.1: {} @@ -12519,7 +14329,7 @@ snapshots: camelcase@7.0.1: {} - caniuse-lite@1.0.30001605: {} + caniuse-lite@1.0.30001624: {} cardinal@2.1.1: dependencies: @@ -12530,23 +14340,13 @@ snapshots: dependencies: nofilter: 3.1.0 - chai@4.3.10: + chai@4.4.1: dependencies: assertion-error: 1.1.0 check-error: 1.0.3 deep-eql: 4.1.3 get-func-name: 2.0.2 - loupe: 2.3.6 - pathval: 1.1.1 - type-detect: 4.0.8 - - chai@4.3.7: - dependencies: - assertion-error: 1.1.0 - check-error: 1.0.2 - deep-eql: 4.1.3 - get-func-name: 2.0.0 - loupe: 2.3.6 + loupe: 2.3.7 pathval: 1.1.1 type-detect: 4.0.8 @@ -12561,13 +14361,11 @@ snapshots: ansi-styles: 4.3.0 supports-color: 7.2.0 - chalk@5.2.0: {} - chalk@5.3.0: 
{} - charenc@0.0.2: {} + char-regex@1.0.2: {} - check-error@1.0.2: {} + charenc@0.0.2: {} check-error@1.0.3: dependencies: @@ -12576,7 +14374,7 @@ snapshots: chokidar@3.5.3: dependencies: anymatch: 3.1.3 - braces: 3.0.2 + braces: 3.0.3 glob-parent: 5.1.2 is-binary-path: 2.1.0 is-glob: 4.0.3 @@ -12591,21 +14389,10 @@ snapshots: chrome-launcher@0.15.2: dependencies: - '@types/node': 20.12.4 - escape-string-regexp: 4.0.0 - is-wsl: 2.2.0 - lighthouse-logger: 1.4.2 - transitivePeerDependencies: - - supports-color - - chromium-edge-launcher@1.0.0: - dependencies: - '@types/node': 20.12.4 + '@types/node': 20.12.12 escape-string-regexp: 4.0.0 is-wsl: 2.2.0 lighthouse-logger: 1.4.2 - mkdirp: 1.0.4 - rimraf: 3.0.2 transitivePeerDependencies: - supports-color @@ -12704,7 +14491,8 @@ snapshots: color-name@1.1.4: {} - color-support@1.1.3: {} + color-support@1.1.3: + optional: true colorette@1.4.0: {} @@ -12775,6 +14563,8 @@ snapshots: tree-kill: 1.2.2 yargs: 17.7.2 + confbox@0.1.7: {} + connect@3.7.0: dependencies: debug: 2.6.9 @@ -12784,7 +14574,8 @@ snapshots: transitivePeerDependencies: - supports-color - console-control-strings@1.1.0: {} + console-control-strings@1.1.0: + optional: true content-disposition@0.5.4: dependencies: @@ -12798,9 +14589,9 @@ snapshots: cookie-signature@1.0.6: {} - cookie@0.5.0: {} + cookie@0.6.0: {} - core-js-compat@3.36.1: + core-js-compat@3.37.1: dependencies: browserslist: 4.23.0 @@ -12819,7 +14610,7 @@ snapshots: nested-error-stacks: 2.1.1 p-event: 5.0.1 - cpu-features@0.0.9: + cpu-features@0.0.10: dependencies: buildcheck: 0.0.6 nan: 2.19.0 @@ -12834,13 +14625,19 @@ snapshots: dependencies: arrify: 3.0.0 cp-file: 10.0.0 - globby: 13.1.4 + globby: 13.2.2 junk: 4.0.1 - micromatch: 4.0.5 + micromatch: 4.0.7 nested-error-stacks: 2.1.1 p-filter: 3.0.0 p-map: 6.0.0 + create-require@1.1.1: {} + + cross-env@7.0.3: + dependencies: + cross-spawn: 7.0.3 + cross-fetch@3.1.8(encoding@0.1.13): dependencies: node-fetch: 2.7.0(encoding@0.1.13) @@ -12882,6 
+14679,24 @@ snapshots: data-uri-to-buffer@4.0.1: {} + data-view-buffer@1.0.1: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-data-view: 1.0.1 + + data-view-byte-length@1.0.1: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-data-view: 1.0.1 + + data-view-byte-offset@1.0.0: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-data-view: 1.0.1 + date-fns@2.30.0: dependencies: '@babel/runtime': 7.22.10 @@ -12890,7 +14705,7 @@ snapshots: dependencies: time-zone: 1.0.0 - dayjs@1.11.10: {} + dayjs@1.11.11: {} debug@2.6.9: dependencies: @@ -12929,6 +14744,12 @@ snapshots: dependencies: clone: 1.0.4 + define-data-property@1.1.4: + dependencies: + es-define-property: 1.0.0 + es-errors: 1.3.0 + gopd: 1.0.1 + define-lazy-prop@2.0.0: {} define-properties@1.2.0: @@ -12936,6 +14757,12 @@ snapshots: has-property-descriptors: 1.0.0 object-keys: 1.1.1 + define-properties@1.2.1: + dependencies: + define-data-property: 1.1.4 + has-property-descriptors: 1.0.2 + object-keys: 1.1.1 + del@6.1.1: dependencies: globby: 11.1.0 @@ -12947,20 +14774,10 @@ snapshots: rimraf: 3.0.2 slash: 3.0.0 - del@7.0.0: - dependencies: - globby: 13.1.4 - graceful-fs: 4.2.11 - is-glob: 4.0.3 - is-path-cwd: 3.0.0 - is-path-inside: 4.0.0 - p-map: 5.5.0 - rimraf: 3.0.2 - slash: 4.0.0 - delayed-stream@1.0.0: {} - delegates@1.0.0: {} + delegates@1.0.0: + optional: true denodeify@1.2.1: {} @@ -12968,24 +14785,20 @@ snapshots: depd@2.0.0: {} - deprecated-react-native-prop-types@5.0.0: - dependencies: - '@react-native/normalize-colors': 0.73.2 - invariant: 2.2.4 - prop-types: 15.8.1 - dequal@2.0.3: {} destroy@1.2.0: {} detect-libc@1.0.3: {} - detect-libc@2.0.1: {} - detect-libc@2.0.2: {} + detect-libc@2.0.3: {} + diff-sequences@29.6.3: {} + diff@4.0.2: {} + diff@5.1.0: {} difflib@0.2.4: @@ -13001,7 +14814,7 @@ snapshots: debug: 4.3.4 readable-stream: 3.6.2 split-ca: 1.0.1 - ssh2: 1.11.0 + ssh2: 1.15.0 transitivePeerDependencies: - supports-color @@ -13021,23 +14834,23 @@ snapshots: 
dependencies: esutils: 2.0.3 - dotenv-expand@10.0.0: {} + dotenv-expand@11.0.6: + dependencies: + dotenv: 16.4.5 dotenv@10.0.0: {} - dotenv@16.0.3: {} - - dotenv@16.1.4: {} + dotenv@16.4.5: {} - dprint@0.45.0: + dprint@0.46.3: optionalDependencies: - '@dprint/darwin-arm64': 0.45.0 - '@dprint/darwin-x64': 0.45.0 - '@dprint/linux-arm64-glibc': 0.45.0 - '@dprint/linux-arm64-musl': 0.45.0 - '@dprint/linux-x64-glibc': 0.45.0 - '@dprint/linux-x64-musl': 0.45.0 - '@dprint/win32-x64': 0.45.0 + '@dprint/darwin-arm64': 0.46.3 + '@dprint/darwin-x64': 0.46.3 + '@dprint/linux-arm64-glibc': 0.46.3 + '@dprint/linux-arm64-musl': 0.46.3 + '@dprint/linux-x64-glibc': 0.46.3 + '@dprint/linux-x64-musl': 0.46.3 + '@dprint/win32-x64': 0.46.3 dreamopt@0.8.0: dependencies: @@ -13056,31 +14869,35 @@ snapshots: hanji: 0.0.5 json-diff: 0.9.0 minimatch: 7.4.6 - zod: 3.22.2 + zod: 3.23.7 transitivePeerDependencies: - supports-color - drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.549.0)(@cloudflare/workers-types@4.20230904.0)(@libsql/client@0.5.6(encoding@0.1.13))(@neondatabase/serverless@0.9.0)(@opentelemetry/api@1.4.1)(@planetscale/database@1.16.0)(@types/better-sqlite3@7.6.4)(@types/pg@8.10.1)(@types/sql.js@1.4.4)(@vercel/postgres@0.8.0)(better-sqlite3@8.4.0)(bun-types@1.0.3)(knex@2.4.2(better-sqlite3@8.4.0)(mysql2@3.3.3)(pg@8.11.0)(sqlite3@5.1.6(encoding@0.1.13)))(kysely@0.25.0)(mysql2@3.3.3)(pg@8.11.0)(postgres@3.3.5)(sql.js@1.8.0)(sqlite3@5.1.6(encoding@0.1.13)): + drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.6.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@10.0.0)(bun-types@1.0.3)(knex@3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.27.3)(mysql2@3.9.8)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): optionalDependencies: - 
'@aws-sdk/client-rds-data': 3.549.0 - '@cloudflare/workers-types': 4.20230904.0 - '@libsql/client': 0.5.6(encoding@0.1.13) - '@neondatabase/serverless': 0.9.0 - '@opentelemetry/api': 1.4.1 - '@planetscale/database': 1.16.0 - '@types/better-sqlite3': 7.6.4 - '@types/pg': 8.10.1 - '@types/sql.js': 1.4.4 + '@aws-sdk/client-rds-data': 3.583.0 + '@cloudflare/workers-types': 4.20240524.0 + '@libsql/client': 0.6.0 + '@neondatabase/serverless': 0.9.3 + '@opentelemetry/api': 1.8.0 + '@planetscale/database': 1.18.0 + '@types/better-sqlite3': 7.6.10 + '@types/pg': 8.11.6 + '@types/sql.js': 1.4.9 '@vercel/postgres': 0.8.0 - better-sqlite3: 8.4.0 + better-sqlite3: 10.0.0 bun-types: 1.0.3 - knex: 2.4.2(better-sqlite3@8.4.0)(mysql2@3.3.3)(pg@8.11.0)(sqlite3@5.1.6(encoding@0.1.13)) - kysely: 0.25.0 - mysql2: 3.3.3 - pg: 8.11.0 - postgres: 3.3.5 - sql.js: 1.8.0 - sqlite3: 5.1.6(encoding@0.1.13) + knex: 3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7) + kysely: 0.27.3 + mysql2: 3.9.8 + pg: 8.11.5 + postgres: 3.4.4 + sql.js: 1.10.3 + sqlite3: 5.1.7 + + drizzle-prisma-generator@0.1.4: + dependencies: + '@prisma/generator-helper': 5.16.1 duplexer@0.1.2: {} @@ -13088,14 +14905,16 @@ snapshots: ee-first@1.1.1: {} - electron-to-chromium@1.4.727: {} + electron-to-chromium@1.4.783: {} - emittery@1.0.1: {} + emittery@1.0.3: {} emoji-regex@8.0.0: {} emoji-regex@9.2.2: {} + emojilib@2.4.0: {} + encodeurl@1.0.2: {} encoding@0.1.13: @@ -13112,7 +14931,7 @@ snapshots: env-paths@2.2.1: optional: true - envinfo@7.11.1: {} + envinfo@7.13.0: {} eol@0.9.1: {} @@ -13172,7 +14991,66 @@ snapshots: typed-array-byte-offset: 1.0.0 typed-array-length: 1.0.4 unbox-primitive: 1.0.2 - which-typed-array: 1.1.11 + which-typed-array: 1.1.11 + + es-abstract@1.23.3: + dependencies: + array-buffer-byte-length: 1.0.1 + arraybuffer.prototype.slice: 1.0.3 + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + data-view-buffer: 1.0.1 + data-view-byte-length: 1.0.1 + data-view-byte-offset: 1.0.0 + 
es-define-property: 1.0.0 + es-errors: 1.3.0 + es-object-atoms: 1.0.0 + es-set-tostringtag: 2.0.3 + es-to-primitive: 1.2.1 + function.prototype.name: 1.1.6 + get-intrinsic: 1.2.4 + get-symbol-description: 1.0.2 + globalthis: 1.0.4 + gopd: 1.0.1 + has-property-descriptors: 1.0.2 + has-proto: 1.0.3 + has-symbols: 1.0.3 + hasown: 2.0.2 + internal-slot: 1.0.7 + is-array-buffer: 3.0.4 + is-callable: 1.2.7 + is-data-view: 1.0.1 + is-negative-zero: 2.0.3 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.3 + is-string: 1.0.7 + is-typed-array: 1.1.13 + is-weakref: 1.0.2 + object-inspect: 1.13.1 + object-keys: 1.1.1 + object.assign: 4.1.5 + regexp.prototype.flags: 1.5.2 + safe-array-concat: 1.1.2 + safe-regex-test: 1.0.3 + string.prototype.trim: 1.2.9 + string.prototype.trimend: 1.0.8 + string.prototype.trimstart: 1.0.8 + typed-array-buffer: 1.0.2 + typed-array-byte-length: 1.0.1 + typed-array-byte-offset: 1.0.2 + typed-array-length: 1.0.6 + unbox-primitive: 1.0.2 + which-typed-array: 1.1.15 + + es-define-property@1.0.0: + dependencies: + get-intrinsic: 1.2.4 + + es-errors@1.3.0: {} + + es-object-atoms@1.0.0: + dependencies: + es-errors: 1.3.0 es-set-tostringtag@2.0.1: dependencies: @@ -13180,6 +15058,12 @@ snapshots: has: 1.0.3 has-tostringtag: 1.0.0 + es-set-tostringtag@2.0.3: + dependencies: + get-intrinsic: 1.2.4 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + es-shim-unscopables@1.0.0: dependencies: has: 1.0.3 @@ -13355,6 +15239,58 @@ snapshots: '@esbuild/win32-ia32': 0.18.20 '@esbuild/win32-x64': 0.18.20 + esbuild@0.20.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.20.2 + '@esbuild/android-arm': 0.20.2 + '@esbuild/android-arm64': 0.20.2 + '@esbuild/android-x64': 0.20.2 + '@esbuild/darwin-arm64': 0.20.2 + '@esbuild/darwin-x64': 0.20.2 + '@esbuild/freebsd-arm64': 0.20.2 + '@esbuild/freebsd-x64': 0.20.2 + '@esbuild/linux-arm': 0.20.2 + '@esbuild/linux-arm64': 0.20.2 + '@esbuild/linux-ia32': 0.20.2 + '@esbuild/linux-loong64': 0.20.2 + '@esbuild/linux-mips64el': 0.20.2 + 
'@esbuild/linux-ppc64': 0.20.2 + '@esbuild/linux-riscv64': 0.20.2 + '@esbuild/linux-s390x': 0.20.2 + '@esbuild/linux-x64': 0.20.2 + '@esbuild/netbsd-x64': 0.20.2 + '@esbuild/openbsd-x64': 0.20.2 + '@esbuild/sunos-x64': 0.20.2 + '@esbuild/win32-arm64': 0.20.2 + '@esbuild/win32-ia32': 0.20.2 + '@esbuild/win32-x64': 0.20.2 + + esbuild@0.21.5: + optionalDependencies: + '@esbuild/aix-ppc64': 0.21.5 + '@esbuild/android-arm': 0.21.5 + '@esbuild/android-arm64': 0.21.5 + '@esbuild/android-x64': 0.21.5 + '@esbuild/darwin-arm64': 0.21.5 + '@esbuild/darwin-x64': 0.21.5 + '@esbuild/freebsd-arm64': 0.21.5 + '@esbuild/freebsd-x64': 0.21.5 + '@esbuild/linux-arm': 0.21.5 + '@esbuild/linux-arm64': 0.21.5 + '@esbuild/linux-ia32': 0.21.5 + '@esbuild/linux-loong64': 0.21.5 + '@esbuild/linux-mips64el': 0.21.5 + '@esbuild/linux-ppc64': 0.21.5 + '@esbuild/linux-riscv64': 0.21.5 + '@esbuild/linux-s390x': 0.21.5 + '@esbuild/linux-x64': 0.21.5 + '@esbuild/netbsd-x64': 0.21.5 + '@esbuild/openbsd-x64': 0.21.5 + '@esbuild/sunos-x64': 0.21.5 + '@esbuild/win32-arm64': 0.21.5 + '@esbuild/win32-ia32': 0.21.5 + '@esbuild/win32-x64': 0.21.5 + escalade@3.1.1: {} escalade@3.1.2: {} @@ -13377,17 +15313,17 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-module-utils@2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0): + eslint-module-utils@2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0): dependencies: debug: 3.2.7 optionalDependencies: - '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint: 8.50.0 eslint-import-resolver-node: 0.3.9 transitivePeerDependencies: - supports-color - 
eslint-plugin-import@2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0): + eslint-plugin-import@2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0): dependencies: array-includes: 3.1.6 array.prototype.findlastindex: 1.2.2 @@ -13397,7 +15333,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.50.0 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0) + eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0) has: 1.0.3 is-core-module: 2.13.0 is-glob: 4.0.3 @@ -13408,7 +15344,7 @@ snapshots: semver: 6.3.1 tsconfig-paths: 3.14.2 optionalDependencies: - '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) transitivePeerDependencies: - eslint-import-resolver-typescript - eslint-import-resolver-webpack @@ -13435,12 +15371,12 @@ snapshots: semver: 7.5.4 strip-indent: 3.0.0 - eslint-plugin-unused-imports@3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0): + eslint-plugin-unused-imports@3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0): dependencies: eslint: 8.50.0 eslint-rule-composer: 0.3.0 optionalDependencies: - 
'@typescript-eslint/eslint-plugin': 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)))(eslint@8.50.0)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + '@typescript-eslint/eslint-plugin': 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint-rule-composer@0.3.0: {} @@ -13573,6 +15509,10 @@ snapshots: estree-walker@2.0.2: {} + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.5 + esutils@2.0.3: {} etag@1.8.1: {} @@ -13625,101 +15565,107 @@ snapshots: human-signals: 3.0.1 is-stream: 3.0.0 merge-stream: 2.0.0 - npm-run-path: 5.1.0 + npm-run-path: 5.3.0 onetime: 6.0.0 signal-exit: 3.0.7 strip-final-newline: 3.0.0 + execa@8.0.1: + dependencies: + cross-spawn: 7.0.3 + get-stream: 8.0.1 + human-signals: 5.0.0 + is-stream: 3.0.0 + merge-stream: 2.0.0 + npm-run-path: 5.3.0 + onetime: 6.0.0 + signal-exit: 4.1.0 + strip-final-newline: 3.0.0 + exit@0.1.2: {} expand-template@2.0.3: {} - expo-asset@9.0.2(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-asset@10.0.6(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): dependencies: - '@react-native/assets-registry': 0.73.1 - blueimp-md5: 2.19.0 - expo-constants: 15.4.5(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-file-system: 16.0.8(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + 
'@react-native/assets-registry': 0.74.83 + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo-constants: 16.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) invariant: 2.2.4 md5-file: 3.2.3 transitivePeerDependencies: - - expo - supports-color - expo-constants@15.4.5(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-constants@16.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): dependencies: - '@expo/config': 8.5.4 - expo: 50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@expo/config': 9.0.2 + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) transitivePeerDependencies: - supports-color - expo-file-system@16.0.8(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-file-system@17.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): dependencies: - expo: 50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - 
expo-font@11.10.3(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-font@12.0.5(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): dependencies: - expo: 50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) fontfaceobserver: 2.3.0 - expo-keep-awake@12.8.2(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-keep-awake@13.0.2(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): dependencies: - expo: 50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - expo-modules-autolinking@1.10.3: + expo-modules-autolinking@1.11.1: dependencies: - '@expo/config': 8.5.4 chalk: 4.1.2 commander: 7.2.0 fast-glob: 3.3.2 find-up: 5.0.0 fs-extra: 9.1.0 - transitivePeerDependencies: - - supports-color - expo-modules-core@1.11.12: + expo-modules-core@1.12.11: dependencies: invariant: 2.2.4 - 
expo-sqlite@13.2.0(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-sqlite@13.4.0(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): dependencies: '@expo/websql': 1.0.1 - expo: 50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - - expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): - dependencies: - '@babel/runtime': 7.24.4 - '@expo/cli': 0.17.8(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.10.3)(utf-8-validate@6.0.3) - '@expo/config': 8.5.4 - '@expo/config-plugins': 7.8.4 - '@expo/metro-config': 0.17.6(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))) - '@expo/vector-icons': 14.0.0 - babel-preset-expo: 10.0.1(@babel/core@7.24.4) - expo-asset: 9.0.2(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-file-system: 16.0.8(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-font: 11.10.3(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-keep-awake: 
12.8.2(expo@50.0.14(@babel/core@7.24.4)(@react-native/babel-preset@0.73.21(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4)))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-modules-autolinking: 1.10.3 - expo-modules-core: 1.11.12 + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + + expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + dependencies: + '@babel/runtime': 7.24.6 + '@expo/cli': 0.18.13(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.11.1)(utf-8-validate@6.0.3) + '@expo/config': 9.0.2 + '@expo/config-plugins': 8.0.4 + '@expo/metro-config': 0.18.4 + '@expo/vector-icons': 14.0.2 + babel-preset-expo: 11.0.6(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)) + expo-asset: 10.0.6(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + expo-file-system: 17.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + expo-font: 12.0.5(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + expo-keep-awake: 13.0.2(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + expo-modules-autolinking: 1.11.1 + expo-modules-core: 1.12.11 fbemitter: 3.0.0(encoding@0.1.13) whatwg-url-without-unicode: 8.0.0-3 transitivePeerDependencies: - '@babel/core' - - '@react-native/babel-preset' - - bluebird + - '@babel/preset-env' - bufferutil - encoding - supports-color - utf-8-validate - express@4.18.2: + express@4.19.2: dependencies: accepts: 1.3.8 array-flatten: 1.1.1 - body-parser: 1.20.1 + body-parser: 1.20.2 
content-disposition: 0.5.4 content-type: 1.0.5 - cookie: 0.5.0 + cookie: 0.6.0 cookie-signature: 1.0.6 debug: 2.6.9 depd: 2.0.0 @@ -13756,14 +15702,6 @@ snapshots: fast-diff@1.3.0: {} - fast-glob@3.2.12: - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - fast-glob@3.3.1: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -13778,7 +15716,7 @@ snapshots: '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.5 + micromatch: 4.0.7 fast-json-stable-stringify@2.1.0: {} @@ -13788,7 +15726,7 @@ snapshots: dependencies: strnum: 1.0.5 - fast-xml-parser@4.3.6: + fast-xml-parser@4.4.0: dependencies: strnum: 1.0.5 @@ -13816,7 +15754,7 @@ snapshots: object-assign: 4.1.1 promise: 7.3.1 setimmediate: 1.0.5 - ua-parser-js: 1.0.37 + ua-parser-js: 1.0.38 transitivePeerDependencies: - encoding @@ -13825,15 +15763,9 @@ snapshots: node-domexception: 1.0.0 web-streams-polyfill: 3.2.1 - fetch-ponyfill@7.1.0(encoding@0.1.13): - dependencies: - node-fetch: 2.6.11(encoding@0.1.13) - transitivePeerDependencies: - - encoding - fetch-retry@4.1.1: {} - fflate@0.7.4: {} + fflate@0.8.2: {} figures@5.0.0: dependencies: @@ -13850,6 +15782,10 @@ snapshots: dependencies: to-regex-range: 5.0.1 + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + finalhandler@1.1.2: dependencies: debug: 2.6.9 @@ -13901,7 +15837,7 @@ snapshots: find-yarn-workspace-root@2.0.0: dependencies: - micromatch: 4.0.5 + micromatch: 4.0.7 flat-cache@3.1.0: dependencies: @@ -13909,15 +15845,15 @@ snapshots: keyv: 4.5.3 rimraf: 3.0.2 - flatted@3.2.7: {} - flatted@3.2.9: {} + flatted@3.3.1: {} + flow-enums-runtime@0.0.6: {} - flow-parser@0.206.0: {} + flow-parser@0.236.0: {} - follow-redirects@1.15.2: {} + follow-redirects@1.15.6: {} fontfaceobserver@2.3.0: {} @@ -13960,7 +15896,7 @@ snapshots: dependencies: graceful-fs: 4.2.11 jsonfile: 6.1.0 - universalify: 2.0.0 + universalify: 2.0.1 fs-extra@8.1.0: dependencies: @@ -13986,6 
+15922,10 @@ snapshots: dependencies: minipass: 3.3.6 + fs-minipass@3.0.3: + dependencies: + minipass: 7.1.2 + fs.realpath@1.0.0: {} fsevents@2.3.3: @@ -14002,22 +15942,17 @@ snapshots: es-abstract: 1.22.1 functions-have-names: 1.2.3 + function.prototype.name@1.1.6: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + functions-have-names: 1.2.3 + functions-have-names@1.2.3: {} fx@28.0.0: {} - gauge@3.0.2: - dependencies: - aproba: 2.0.0 - color-support: 1.1.3 - console-control-strings: 1.1.0 - has-unicode: 2.0.1 - object-assign: 4.1.1 - signal-exit: 3.0.7 - string-width: 4.2.3 - strip-ansi: 6.0.1 - wide-align: 1.1.5 - gauge@4.0.4: dependencies: aproba: 2.0.0 @@ -14038,8 +15973,6 @@ snapshots: get-caller-file@2.0.5: {} - get-func-name@2.0.0: {} - get-func-name@2.0.2: {} get-intrinsic@1.2.1: @@ -14049,11 +15982,19 @@ snapshots: has-proto: 1.0.1 has-symbols: 1.0.3 + get-intrinsic@1.2.4: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + has-proto: 1.0.3 + has-symbols: 1.0.3 + hasown: 2.0.2 + get-package-type@0.1.0: {} get-port@3.2.0: {} - get-port@7.0.0: {} + get-port@7.1.0: {} get-stream@4.1.0: dependencies: @@ -14061,12 +16002,22 @@ snapshots: get-stream@6.0.1: {} + get-stream@8.0.1: {} + get-symbol-description@1.0.0: dependencies: call-bind: 1.0.2 get-intrinsic: 1.2.1 - get-tsconfig@4.5.0: {} + get-symbol-description@1.0.2: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + + get-tsconfig@4.7.5: + dependencies: + resolve-pkg-maps: 1.0.0 getenv@1.0.0: {} @@ -14098,6 +16049,14 @@ snapshots: minipass: 5.0.0 path-scurry: 1.10.1 + glob@10.4.1: + dependencies: + foreground-child: 3.1.1 + jackspeak: 3.1.2 + minimatch: 9.0.4 + minipass: 7.1.2 + path-scurry: 1.11.1 + glob@6.0.4: dependencies: inflight: 1.0.6 @@ -14145,6 +16104,11 @@ snapshots: dependencies: define-properties: 1.2.0 + globalthis@1.0.4: + dependencies: + define-properties: 1.2.1 + gopd: 1.0.1 + globby@11.1.0: dependencies: array-union: 
2.1.0 @@ -14154,19 +16118,11 @@ snapshots: merge2: 1.4.1 slash: 3.0.0 - globby@13.1.3: - dependencies: - dir-glob: 3.0.1 - fast-glob: 3.3.1 - ignore: 5.2.4 - merge2: 1.4.1 - slash: 4.0.0 - - globby@13.1.4: + globby@13.2.2: dependencies: dir-glob: 3.0.1 - fast-glob: 3.3.1 - ignore: 5.2.4 + fast-glob: 3.3.2 + ignore: 5.3.1 merge2: 1.4.1 slash: 4.0.0 @@ -14202,15 +16158,26 @@ snapshots: dependencies: get-intrinsic: 1.2.1 + has-property-descriptors@1.0.2: + dependencies: + es-define-property: 1.0.0 + has-proto@1.0.1: {} + has-proto@1.0.3: {} + has-symbols@1.0.3: {} has-tostringtag@1.0.0: dependencies: has-symbols: 1.0.3 - has-unicode@2.0.1: {} + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.0.3 + + has-unicode@2.0.1: + optional: true has@1.0.3: dependencies: @@ -14222,13 +16189,13 @@ snapshots: heap@0.2.7: {} - hermes-estree@0.15.0: {} + hermes-estree@0.19.1: {} hermes-estree@0.20.1: {} - hermes-parser@0.15.0: + hermes-parser@0.19.1: dependencies: - hermes-estree: 0.15.0 + hermes-estree: 0.19.1 hermes-parser@0.20.1: dependencies: @@ -14238,7 +16205,7 @@ snapshots: dependencies: source-map: 0.7.4 - hono@4.2.1: {} + hono@4.0.1: {} hosted-git-info@2.8.9: {} @@ -14277,6 +16244,8 @@ snapshots: human-signals@3.0.1: {} + human-signals@5.0.0: {} + humanize-ms@1.2.1: dependencies: ms: 2.1.3 @@ -14320,7 +16289,8 @@ snapshots: indent-string@5.0.0: {} - infer-owner@1.0.4: {} + infer-owner@1.0.4: + optional: true inflight@1.0.6: dependencies: @@ -14342,32 +16312,41 @@ snapshots: has: 1.0.3 side-channel: 1.0.4 + internal-slot@1.0.7: + dependencies: + es-errors: 1.3.0 + hasown: 2.0.2 + side-channel: 1.0.6 + interpret@2.2.0: {} invariant@2.2.4: dependencies: loose-envify: 1.4.0 - ip-regex@2.1.0: {} - - ip@2.0.0: + ip-address@9.0.5: + dependencies: + jsbn: 1.1.0 + sprintf-js: 1.1.3 optional: true + ip-regex@2.1.0: {} + ipaddr.js@1.9.1: {} irregular-plurals@3.5.0: {} - is-arguments@1.1.1: - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - is-array-buffer@3.0.2: 
dependencies: call-bind: 1.0.2 get-intrinsic: 1.2.1 is-typed-array: 1.1.12 + is-array-buffer@3.0.4: + dependencies: + call-bind: 1.0.7 + get-intrinsic: 1.2.4 + is-arrayish@0.2.1: {} is-bigint@1.0.4: @@ -14407,6 +16386,10 @@ snapshots: dependencies: hasown: 2.0.2 + is-data-view@1.0.1: + dependencies: + is-typed-array: 1.1.13 + is-date-object@1.0.5: dependencies: has-tostringtag: 1.0.0 @@ -14427,10 +16410,6 @@ snapshots: is-fullwidth-code-point@4.0.0: {} - is-generator-function@1.0.10: - dependencies: - has-tostringtag: 1.0.0 - is-glob@2.0.1: dependencies: is-extglob: 1.0.0 @@ -14448,13 +16427,10 @@ snapshots: is-lambda@1.0.1: optional: true - is-nan@1.3.2: - dependencies: - call-bind: 1.0.2 - define-properties: 1.2.0 - is-negative-zero@2.0.2: {} + is-negative-zero@2.0.3: {} + is-number-object@1.0.7: dependencies: has-tostringtag: 1.0.0 @@ -14463,12 +16439,8 @@ snapshots: is-path-cwd@2.2.0: {} - is-path-cwd@3.0.0: {} - is-path-inside@3.0.3: {} - is-path-inside@4.0.0: {} - is-plain-object@2.0.4: dependencies: isobject: 3.0.1 @@ -14490,6 +16462,10 @@ snapshots: dependencies: call-bind: 1.0.2 + is-shared-array-buffer@1.0.3: + dependencies: + call-bind: 1.0.7 + is-stream@1.1.0: {} is-stream@2.0.1: {} @@ -14508,6 +16484,10 @@ snapshots: dependencies: which-typed-array: 1.1.11 + is-typed-array@1.1.13: + dependencies: + which-typed-array: 1.1.15 + is-unicode-supported@0.1.0: {} is-unicode-supported@1.3.0: {} @@ -14546,6 +16526,12 @@ snapshots: optionalDependencies: '@pkgjs/parseargs': 0.11.0 + jackspeak@3.1.2: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + javascript-natural-sort@0.7.1: {} jest-environment-node@29.7.0: @@ -14553,7 +16539,7 @@ snapshots: '@jest/environment': 29.7.0 '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.12.4 + '@types/node': 20.12.12 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -14561,12 +16547,12 @@ snapshots: jest-message-util@29.7.0: dependencies: - '@babel/code-frame': 7.24.2 + 
'@babel/code-frame': 7.24.6 '@jest/types': 29.6.3 '@types/stack-utils': 2.0.3 chalk: 4.1.2 graceful-fs: 4.2.11 - micromatch: 4.0.5 + micromatch: 4.0.7 pretty-format: 29.7.0 slash: 3.0.0 stack-utils: 2.0.6 @@ -14574,13 +16560,13 @@ snapshots: jest-mock@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 20.12.4 + '@types/node': 20.12.12 jest-util: 29.7.0 jest-util@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 20.12.4 + '@types/node': 20.12.12 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -14597,14 +16583,14 @@ snapshots: jest-worker@29.7.0: dependencies: - '@types/node': 20.12.4 + '@types/node': 20.12.12 jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 jimp-compact@0.16.1: {} - joi@17.12.3: + joi@17.13.1: dependencies: '@hapi/hoek': 9.3.0 '@hapi/topo': 5.1.0 @@ -14620,12 +16606,14 @@ snapshots: joycon@3.1.1: {} - js-base64@3.7.5: {} + js-base64@3.7.7: {} js-string-escape@1.0.1: {} js-tokens@4.0.0: {} + js-tokens@9.0.0: {} + js-yaml@3.14.1: dependencies: argparse: 1.0.10 @@ -14635,27 +16623,30 @@ snapshots: dependencies: argparse: 2.0.1 + jsbn@1.1.0: + optional: true + jsc-android@250231.0.0: {} jsc-safe-url@0.2.4: {} - jscodeshift@0.14.0(@babel/preset-env@7.24.4(@babel/core@7.24.4)): - dependencies: - '@babel/core': 7.24.4 - '@babel/parser': 7.24.4 - '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/core@7.24.4) - '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/core@7.24.4) - '@babel/plugin-transform-modules-commonjs': 7.24.1(@babel/core@7.24.4) - '@babel/preset-env': 7.24.4(@babel/core@7.24.4) - '@babel/preset-flow': 7.24.1(@babel/core@7.24.4) - '@babel/preset-typescript': 7.24.1(@babel/core@7.24.4) - '@babel/register': 7.23.7(@babel/core@7.24.4) - babel-core: 7.0.0-bridge.0(@babel/core@7.24.4) + jscodeshift@0.14.0(@babel/preset-env@7.24.6(@babel/core@7.24.6)): + dependencies: + '@babel/core': 7.24.6 + '@babel/parser': 7.24.6 + 
'@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/core@7.24.6) + '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/core@7.24.6) + '@babel/plugin-transform-modules-commonjs': 7.24.6(@babel/core@7.24.6) + '@babel/preset-env': 7.24.6(@babel/core@7.24.6) + '@babel/preset-flow': 7.24.6(@babel/core@7.24.6) + '@babel/preset-typescript': 7.24.6(@babel/core@7.24.6) + '@babel/register': 7.24.6(@babel/core@7.24.6) + babel-core: 7.0.0-bridge.0(@babel/core@7.24.6) chalk: 4.1.2 - flow-parser: 0.206.0 + flow-parser: 0.236.0 graceful-fs: 4.2.11 - micromatch: 4.0.5 + micromatch: 4.0.7 neo-async: 2.6.2 node-dir: 0.1.17 recast: 0.21.5 @@ -14690,7 +16681,7 @@ snapshots: lodash: 4.17.21 md5: 2.2.1 memory-cache: 0.2.0 - traverse: 0.6.8 + traverse: 0.6.9 valid-url: 1.0.9 json-schema-traverse@0.4.1: {} @@ -14703,8 +16694,6 @@ snapshots: json5@2.2.3: {} - jsonc-parser@3.2.0: {} - jsonfile@4.0.0: optionalDependencies: graceful-fs: 4.2.11 @@ -14734,32 +16723,60 @@ snapshots: kleur@4.1.5: {} - knex@2.4.2(better-sqlite3@8.4.0)(mysql2@3.3.3)(pg@8.11.0)(sqlite3@5.1.6(encoding@0.1.13)): + knex@2.5.1(better-sqlite3@8.7.0)(mysql2@3.3.3)(pg@8.11.5)(sqlite3@5.1.7): dependencies: colorette: 2.0.19 - commander: 9.5.0 + commander: 10.0.1 debug: 4.3.4 - escalade: 3.1.1 + escalade: 3.1.2 esm: 3.2.25 get-package-type: 0.1.0 getopts: 2.3.0 interpret: 2.2.0 lodash: 4.17.21 - pg-connection-string: 2.5.0 + pg-connection-string: 2.6.1 rechoir: 0.8.0 resolve-from: 5.0.0 tarn: 3.0.2 tildify: 2.0.0 optionalDependencies: - better-sqlite3: 8.4.0 + better-sqlite3: 8.7.0 mysql2: 3.3.3 - pg: 8.11.0 - sqlite3: 5.1.6(encoding@0.1.13) + pg: 8.11.5 + sqlite3: 5.1.7 + transitivePeerDependencies: + - supports-color + + knex@3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7): + dependencies: + colorette: 2.0.19 + commander: 10.0.1 + debug: 4.3.4 + escalade: 3.1.2 + esm: 3.2.25 + get-package-type: 0.1.0 + 
getopts: 2.3.0 + interpret: 2.2.0 + lodash: 4.17.21 + pg-connection-string: 2.6.2 + rechoir: 0.8.0 + resolve-from: 5.0.0 + tarn: 3.0.2 + tildify: 2.0.0 + optionalDependencies: + better-sqlite3: 10.0.0 + mysql2: 3.9.8 + pg: 8.11.5 + sqlite3: 5.1.7 transitivePeerDependencies: - supports-color + optional: true kysely@0.25.0: {} + kysely@0.27.3: + optional: true + leven@3.1.0: {} levn@0.4.1: @@ -14767,18 +16784,18 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 - libsql@0.3.10: + libsql@0.3.18: dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.3.10 - '@libsql/darwin-x64': 0.3.10 - '@libsql/linux-arm64-gnu': 0.3.10 - '@libsql/linux-arm64-musl': 0.3.10 - '@libsql/linux-x64-gnu': 0.3.10 - '@libsql/linux-x64-musl': 0.3.10 - '@libsql/win32-x64-msvc': 0.3.10 + '@libsql/darwin-arm64': 0.3.18 + '@libsql/darwin-x64': 0.3.18 + '@libsql/linux-arm64-gnu': 0.3.18 + '@libsql/linux-arm64-musl': 0.3.18 + '@libsql/linux-x64-gnu': 0.3.18 + '@libsql/linux-x64-musl': 0.3.18 + '@libsql/win32-x64-msvc': 0.3.18 lighthouse-logger@1.4.2: dependencies: @@ -14790,27 +16807,54 @@ snapshots: lightningcss-darwin-arm64@1.19.0: optional: true + lightningcss-darwin-arm64@1.25.1: + optional: true + lightningcss-darwin-x64@1.19.0: optional: true + lightningcss-darwin-x64@1.25.1: + optional: true + + lightningcss-freebsd-x64@1.25.1: + optional: true + lightningcss-linux-arm-gnueabihf@1.19.0: optional: true + lightningcss-linux-arm-gnueabihf@1.25.1: + optional: true + lightningcss-linux-arm64-gnu@1.19.0: optional: true + lightningcss-linux-arm64-gnu@1.25.1: + optional: true + lightningcss-linux-arm64-musl@1.19.0: optional: true + lightningcss-linux-arm64-musl@1.25.1: + optional: true + lightningcss-linux-x64-gnu@1.19.0: optional: true + lightningcss-linux-x64-gnu@1.25.1: + optional: true + lightningcss-linux-x64-musl@1.19.0: optional: true + lightningcss-linux-x64-musl@1.25.1: + optional: true + lightningcss-win32-x64-msvc@1.19.0: optional: true + 
lightningcss-win32-x64-msvc@1.25.1: + optional: true + lightningcss@1.19.0: dependencies: detect-libc: 1.0.3 @@ -14824,6 +16868,21 @@ snapshots: lightningcss-linux-x64-musl: 1.19.0 lightningcss-win32-x64-msvc: 1.19.0 + lightningcss@1.25.1: + dependencies: + detect-libc: 1.0.3 + optionalDependencies: + lightningcss-darwin-arm64: 1.25.1 + lightningcss-darwin-x64: 1.25.1 + lightningcss-freebsd-x64: 1.25.1 + lightningcss-linux-arm-gnueabihf: 1.25.1 + lightningcss-linux-arm64-gnu: 1.25.1 + lightningcss-linux-arm64-musl: 1.25.1 + lightningcss-linux-x64-gnu: 1.25.1 + lightningcss-linux-x64-musl: 1.25.1 + lightningcss-win32-x64-msvc: 1.25.1 + optional: true + lilconfig@2.1.0: {} lines-and-columns@1.2.4: {} @@ -14832,7 +16891,10 @@ snapshots: load-tsconfig@0.2.5: {} - local-pkg@0.4.3: {} + local-pkg@0.5.0: + dependencies: + mlly: 1.7.0 + pkg-types: 1.1.0 locate-path@3.0.0: dependencies: @@ -14873,7 +16935,7 @@ snapshots: logkitty@0.7.1: dependencies: ansi-fragments: 0.2.1 - dayjs: 1.11.10 + dayjs: 1.11.11 yargs: 15.4.1 long@5.2.3: {} @@ -14882,9 +16944,11 @@ snapshots: dependencies: js-tokens: 4.0.0 - loupe@2.3.6: + loupe@2.3.7: dependencies: - get-func-name: 2.0.0 + get-func-name: 2.0.2 + + lru-cache@10.2.2: {} lru-cache@5.1.1: dependencies: @@ -14904,11 +16968,7 @@ snapshots: dependencies: es5-ext: 0.10.62 - magic-string@0.30.0: - dependencies: - '@jridgewell/sourcemap-codec': 1.4.15 - - magic-string@0.30.5: + magic-string@0.30.10: dependencies: '@jridgewell/sourcemap-codec': 1.4.15 @@ -14917,13 +16977,11 @@ snapshots: pify: 4.0.1 semver: 5.7.2 - make-dir@3.1.0: - dependencies: - semver: 6.3.1 + make-error@1.3.6: {} make-fetch-happen@9.1.0: dependencies: - agentkeepalive: 4.3.0 + agentkeepalive: 4.5.0 cacache: 15.3.0 http-cache-semantics: 4.1.1 http-proxy-agent: 4.0.1 @@ -14954,17 +17012,17 @@ snapshots: map-stream@0.1.0: {} - marked-terminal@5.2.0(marked@5.1.2): + marked-terminal@6.2.0(marked@9.1.6): dependencies: ansi-escapes: 6.2.0 cardinal: 2.1.1 chalk: 5.3.0 
cli-table3: 0.6.3 - marked: 5.1.2 - node-emoji: 1.11.0 - supports-hyperlinks: 2.3.0 + marked: 9.1.6 + node-emoji: 2.1.3 + supports-hyperlinks: 3.0.0 - marked@5.1.2: {} + marked@9.1.6: {} marky@1.2.5: {} @@ -15026,42 +17084,42 @@ snapshots: methods@1.1.2: {} - metro-babel-transformer@0.80.8: + metro-babel-transformer@0.80.9: dependencies: - '@babel/core': 7.24.4 + '@babel/core': 7.24.6 hermes-parser: 0.20.1 nullthrows: 1.1.1 transitivePeerDependencies: - supports-color - metro-cache-key@0.80.8: {} + metro-cache-key@0.80.9: {} - metro-cache@0.80.8: + metro-cache@0.80.9: dependencies: - metro-core: 0.80.8 + metro-core: 0.80.9 rimraf: 3.0.2 - metro-config@0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + metro-config@0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): dependencies: connect: 3.7.0 cosmiconfig: 5.2.1 jest-validate: 29.7.0 - metro: 0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - metro-cache: 0.80.8 - metro-core: 0.80.8 - metro-runtime: 0.80.8 + metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-cache: 0.80.9 + metro-core: 0.80.9 + metro-runtime: 0.80.9 transitivePeerDependencies: - bufferutil - encoding - supports-color - utf-8-validate - metro-core@0.80.8: + metro-core@0.80.9: dependencies: lodash.throttle: 4.1.1 - metro-resolver: 0.80.8 + metro-resolver: 0.80.9 - metro-file-map@0.80.8: + metro-file-map@0.80.9: dependencies: anymatch: 3.1.3 debug: 2.6.9 @@ -15069,7 +17127,7 @@ snapshots: graceful-fs: 4.2.11 invariant: 2.2.4 jest-worker: 29.7.0 - micromatch: 4.0.5 + micromatch: 4.0.7 node-abort-controller: 3.1.1 nullthrows: 1.1.1 walker: 1.0.8 @@ -15078,33 +17136,33 @@ snapshots: transitivePeerDependencies: - supports-color - metro-minify-terser@0.80.8: + metro-minify-terser@0.80.9: dependencies: - terser: 5.30.3 + terser: 5.31.0 - metro-resolver@0.80.8: {} + metro-resolver@0.80.9: {} - metro-runtime@0.80.8: + metro-runtime@0.80.9: dependencies: - '@babel/runtime': 7.24.4 + 
'@babel/runtime': 7.24.6 - metro-source-map@0.80.8: + metro-source-map@0.80.9: dependencies: - '@babel/traverse': 7.24.1 - '@babel/types': 7.24.0 + '@babel/traverse': 7.24.6 + '@babel/types': 7.24.6 invariant: 2.2.4 - metro-symbolicate: 0.80.8 + metro-symbolicate: 0.80.9 nullthrows: 1.1.1 - ob1: 0.80.8 + ob1: 0.80.9 source-map: 0.5.7 vlq: 1.0.1 transitivePeerDependencies: - supports-color - metro-symbolicate@0.80.8: + metro-symbolicate@0.80.9: dependencies: invariant: 2.2.4 - metro-source-map: 0.80.8 + metro-source-map: 0.80.9 nullthrows: 1.1.1 source-map: 0.5.7 through2: 2.0.5 @@ -15112,29 +17170,29 @@ snapshots: transitivePeerDependencies: - supports-color - metro-transform-plugins@0.80.8: + metro-transform-plugins@0.80.9: dependencies: - '@babel/core': 7.24.4 - '@babel/generator': 7.24.4 - '@babel/template': 7.24.0 - '@babel/traverse': 7.24.1 + '@babel/core': 7.24.6 + '@babel/generator': 7.24.6 + '@babel/template': 7.24.6 + '@babel/traverse': 7.24.6 nullthrows: 1.1.1 transitivePeerDependencies: - supports-color - metro-transform-worker@0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): - dependencies: - '@babel/core': 7.24.4 - '@babel/generator': 7.24.4 - '@babel/parser': 7.24.4 - '@babel/types': 7.24.0 - metro: 0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - metro-babel-transformer: 0.80.8 - metro-cache: 0.80.8 - metro-cache-key: 0.80.8 - metro-minify-terser: 0.80.8 - metro-source-map: 0.80.8 - metro-transform-plugins: 0.80.8 + metro-transform-worker@0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + dependencies: + '@babel/core': 7.24.6 + '@babel/generator': 7.24.6 + '@babel/parser': 7.24.6 + '@babel/types': 7.24.6 + metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-babel-transformer: 0.80.9 + metro-cache: 0.80.9 + metro-cache-key: 0.80.9 + metro-minify-terser: 0.80.9 + metro-source-map: 0.80.9 + metro-transform-plugins: 0.80.9 nullthrows: 1.1.1 transitivePeerDependencies: - 
bufferutil @@ -15142,15 +17200,15 @@ snapshots: - supports-color - utf-8-validate - metro@0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + metro@0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): dependencies: - '@babel/code-frame': 7.24.2 - '@babel/core': 7.24.4 - '@babel/generator': 7.24.4 - '@babel/parser': 7.24.4 - '@babel/template': 7.24.0 - '@babel/traverse': 7.24.1 - '@babel/types': 7.24.0 + '@babel/code-frame': 7.24.6 + '@babel/core': 7.24.6 + '@babel/generator': 7.24.6 + '@babel/parser': 7.24.6 + '@babel/template': 7.24.6 + '@babel/traverse': 7.24.6 + '@babel/types': 7.24.6 accepts: 1.3.8 chalk: 4.1.2 ci-info: 2.0.0 @@ -15165,18 +17223,18 @@ snapshots: jest-worker: 29.7.0 jsc-safe-url: 0.2.4 lodash.throttle: 4.1.1 - metro-babel-transformer: 0.80.8 - metro-cache: 0.80.8 - metro-cache-key: 0.80.8 - metro-config: 0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - metro-core: 0.80.8 - metro-file-map: 0.80.8 - metro-resolver: 0.80.8 - metro-runtime: 0.80.8 - metro-source-map: 0.80.8 - metro-symbolicate: 0.80.8 - metro-transform-plugins: 0.80.8 - metro-transform-worker: 0.80.8(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-babel-transformer: 0.80.9 + metro-cache: 0.80.9 + metro-cache-key: 0.80.9 + metro-config: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-core: 0.80.9 + metro-file-map: 0.80.9 + metro-resolver: 0.80.9 + metro-runtime: 0.80.9 + metro-source-map: 0.80.9 + metro-symbolicate: 0.80.9 + metro-transform-plugins: 0.80.9 + metro-transform-worker: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) mime-types: 2.1.35 node-fetch: 2.7.0(encoding@0.1.13) nullthrows: 1.1.1 @@ -15198,6 +17256,11 @@ snapshots: braces: 3.0.2 picomatch: 2.3.1 + micromatch@4.0.7: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + mime-db@1.52.0: {} mime-types@2.1.35: @@ -15234,11 +17297,20 @@ snapshots: dependencies: brace-expansion: 2.0.1 + minimatch@9.0.4: + dependencies: + 
brace-expansion: 2.0.1 + minimist@1.2.8: {} minipass-collect@1.0.2: dependencies: minipass: 3.3.6 + optional: true + + minipass-collect@2.0.1: + dependencies: + minipass: 7.1.2 minipass-fetch@1.4.1: dependencies: @@ -15266,10 +17338,10 @@ snapshots: dependencies: yallist: 4.0.0 - minipass@4.2.5: {} - minipass@5.0.0: {} + minipass@7.1.2: {} + minizlib@2.1.2: dependencies: minipass: 3.3.6 @@ -15283,23 +17355,16 @@ snapshots: mkdirp@1.0.4: {} - mlly@1.3.0: + mlly@1.7.0: dependencies: - acorn: 8.8.2 - pathe: 1.1.1 - pkg-types: 1.0.3 - ufo: 1.1.2 - - mlly@1.4.2: - dependencies: - acorn: 8.10.0 - pathe: 1.1.1 - pkg-types: 1.0.3 - ufo: 1.3.1 + acorn: 8.11.3 + pathe: 1.1.2 + pkg-types: 1.1.0 + ufo: 1.5.3 mri@1.2.0: {} - mrmime@1.0.1: {} + mrmime@2.0.0: {} ms@2.0.0: {} @@ -15325,6 +17390,18 @@ snapshots: seq-queue: 0.0.5 sqlstring: 2.3.3 + mysql2@3.9.8: + dependencies: + denque: 2.1.0 + generate-function: 2.3.1 + iconv-lite: 0.6.3 + long: 5.2.3 + lru-cache: 8.0.5 + named-placeholders: 1.1.3 + seq-queue: 0.0.5 + sqlstring: 2.3.3 + optional: true + mz@2.7.0: dependencies: any-promise: 1.3.0 @@ -15338,8 +17415,6 @@ snapshots: nan@2.19.0: optional: true - nanoid@3.3.6: {} - nanoid@3.3.7: {} napi-build-utils@1.0.2: {} @@ -15363,13 +17438,13 @@ snapshots: nocache@3.0.4: {} - node-abi@3.40.0: + node-abi@3.62.0: dependencies: - semver: 7.5.4 + semver: 7.6.1 node-abort-controller@3.1.1: {} - node-addon-api@4.3.0: {} + node-addon-api@7.1.0: {} node-dir@0.1.17: dependencies: @@ -15377,21 +17452,12 @@ snapshots: node-domexception@1.0.0: {} - node-emoji@1.11.0: - dependencies: - lodash: 4.17.21 - - node-fetch@2.6.11(encoding@0.1.13): - dependencies: - whatwg-url: 5.0.0 - optionalDependencies: - encoding: 0.1.13 - - node-fetch@2.6.9(encoding@0.1.13): + node-emoji@2.1.3: dependencies: - whatwg-url: 5.0.0 - optionalDependencies: - encoding: 0.1.13 + '@sindresorhus/is': 4.6.0 + char-regex: 1.0.2 + emojilib: 2.4.0 + skin-tone: 2.0.0 node-fetch@2.7.0(encoding@0.1.13): dependencies: @@ 
-15413,7 +17479,7 @@ snapshots: node-forge@1.3.1: {} - node-gyp-build@4.6.0: {} + node-gyp-build@4.8.1: {} node-gyp@8.4.1: dependencies: @@ -15424,8 +17490,8 @@ snapshots: nopt: 5.0.0 npmlog: 6.0.2 rimraf: 3.0.2 - semver: 7.5.4 - tar: 6.1.13 + semver: 7.6.2 + tar: 6.2.1 which: 2.0.2 transitivePeerDependencies: - bluebird @@ -15445,6 +17511,7 @@ snapshots: nopt@5.0.0: dependencies: abbrev: 1.1.1 + optional: true normalize-package-data@2.5.0: dependencies: @@ -15470,17 +17537,10 @@ snapshots: dependencies: path-key: 3.1.1 - npm-run-path@5.1.0: + npm-run-path@5.3.0: dependencies: path-key: 4.0.0 - npmlog@5.0.1: - dependencies: - are-we-there-yet: 2.0.0 - console-control-strings: 1.1.0 - gauge: 3.0.2 - set-blocking: 2.0.0 - npmlog@6.0.2: dependencies: are-we-there-yet: 3.0.1 @@ -15493,12 +17553,12 @@ snapshots: dependencies: execa: 6.1.0 parse-package-name: 1.0.0 - semver: 7.6.0 + semver: 7.6.1 validate-npm-package-name: 4.0.0 nullthrows@1.1.1: {} - ob1@0.80.8: {} + ob1@0.80.9: {} object-assign@4.1.1: {} @@ -15506,11 +17566,8 @@ snapshots: object-inspect@1.12.3: {} - object-is@1.1.5: - dependencies: - call-bind: 1.0.2 - define-properties: 1.2.0 - + object-inspect@1.13.1: {} + object-keys@1.1.1: {} object.assign@4.1.4: @@ -15520,6 +17577,13 @@ snapshots: has-symbols: 1.0.3 object-keys: 1.1.1 + object.assign@4.1.5: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + has-symbols: 1.0.3 + object-keys: 1.1.1 + object.fromentries@2.0.6: dependencies: call-bind: 1.0.2 @@ -15584,7 +17648,7 @@ snapshots: is-docker: 2.2.1 is-wsl: 2.2.0 - openid-client@5.6.5: + openid-client@5.6.4: dependencies: jose: 4.15.5 lru-cache: 6.0.0 @@ -15654,6 +17718,10 @@ snapshots: dependencies: yocto-queue: 1.0.0 + p-limit@5.0.0: + dependencies: + yocto-queue: 1.0.0 + p-locate@3.0.0: dependencies: p-limit: 2.3.0 @@ -15684,8 +17752,6 @@ snapshots: p-try@2.2.0: {} - packet-reader@1.0.0: {} - parent-module@1.0.1: dependencies: callsites: 3.1.0 @@ -15738,6 +17804,11 @@ snapshots: lru-cache: 
9.1.2 minipass: 5.0.0 + path-scurry@1.11.1: + dependencies: + lru-cache: 10.2.2 + minipass: 7.1.2 + path-scurry@1.7.0: dependencies: lru-cache: 9.1.2 @@ -15747,7 +17818,7 @@ snapshots: path-type@4.0.0: {} - pathe@1.1.1: {} + pathe@1.1.2: {} pathval@1.1.1: {} @@ -15758,19 +17829,22 @@ snapshots: pg-cloudflare@1.1.1: optional: true - pg-connection-string@2.5.0: {} + pg-connection-string@2.6.1: {} + + pg-connection-string@2.6.2: + optional: true - pg-connection-string@2.6.0: {} + pg-connection-string@2.6.4: {} pg-int8@1.0.1: {} pg-numeric@1.0.2: {} - pg-pool@3.6.0(pg@8.11.0): + pg-pool@3.6.2(pg@8.11.5): dependencies: - pg: 8.11.0 + pg: 8.11.5 - pg-protocol@1.6.0: {} + pg-protocol@1.6.1: {} pg-types@2.2.0: dependencies: @@ -15780,23 +17854,21 @@ snapshots: postgres-date: 1.0.7 postgres-interval: 1.2.0 - pg-types@4.0.1: + pg-types@4.0.2: dependencies: pg-int8: 1.0.1 pg-numeric: 1.0.2 postgres-array: 3.0.2 postgres-bytea: 3.0.0 - postgres-date: 2.0.1 + postgres-date: 2.1.0 postgres-interval: 3.0.0 - postgres-range: 1.1.3 + postgres-range: 1.1.4 - pg@8.11.0: + pg@8.11.5: dependencies: - buffer-writer: 2.0.0 - packet-reader: 1.0.0 - pg-connection-string: 2.6.0 - pg-pool: 3.6.0(pg@8.11.0) - pg-protocol: 1.6.0 + pg-connection-string: 2.6.4 + pg-pool: 3.6.2(pg@8.11.5) + pg-protocol: 1.6.1 pg-types: 2.2.0 pgpass: 1.0.5 optionalDependencies: @@ -15808,6 +17880,8 @@ snapshots: picocolors@1.0.0: {} + picocolors@1.0.1: {} + picomatch@2.3.1: {} picomatch@3.0.1: {} @@ -15825,11 +17899,11 @@ snapshots: dependencies: find-up: 3.0.0 - pkg-types@1.0.3: + pkg-types@1.1.0: dependencies: - jsonc-parser: 3.2.0 - mlly: 1.3.0 - pathe: 1.1.1 + confbox: 0.1.7 + mlly: 1.7.0 + pathe: 1.1.2 plist@3.1.0: dependencies: @@ -15845,23 +17919,26 @@ snapshots: pngjs@3.4.0: {} - postcss-load-config@4.0.1(postcss@8.4.38): + possible-typed-array-names@1.0.0: {} + + postcss-load-config@4.0.1(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))): dependencies: lilconfig: 
2.1.0 yaml: 2.3.1 optionalDependencies: - postcss: 8.4.38 + postcss: 8.4.39 + ts-node: 10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) - postcss@8.4.24: + postcss@8.4.38: dependencies: - nanoid: 3.3.6 - picocolors: 1.0.0 - source-map-js: 1.0.2 + nanoid: 3.3.7 + picocolors: 1.0.1 + source-map-js: 1.2.0 - postcss@8.4.38: + postcss@8.4.39: dependencies: nanoid: 3.3.7 - picocolors: 1.0.0 + picocolors: 1.0.1 source-map-js: 1.2.0 postgres-array@2.0.0: {} @@ -15876,7 +17953,7 @@ snapshots: postgres-date@1.0.7: {} - postgres-date@2.0.1: {} + postgres-date@2.1.0: {} postgres-interval@1.2.0: dependencies: @@ -15884,21 +17961,21 @@ snapshots: postgres-interval@3.0.0: {} - postgres-range@1.1.3: {} + postgres-range@1.1.4: {} - postgres@3.3.5: {} + postgres@3.4.4: {} pouchdb-collections@1.0.1: {} - prebuild-install@7.1.1: + prebuild-install@7.1.2: dependencies: - detect-libc: 2.0.1 + detect-libc: 2.0.3 expand-template: 2.0.3 github-from-package: 0.0.0 minimist: 1.2.8 mkdirp-classic: 0.5.3 napi-build-utils: 1.0.2 - node-abi: 3.40.0 + node-abi: 3.62.0 pump: 3.0.0 rc: 1.2.8 simple-get: 4.0.1 @@ -15918,12 +17995,6 @@ snapshots: ansi-styles: 4.3.0 react-is: 17.0.2 - pretty-format@27.5.1: - dependencies: - ansi-regex: 5.0.1 - ansi-styles: 5.2.0 - react-is: 17.0.2 - pretty-format@29.7.0: dependencies: '@jest/schemas': 29.6.3 @@ -15934,11 +18005,16 @@ snapshots: dependencies: parse-ms: 3.0.0 + prisma@5.14.0: + dependencies: + '@prisma/engines': 5.14.0 + process-nextick-args@2.0.1: {} progress@2.0.3: {} - promise-inflight@1.0.1: {} + promise-inflight@1.0.1: + optional: true promise-retry@2.0.1: dependencies: @@ -15989,7 +18065,9 @@ snapshots: qs@6.11.0: dependencies: - side-channel: 1.0.4 + side-channel: 1.0.6 + + querystring@0.2.1: {} queue-microtask@1.2.3: {} @@ -16003,7 +18081,7 @@ snapshots: range-parser@1.2.1: {} - raw-body@2.5.1: + raw-body@2.5.2: dependencies: bytes: 3.1.2 http-errors: 2.0.0 @@ -16017,7 +18095,7 @@ snapshots: minimist: 1.2.8 
strip-json-comments: 2.0.1 - react-devtools-core@4.28.5(bufferutil@4.0.8)(utf-8-validate@6.0.3): + react-devtools-core@5.2.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): dependencies: shell-quote: 1.8.1 ws: 7.5.9(bufferutil@4.0.8)(utf-8-validate@6.0.3) @@ -16031,47 +18109,50 @@ snapshots: react-is@18.2.0: {} - react-native@0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3): + react-is@18.3.1: {} + + react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3): dependencies: '@jest/create-cache-key-function': 29.7.0 - '@react-native-community/cli': 12.3.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - '@react-native-community/cli-platform-android': 12.3.6(encoding@0.1.13) - '@react-native-community/cli-platform-ios': 12.3.6(encoding@0.1.13) - '@react-native/assets-registry': 0.73.1 - '@react-native/codegen': 0.73.3(@babel/preset-env@7.24.4(@babel/core@7.24.4)) - '@react-native/community-cli-plugin': 0.73.17(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - '@react-native/gradle-plugin': 0.73.4 - '@react-native/js-polyfills': 0.73.1 - '@react-native/normalize-colors': 0.73.2 - '@react-native/virtualized-lists': 0.73.4(react-native@0.73.6(@babel/core@7.24.4)(@babel/preset-env@7.24.4(@babel/core@7.24.4))(bufferutil@4.0.8)(encoding@0.1.13)(react@18.2.0)(utf-8-validate@6.0.3)) + '@react-native-community/cli': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native-community/cli-platform-android': 13.6.6(encoding@0.1.13) + '@react-native-community/cli-platform-ios': 13.6.6(encoding@0.1.13) + '@react-native/assets-registry': 0.74.83 + '@react-native/codegen': 0.74.83(@babel/preset-env@7.24.6(@babel/core@7.24.6)) + '@react-native/community-cli-plugin': 
0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native/gradle-plugin': 0.74.83 + '@react-native/js-polyfills': 0.74.83 + '@react-native/normalize-colors': 0.74.83 + '@react-native/virtualized-lists': 0.74.83(@types/react@18.3.1)(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1) abort-controller: 3.0.0 anser: 1.4.10 ansi-regex: 5.0.1 base64-js: 1.5.1 chalk: 4.1.2 - deprecated-react-native-prop-types: 5.0.0 event-target-shim: 5.0.1 flow-enums-runtime: 0.0.6 invariant: 2.2.4 jest-environment-node: 29.7.0 jsc-android: 250231.0.0 memoize-one: 5.2.1 - metro-runtime: 0.80.8 - metro-source-map: 0.80.8 + metro-runtime: 0.80.9 + metro-source-map: 0.80.9 mkdirp: 0.5.6 nullthrows: 1.1.1 pretty-format: 26.6.2 promise: 8.3.0 - react: 18.2.0 - react-devtools-core: 4.28.5(bufferutil@4.0.8)(utf-8-validate@6.0.3) - react-refresh: 0.14.0 - react-shallow-renderer: 16.15.0(react@18.2.0) + react: 18.3.1 + react-devtools-core: 5.2.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + react-refresh: 0.14.2 + react-shallow-renderer: 16.15.0(react@18.3.1) regenerator-runtime: 0.13.11 scheduler: 0.24.0-canary-efb381bbf-20230505 stacktrace-parser: 0.1.10 whatwg-fetch: 3.6.20 ws: 6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) yargs: 17.7.2 + optionalDependencies: + '@types/react': 18.3.1 transitivePeerDependencies: - '@babel/core' - '@babel/preset-env' @@ -16080,15 +18161,15 @@ snapshots: - supports-color - utf-8-validate - react-refresh@0.14.0: {} + react-refresh@0.14.2: {} - react-shallow-renderer@16.15.0(react@18.2.0): + react-shallow-renderer@16.15.0(react@18.3.1): dependencies: object-assign: 4.1.1 - react: 18.2.0 - react-is: 18.2.0 + react: 18.3.1 + react-is: 18.3.1 - react@18.2.0: + react@18.3.1: dependencies: loose-envify: 1.4.0 @@ -16134,17 +18215,17 @@ snapshots: 
source-map: 0.6.1 tslib: 2.6.2 - recast@0.23.4: + recast@0.23.9: dependencies: - assert: 2.1.0 ast-types: 0.16.1 esprima: 4.0.1 source-map: 0.6.1 + tiny-invariant: 1.3.3 tslib: 2.6.2 rechoir@0.8.0: dependencies: - resolve: 1.22.2 + resolve: 1.22.8 redeyed@2.1.1: dependencies: @@ -16164,7 +18245,7 @@ snapshots: regenerator-transform@0.15.2: dependencies: - '@babel/runtime': 7.24.4 + '@babel/runtime': 7.24.6 regexp-tree@0.1.27: {} @@ -16174,6 +18255,13 @@ snapshots: define-properties: 1.2.0 functions-have-names: 1.2.3 + regexp.prototype.flags@1.5.2: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-errors: 1.3.0 + set-function-name: 2.0.2 + regexpu-core@5.3.2: dependencies: '@babel/regjsgen': 0.8.0 @@ -16215,12 +18303,14 @@ snapshots: resolve-from@5.0.0: {} - resolve-tspaths@0.8.16(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)): + resolve-pkg-maps@1.0.0: {} + + resolve-tspaths@0.8.16(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): dependencies: ansi-colors: 4.1.3 commander: 11.0.0 fast-glob: 3.3.1 - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) resolve.exports@2.0.2: {} @@ -16265,6 +18355,8 @@ snapshots: retry@0.12.0: optional: true + retry@0.13.1: {} + reusify@1.0.4: {} rimraf@2.4.5: @@ -16296,6 +18388,28 @@ snapshots: optionalDependencies: fsevents: 2.3.3 + rollup@4.18.0: + dependencies: + '@types/estree': 1.0.5 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.18.0 + '@rollup/rollup-android-arm64': 4.18.0 + '@rollup/rollup-darwin-arm64': 4.18.0 + '@rollup/rollup-darwin-x64': 4.18.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.18.0 + '@rollup/rollup-linux-arm-musleabihf': 4.18.0 + '@rollup/rollup-linux-arm64-gnu': 4.18.0 + '@rollup/rollup-linux-arm64-musl': 4.18.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.18.0 + '@rollup/rollup-linux-riscv64-gnu': 4.18.0 + '@rollup/rollup-linux-s390x-gnu': 4.18.0 + '@rollup/rollup-linux-x64-gnu': 4.18.0 + 
'@rollup/rollup-linux-x64-musl': 4.18.0 + '@rollup/rollup-win32-arm64-msvc': 4.18.0 + '@rollup/rollup-win32-ia32-msvc': 4.18.0 + '@rollup/rollup-win32-x64-msvc': 4.18.0 + fsevents: 2.3.3 + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -16315,6 +18429,13 @@ snapshots: has-symbols: 1.0.3 isarray: 2.0.5 + safe-array-concat@1.1.2: + dependencies: + call-bind: 1.0.7 + get-intrinsic: 1.2.4 + has-symbols: 1.0.3 + isarray: 2.0.5 + safe-buffer@5.1.2: {} safe-buffer@5.2.1: {} @@ -16328,35 +18449,40 @@ snapshots: get-intrinsic: 1.2.1 is-regex: 1.1.4 + safe-regex-test@1.0.3: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-regex: 1.1.4 + safer-buffer@2.1.2: {} - sax@1.3.0: {} + sax@1.4.1: {} scheduler@0.24.0-canary-efb381bbf-20230505: dependencies: loose-envify: 1.4.0 + selfsigned@2.4.1: + dependencies: + '@types/node-forge': 1.3.11 + node-forge: 1.3.1 + semver@5.7.2: {} semver@6.3.1: {} - semver@7.3.2: {} - semver@7.5.1: dependencies: lru-cache: 6.0.0 - semver@7.5.3: - dependencies: - lru-cache: 6.0.0 - semver@7.5.4: dependencies: lru-cache: 6.0.0 - semver@7.6.0: - dependencies: - lru-cache: 6.0.0 + semver@7.6.1: {} + + semver@7.6.2: {} send@0.18.0: dependencies: @@ -16401,6 +18527,22 @@ snapshots: set-cookie-parser@2.6.0: {} + set-function-length@1.2.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + gopd: 1.0.1 + has-property-descriptors: 1.0.2 + + set-function-name@2.0.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + functions-have-names: 1.2.3 + has-property-descriptors: 1.0.2 + setimmediate@1.0.5: {} setprototypeof@1.2.0: {} @@ -16429,12 +18571,21 @@ snapshots: get-intrinsic: 1.2.1 object-inspect: 1.12.3 + side-channel@1.0.6: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + object-inspect: 1.13.1 + siginfo@2.0.0: {} signal-exit@3.0.7: {} signal-exit@4.0.2: {} + signal-exit@4.1.0: {} + simple-concat@1.0.1: {} simple-get@4.0.1: @@ 
-16449,14 +18600,18 @@ snapshots: bplist-parser: 0.3.1 plist: 3.1.0 - sirv@2.0.3: + sirv@2.0.4: dependencies: - '@polka/url': 1.0.0-next.21 - mrmime: 1.0.1 + '@polka/url': 1.0.0-next.25 + mrmime: 2.0.0 totalist: 3.0.1 sisteransi@1.0.5: {} + skin-tone@2.0.0: + dependencies: + unicode-emoji-modifier-base: 1.0.0 + slash@3.0.0: {} slash@4.0.0: {} @@ -16483,19 +18638,17 @@ snapshots: dependencies: agent-base: 6.0.2 debug: 4.3.4 - socks: 2.7.1 + socks: 2.8.3 transitivePeerDependencies: - supports-color optional: true - socks@2.7.1: + socks@2.8.3: dependencies: - ip: 2.0.0 + ip-address: 9.0.5 smart-buffer: 4.2.0 optional: true - source-map-js@1.0.2: {} - source-map-js@1.2.0: {} source-map-support@0.5.21: @@ -16547,40 +18700,48 @@ snapshots: sprintf-js@1.0.3: {} - sql.js@1.8.0: {} + sprintf-js@1.1.3: + optional: true + + sql.js@1.10.3: {} - sqlite3@5.1.6(encoding@0.1.13): + sqlite3@5.1.7: dependencies: - '@mapbox/node-pre-gyp': 1.0.10(encoding@0.1.13) - node-addon-api: 4.3.0 - tar: 6.1.13 + bindings: 1.5.0 + node-addon-api: 7.1.0 + prebuild-install: 7.1.2 + tar: 6.2.1 optionalDependencies: node-gyp: 8.4.1 transitivePeerDependencies: - bluebird - - encoding - supports-color sqlstring@2.3.3: {} - ssh2@1.11.0: + ssh2@1.15.0: dependencies: asn1: 0.2.6 bcrypt-pbkdf: 1.0.2 optionalDependencies: - cpu-features: 0.0.9 + cpu-features: 0.0.10 nan: 2.19.0 + ssri@10.0.6: + dependencies: + minipass: 7.1.2 + ssri@8.0.1: dependencies: minipass: 3.3.6 + optional: true - sst@3.0.4: + sst@3.0.14: dependencies: '@aws-sdk/client-lambda': 3.478.0 - hono: 4.2.1 + hono: 4.0.1 jose: 5.2.3 - openid-client: 5.6.5 + openid-client: 5.6.4 transitivePeerDependencies: - aws-crt @@ -16600,7 +18761,7 @@ snapshots: statuses@2.0.1: {} - std-env@3.3.3: {} + std-env@3.7.0: {} stream-buffers@2.2.0: {} @@ -16628,18 +18789,37 @@ snapshots: define-properties: 1.2.0 es-abstract: 1.22.1 + string.prototype.trim@1.2.9: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + 
es-object-atoms: 1.0.0 + string.prototype.trimend@1.0.6: dependencies: call-bind: 1.0.2 define-properties: 1.2.0 es-abstract: 1.22.1 + string.prototype.trimend@1.0.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-object-atoms: 1.0.0 + string.prototype.trimstart@1.0.6: dependencies: call-bind: 1.0.2 define-properties: 1.2.0 es-abstract: 1.22.1 + string.prototype.trimstart@1.0.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-object-atoms: 1.0.0 + string_decoder@1.1.1: dependencies: safe-buffer: 5.1.2 @@ -16656,10 +18836,6 @@ snapshots: dependencies: ansi-regex: 5.0.1 - strip-ansi@7.0.1: - dependencies: - ansi-regex: 6.0.1 - strip-ansi@7.1.0: dependencies: ansi-regex: 6.0.1 @@ -16680,9 +18856,9 @@ snapshots: strip-json-comments@3.1.1: {} - strip-literal@1.0.1: + strip-literal@2.1.0: dependencies: - acorn: 8.8.2 + js-tokens: 9.0.0 strnum@1.0.5: {} @@ -16728,6 +18904,11 @@ snapshots: has-flag: 4.0.0 supports-color: 7.2.0 + supports-hyperlinks@3.0.0: + dependencies: + has-flag: 4.0.0 + supports-color: 7.2.0 + supports-preserve-symlinks-flag@1.0.0: {} tar-fs@2.0.1: @@ -16752,15 +18933,6 @@ snapshots: inherits: 2.0.4 readable-stream: 3.6.2 - tar@6.1.13: - dependencies: - chownr: 2.0.0 - fs-minipass: 2.1.0 - minipass: 4.2.5 - minizlib: 2.1.2 - mkdirp: 1.0.4 - yallist: 4.0.0 - tar@6.2.1: dependencies: chownr: 2.0.0 @@ -16808,7 +18980,7 @@ snapshots: commander: 2.20.3 source-map-support: 0.5.21 - terser@5.30.3: + terser@5.31.0: dependencies: '@jridgewell/source-map': 0.3.6 acorn: 8.11.3 @@ -16847,15 +19019,15 @@ snapshots: es5-ext: 0.10.62 next-tick: 1.1.0 - tiny-queue@0.2.1: {} + tiny-invariant@1.3.3: {} - tinybench@2.5.0: {} + tiny-queue@0.2.1: {} - tinypool@0.5.0: {} + tinybench@2.8.0: {} - tinypool@0.7.0: {} + tinypool@0.8.4: {} - tinyspy@2.1.1: {} + tinyspy@2.2.1: {} tmp@0.0.33: dependencies: @@ -16879,21 +19051,49 @@ snapshots: dependencies: punycode: 2.3.0 - traverse@0.6.8: {} + traverse@0.6.9: + dependencies: + gopd: 1.0.1 + 
typedarray.prototype.slice: 1.0.3 + which-typed-array: 1.1.15 tree-kill@1.2.2: {} treeify@1.1.0: {} - ts-api-utils@1.0.3(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)): + ts-api-utils@1.0.3(typescript@5.2.2): + dependencies: + typescript: 5.2.2 + + ts-api-utils@1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): dependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + + ts-expose-internals-conditionally@1.0.0-empty.0: {} ts-interface-checker@0.1.13: {} - tsconfck@2.1.1(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)): + ts-node@10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.11 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.4 + '@types/node': 20.12.12 + acorn: 8.11.3 + acorn-walk: 8.3.2 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + + tsconfck@3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): optionalDependencies: - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) tsconfig-paths@3.14.2: dependencies: @@ -16904,13 +19104,9 @@ snapshots: tslib@1.14.1: {} - tslib@2.5.2: {} - - tslib@2.5.3: {} - tslib@2.6.2: {} - tsup@7.2.0(postcss@8.4.38)(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)): + tsup@7.2.0(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): dependencies: bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 @@ -16920,37 +19116,43 @@ snapshots: execa: 5.1.1 globby: 11.1.0 joycon: 3.1.1 - postcss-load-config: 4.0.1(postcss@8.4.38) + postcss-load-config: 
4.0.1(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))) resolve-from: 5.0.0 rollup: 3.27.2 source-map: 0.8.0-beta.0 sucrase: 3.34.0 tree-kill: 1.2.2 optionalDependencies: - postcss: 8.4.38 - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + postcss: 8.4.39 + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color - ts-node - tsutils@3.21.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)): + tsutils@3.21.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): dependencies: tslib: 1.14.1 - typescript: 5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq) + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) - tsx@3.12.6: + tsx@3.14.0: dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.1.0 - '@esbuild-kit/esm-loader': 2.5.5 + esbuild: 0.18.20 + get-tsconfig: 4.7.5 + source-map-support: 0.5.21 optionalDependencies: fsevents: 2.3.3 - tsx@3.12.7: + tsx@4.10.5: dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.1.0 - '@esbuild-kit/esm-loader': 2.5.5 + esbuild: 0.20.2 + get-tsconfig: 4.7.5 + optionalDependencies: + fsevents: 2.3.3 + + tsx@4.16.2: + dependencies: + esbuild: 0.21.5 + get-tsconfig: 4.7.5 optionalDependencies: fsevents: 2.3.3 @@ -17026,6 +19228,12 @@ snapshots: get-intrinsic: 1.2.1 is-typed-array: 1.1.12 + typed-array-buffer@1.0.2: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-typed-array: 1.1.13 + typed-array-byte-length@1.0.0: dependencies: call-bind: 1.0.2 @@ -17033,6 +19241,14 @@ snapshots: has-proto: 1.0.1 is-typed-array: 1.1.12 + typed-array-byte-length@1.0.1: + dependencies: + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-proto: 1.0.3 + is-typed-array: 1.1.13 + typed-array-byte-offset@1.0.0: dependencies: available-typed-arrays: 1.0.5 @@ -17041,19 +19257,48 @@ snapshots: has-proto: 1.0.1 is-typed-array: 1.1.12 + typed-array-byte-offset@1.0.2: + dependencies: + 
available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-proto: 1.0.3 + is-typed-array: 1.1.13 + typed-array-length@1.0.4: dependencies: call-bind: 1.0.2 for-each: 0.3.3 is-typed-array: 1.1.12 - typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq): {} + typed-array-length@1.0.6: + dependencies: + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-proto: 1.0.3 + is-typed-array: 1.1.13 + possible-typed-array-names: 1.0.0 + + typedarray.prototype.slice@1.0.3: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + typed-array-buffer: 1.0.2 + typed-array-byte-offset: 1.0.2 + + typescript@5.2.2: {} + + typescript@5.3.3: {} - ua-parser-js@1.0.37: {} + typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme): {} - ufo@1.1.2: {} + ua-parser-js@1.0.38: {} - ufo@1.3.1: {} + ufo@1.5.3: {} unbox-primitive@1.0.2: dependencies: @@ -17062,8 +19307,6 @@ snapshots: has-symbols: 1.0.3 which-boxed-primitive: 1.0.2 - undici-types@5.25.3: {} - undici-types@5.26.5: {} undici@5.28.2: @@ -17072,6 +19315,8 @@ snapshots: unicode-canonical-property-names-ecmascript@2.0.0: {} + unicode-emoji-modifier-base@1.0.0: {} + unicode-match-property-ecmascript@2.0.0: dependencies: unicode-canonical-property-names-ecmascript: 2.0.0 @@ -17084,10 +19329,20 @@ snapshots: unique-filename@1.1.1: dependencies: unique-slug: 2.0.2 + optional: true + + unique-filename@3.0.0: + dependencies: + unique-slug: 4.0.0 unique-slug@2.0.2: dependencies: imurmurhash: 0.1.4 + optional: true + + unique-slug@4.0.0: + dependencies: + imurmurhash: 0.1.4 unique-string@1.0.0: dependencies: @@ -17107,11 +19362,11 @@ snapshots: unpipe@1.0.0: {} - update-browserslist-db@1.0.13(browserslist@4.23.0): + update-browserslist-db@1.0.16(browserslist@4.23.0): dependencies: browserslist: 4.23.0 escalade: 3.1.2 - picocolors: 1.0.0 + picocolors: 1.0.1 uri-js@4.4.1: dependencies: @@ -17123,26 +19378,16 @@ snapshots: utf-8-validate@6.0.3: dependencies: - 
node-gyp-build: 4.6.0 + node-gyp-build: 4.8.1 util-deprecate@1.0.2: {} - util@0.12.5: - dependencies: - inherits: 2.0.4 - is-arguments: 1.1.1 - is-generator-function: 1.0.10 - is-typed-array: 1.1.12 - which-typed-array: 1.1.11 - utils-merge@1.0.1: {} uuid@7.0.3: {} uuid@8.3.2: {} - uuid@9.0.0: {} - uuid@9.0.1: {} uvu@0.5.6: @@ -17152,6 +19397,8 @@ snapshots: kleur: 4.1.5 sade: 1.8.1 + v8-compile-cache-lib@3.0.1: {} + valibot@0.30.0: {} valid-url@1.0.9: {} @@ -17167,156 +19414,249 @@ snapshots: validate-npm-package-name@4.0.0: dependencies: - builtins: 5.0.1 + builtins: 5.1.0 validate-npm-package-name@5.0.0: dependencies: - builtins: 5.0.1 + builtins: 5.1.0 vary@1.1.2: {} - vite-node@0.31.4(@types/node@20.8.7)(terser@5.30.3): + vite-node@1.6.0(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0): dependencies: cac: 6.7.14 debug: 4.3.4 - mlly: 1.3.0 - pathe: 1.1.1 - picocolors: 1.0.0 - vite: 4.3.9(@types/node@20.8.7)(terser@5.30.3) + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0) transitivePeerDependencies: - '@types/node' - less + - lightningcss - sass - stylus - sugarss - supports-color - terser - vite-node@0.34.6(@types/node@20.10.1)(terser@5.30.3): + vite-node@1.6.0(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0): dependencies: cac: 6.7.14 debug: 4.3.4 - mlly: 1.4.2 - pathe: 1.1.1 - picocolors: 1.0.0 - vite: 4.3.9(@types/node@20.10.1)(terser@5.30.3) + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vite-node@1.6.0(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + cac: 6.7.14 + debug: 4.3.4 + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) transitivePeerDependencies: - '@types/node' - less + - lightningcss - sass - 
stylus - sugarss - supports-color - terser - vite-tsconfig-paths@4.2.0(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq))(vite@4.3.9(@types/node@20.2.5)(terser@5.30.3)): + vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)): dependencies: debug: 4.3.4 globrex: 0.1.2 - tsconfck: 2.1.1(typescript@5.2.2(patch_hash=wmhs4olj6eveeldp6si4l46ssq)) + tsconfck: 3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: - vite: 4.3.9(@types/node@20.2.5)(terser@5.30.3) + vite: 5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0) transitivePeerDependencies: - supports-color - typescript - vite@4.3.9(@types/node@20.10.1)(terser@5.30.3): + vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)): dependencies: - esbuild: 0.17.19 - postcss: 8.4.24 - rollup: 3.27.2 + debug: 4.3.4 + globrex: 0.1.2 + tsconfck: 3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + optionalDependencies: + vite: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) + transitivePeerDependencies: + - supports-color + - typescript + + vite@5.2.12(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + esbuild: 0.20.2 + postcss: 8.4.38 + rollup: 4.18.0 + optionalDependencies: + '@types/node': 18.15.10 + fsevents: 2.3.3 + lightningcss: 1.25.1 + terser: 5.31.0 + + vite@5.2.12(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + esbuild: 0.20.2 + postcss: 8.4.38 + rollup: 4.18.0 optionalDependencies: '@types/node': 20.10.1 fsevents: 2.3.3 - terser: 5.30.3 + lightningcss: 1.25.1 + terser: 5.31.0 - vite@4.3.9(@types/node@20.2.5)(terser@5.30.3): + vite@5.2.12(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0): dependencies: - esbuild: 0.17.19 - postcss: 8.4.24 - rollup: 3.27.2 + esbuild: 0.20.2 + postcss: 
8.4.38 + rollup: 4.18.0 optionalDependencies: - '@types/node': 20.2.5 + '@types/node': 20.12.12 fsevents: 2.3.3 - terser: 5.30.3 + lightningcss: 1.25.1 + terser: 5.31.0 - vite@4.3.9(@types/node@20.8.7)(terser@5.30.3): + vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0): dependencies: - esbuild: 0.17.19 - postcss: 8.4.24 - rollup: 3.27.2 + esbuild: 0.21.5 + postcss: 8.4.39 + rollup: 4.18.0 optionalDependencies: - '@types/node': 20.8.7 + '@types/node': 18.15.10 fsevents: 2.3.3 - terser: 5.30.3 - - vitest@0.31.4(@vitest/ui@0.31.4)(terser@5.30.3): - dependencies: - '@types/chai': 4.3.5 - '@types/chai-subset': 1.3.3 - '@types/node': 20.8.7 - '@vitest/expect': 0.31.4 - '@vitest/runner': 0.31.4 - '@vitest/snapshot': 0.31.4 - '@vitest/spy': 0.31.4 - '@vitest/utils': 0.31.4 - acorn: 8.8.2 - acorn-walk: 8.2.0 - cac: 6.7.14 - chai: 4.3.7 - concordance: 5.0.4 + lightningcss: 1.25.1 + terser: 5.31.0 + + vite@5.3.3(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + esbuild: 0.21.5 + postcss: 8.4.39 + rollup: 4.18.0 + optionalDependencies: + '@types/node': 20.10.1 + fsevents: 2.3.3 + lightningcss: 1.25.1 + terser: 5.31.0 + + vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + esbuild: 0.21.5 + postcss: 8.4.39 + rollup: 4.18.0 + optionalDependencies: + '@types/node': 20.12.12 + fsevents: 2.3.3 + lightningcss: 1.25.1 + terser: 5.31.0 + + vitest@1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + '@vitest/expect': 1.6.0 + '@vitest/runner': 1.6.0 + '@vitest/snapshot': 1.6.0 + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + acorn-walk: 8.3.2 + chai: 4.4.1 debug: 4.3.4 - local-pkg: 0.4.3 - magic-string: 0.30.0 - pathe: 1.1.1 + execa: 8.0.1 + local-pkg: 0.5.0 + magic-string: 0.30.10 + pathe: 1.1.2 picocolors: 1.0.0 - std-env: 3.3.3 - strip-literal: 1.0.1 - tinybench: 2.5.0 - tinypool: 0.5.0 - vite: 4.3.9(@types/node@20.8.7)(terser@5.30.3) - vite-node: 
0.31.4(@types/node@20.8.7)(terser@5.30.3) + std-env: 3.7.0 + strip-literal: 2.1.0 + tinybench: 2.8.0 + tinypool: 0.8.4 + vite: 5.2.12(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0) + vite-node: 1.6.0(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0) why-is-node-running: 2.2.2 optionalDependencies: - '@vitest/ui': 0.31.4(vitest@0.31.4) + '@types/node': 18.15.10 + '@vitest/ui': 1.6.0(vitest@1.6.0) transitivePeerDependencies: - less + - lightningcss - sass - stylus - sugarss - supports-color - terser - vitest@0.34.6(@vitest/ui@0.31.4)(terser@5.30.3): + vitest@1.6.0(@types/node@20.10.1)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0): dependencies: - '@types/chai': 4.3.5 - '@types/chai-subset': 1.3.3 + '@vitest/expect': 1.6.0 + '@vitest/runner': 1.6.0 + '@vitest/snapshot': 1.6.0 + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + acorn-walk: 8.3.2 + chai: 4.4.1 + debug: 4.3.4 + execa: 8.0.1 + local-pkg: 0.5.0 + magic-string: 0.30.10 + pathe: 1.1.2 + picocolors: 1.0.0 + std-env: 3.7.0 + strip-literal: 2.1.0 + tinybench: 2.8.0 + tinypool: 0.8.4 + vite: 5.2.12(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0) + vite-node: 1.6.0(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0) + why-is-node-running: 2.2.2 + optionalDependencies: '@types/node': 20.10.1 - '@vitest/expect': 0.34.6 - '@vitest/runner': 0.34.6 - '@vitest/snapshot': 0.34.6 - '@vitest/spy': 0.34.6 - '@vitest/utils': 0.34.6 - acorn: 8.10.0 - acorn-walk: 8.2.0 - cac: 6.7.14 - chai: 4.3.10 + '@vitest/ui': 1.6.0(vitest@1.6.0) + transitivePeerDependencies: + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vitest@1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + '@vitest/expect': 1.6.0 + '@vitest/runner': 1.6.0 + '@vitest/snapshot': 1.6.0 + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + acorn-walk: 8.3.2 + chai: 4.4.1 debug: 4.3.4 - local-pkg: 0.4.3 - magic-string: 0.30.5 - pathe: 1.1.1 + 
execa: 8.0.1 + local-pkg: 0.5.0 + magic-string: 0.30.10 + pathe: 1.1.2 picocolors: 1.0.0 - std-env: 3.3.3 - strip-literal: 1.0.1 - tinybench: 2.5.0 - tinypool: 0.7.0 - vite: 4.3.9(@types/node@20.10.1)(terser@5.30.3) - vite-node: 0.34.6(@types/node@20.10.1)(terser@5.30.3) + std-env: 3.7.0 + strip-literal: 2.1.0 + tinybench: 2.8.0 + tinypool: 0.8.4 + vite: 5.2.12(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) + vite-node: 1.6.0(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) why-is-node-running: 2.2.2 optionalDependencies: - '@vitest/ui': 0.31.4(vitest@0.34.6) + '@types/node': 20.12.12 + '@vitest/ui': 1.6.0(vitest@1.6.0) transitivePeerDependencies: - less + - lightningcss - sass - stylus - sugarss @@ -17382,6 +19722,14 @@ snapshots: gopd: 1.0.1 has-tostringtag: 1.0.0 + which-typed-array@1.1.15: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.2 + which@1.3.1: dependencies: isexe: 2.0.0 @@ -17402,6 +19750,7 @@ snapshots: wide-align@1.1.5: dependencies: string-width: 4.2.3 + optional: true wonka@4.0.15: {} @@ -17433,11 +19782,6 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 3.0.7 - write-file-atomic@5.0.0: - dependencies: - imurmurhash: 0.1.4 - signal-exit: 3.0.7 - write-file-atomic@5.0.1: dependencies: imurmurhash: 0.1.4 @@ -17455,25 +19799,12 @@ snapshots: bufferutil: 4.0.8 utf-8-validate: 6.0.3 - ws@8.13.0(bufferutil@4.0.7)(utf-8-validate@6.0.3): - optionalDependencies: - bufferutil: 4.0.7 - utf-8-validate: 6.0.3 - - ws@8.14.2: - optional: true - - ws@8.14.2(bufferutil@4.0.7)(utf-8-validate@6.0.3): - optionalDependencies: - bufferutil: 4.0.7 - utf-8-validate: 6.0.3 - ws@8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3): optionalDependencies: bufferutil: 4.0.8 utf-8-validate: 6.0.3 - ws@8.16.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): + ws@8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): optionalDependencies: bufferutil: 4.0.8 utf-8-validate: 6.0.3 @@ -17485,7 +19816,7 @@ 
snapshots: xml2js@0.6.0: dependencies: - sax: 1.3.0 + sax: 1.4.1 xmlbuilder: 11.0.1 xmlbuilder@11.0.1: {} @@ -17506,7 +19837,7 @@ snapshots: yaml@2.3.1: {} - yaml@2.4.1: {} + yaml@2.4.2: {} yargs-parser@18.1.3: dependencies: @@ -17534,23 +19865,13 @@ snapshots: yargs@16.2.0: dependencies: cliui: 7.0.4 - escalade: 3.1.1 + escalade: 3.1.2 get-caller-file: 2.0.5 require-directory: 2.1.1 string-width: 4.2.3 y18n: 5.0.8 yargs-parser: 20.2.9 - yargs@17.7.1: - dependencies: - cliui: 8.0.1 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 21.1.1 - yargs@17.7.2: dependencies: cliui: 8.0.1 @@ -17561,28 +19882,30 @@ snapshots: y18n: 5.0.8 yargs-parser: 21.1.1 + yn@3.1.1: {} + yocto-queue@0.1.0: {} yocto-queue@1.0.0: {} zod@3.21.4: {} - zod@3.22.2: {} + zod@3.23.7: {} zx@7.2.2: dependencies: - '@types/fs-extra': 11.0.1 + '@types/fs-extra': 11.0.4 '@types/minimist': 1.2.2 - '@types/node': 18.16.16 + '@types/node': 18.19.33 '@types/ps-tree': 1.1.2 '@types/which': 3.0.0 chalk: 5.3.0 fs-extra: 11.1.1 fx: 28.0.0 - globby: 13.1.4 + globby: 13.2.2 minimist: 1.2.8 node-fetch: 3.3.1 ps-tree: 1.2.0 webpod: 0.0.2 which: 3.0.1 - yaml: 2.3.1 + yaml: 2.4.2