diff --git a/.env b/.env index 221802a..0bb8df8 100644 --- a/.env +++ b/.env @@ -1,4 +1,11 @@ +PGHOST=localhost +PGPORT=5432 +PGDATABASE=travis_ci_test +PGUSER=postgres +PGPASSWORD=postgres +PGSSL=false JWT_SECRET=dummysecret VHOST=api.destinyitemmanager.com STATELY_STORE_ID=4691621389625154 STATELY_REGION=us-west-2 +STATELY_ACCESS_KEY="CAISRzBFAiEAhBwlEIYOXbLFzWyqZsTn3iLbyBUCjOVL8HzaAwDz6WkCIBf1QSVzt5VLV0VckmSsv2D3OHvnzHnctfTDDPifUOsDGgkI0sHq9_DVqAM" \ No newline at end of file diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 156d237..3de3252 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -50,6 +50,31 @@ jobs: - name: Save DigitalOcean kubeconfig with short-lived credentials run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{secrets.K8S_CLUSTER}} + - name: Add IP address to trusted source (managed database) + uses: GarreauArthur/manage-digital-ocean-managed-database-trusted-sources-gh-action@main + with: + action: 'add' + database_id: ${{ secrets.DATABASE_ID }} + digitalocean_token: ${{ secrets.DIGITALOCEAN_TOKEN }} + + - name: Run DB migrations + run: cd api && npx db-migrate up -e prod + env: + DATABASE_USER: ${{ secrets.DATABASE_USER }} + DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD }} + DATABASE_HOST: ${{ secrets.DATABASE_HOST }} + DATABASE_NAME: ${{ secrets.DATABASE_NAME }} + DATABASE_PORT: ${{ secrets.DATABASE_PORT }} + + - name: Remove IP address from trusted sources (managed database) + if: always() + continue-on-error: true + uses: GarreauArthur/manage-digital-ocean-managed-database-trusted-sources-gh-action@main + with: + action: 'remove' + database_id: ${{ secrets.DATABASE_ID }} + digitalocean_token: ${{ secrets.DIGITALOCEAN_TOKEN }} + - name: Build and deploy run: pnpm run deploy diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index b8bdbe6..2db7c69 100644 --- a/.github/workflows/pr-build.yml +++ 
b/.github/workflows/pr-build.yml @@ -7,7 +7,7 @@ on: pull_request jobs: build: runs-on: ubuntu-latest - environment: 'test' + environment: "test" steps: - uses: actions/checkout@v4 @@ -17,7 +17,7 @@ jobs: - name: Setup Node uses: actions/setup-node@v4 with: - node-version-file: '.nvmrc' + node-version-file: ".nvmrc" cache: pnpm - name: Install diff --git a/api/apps/index.ts b/api/apps/index.ts index 66e2c2f..0ddcd2d 100644 --- a/api/apps/index.ts +++ b/api/apps/index.ts @@ -1,10 +1,11 @@ import * as Sentry from '@sentry/node'; -import { ListToken } from '@stately-cloud/client'; import { keyBy } from 'es-toolkit'; import { RequestHandler } from 'express'; +import { addAllApps, getAllApps as getAllAppsPostgres } from '../db/apps-queries.js'; +import { pool } from '../db/index.js'; import { metrics } from '../metrics/index.js'; import { ApiApp } from '../shapes/app.js'; -import { getAllApps, updateApps } from '../stately/apps-queries.js'; +import { getAllApps as getAllAppsStately } from '../stately/apps-queries.js'; /** * Express middleware that requires an API key be provided in a header @@ -42,7 +43,6 @@ let apps: ApiApp[] = []; let appsByApiKey: { [apiKey: string]: ApiApp }; let origins = new Set(); let appsInterval: NodeJS.Timeout | null = null; -let token: ListToken | undefined; export function stopAppsRefresh() { if (appsInterval) { @@ -70,21 +70,27 @@ export async function refreshApps(): Promise { stopAppsRefresh(); try { - if (!token) { - // First time, get 'em all - const [appsFromStately, newToken] = await getAllApps(); + if (apps.length === 0) { + // Start off with a copy from StatelyDB, just in case postgres is having + // problems. 
+ const [appsFromStately] = await getAllAppsStately(); if (appsFromStately.length > 0) { apps = appsFromStately; digestApps(); - token = newToken; } + } + + const appsFromPostgres = await fetchAppsFromPostgres(); + + if (appsFromPostgres.length > 0) { + apps = appsFromPostgres; + digestApps(); } else { - // After that, use a sync to update them - const [appsFromStately, newToken] = await updateApps(token, apps); - if (appsFromStately.length > 0) { - apps = appsFromStately; - digestApps(); - token = newToken; + // import them into Postgres + try { + await addAllApps(apps); + } catch (e) { + console.error('Error importing apps into Postgres', e); } } metrics.increment('apps.refresh.success.count'); @@ -101,6 +107,16 @@ export async function refreshApps(): Promise { } } +async function fetchAppsFromPostgres() { + const client = await pool.connect(); + try { + const appsFromPostgres = await getAllAppsPostgres(client); + return appsFromPostgres; + } finally { + client.release(); + } +} + function digestApps() { appsByApiKey = keyBy(apps, (a) => a.dimApiKey.toLowerCase()); origins = new Set(); diff --git a/api/database.json b/api/database.json index 055e2df..3500f08 100644 --- a/api/database.json +++ b/api/database.json @@ -4,7 +4,8 @@ "user": "postgres", "password": "postgres", "host": "localhost", - "database": "travis_ci_test" + "database": "travis_ci_test", + "port": 5432 }, "dev": { "driver": "pg", @@ -16,14 +17,24 @@ }, "prod": { "driver": "pg", - "user": { "ENV": "DATABASE_USER" }, - "password": { "ENV": "DATABASE_PASSWORD" }, - "host": { "ENV": "DATABASE_HOST" }, - "database": { "ENV": "DATABASE_NAME" }, - "port": { "ENV": "DATABASE_PORT" }, + "user": { + "ENV": "DATABASE_USER" + }, + "password": { + "ENV": "DATABASE_PASSWORD" + }, + "host": { + "ENV": "DATABASE_HOST" + }, + "database": { + "ENV": "DATABASE_NAME" + }, + "port": { + "ENV": "DATABASE_PORT" + }, "ssl": { "rejectUnauthorized": false, "ca": "./ca-certificate.crt" } } -} +} \ No newline at end of file 
diff --git a/api/db/apps-queries.test.ts b/api/db/apps-queries.test.ts new file mode 100644 index 0000000..d6c687d --- /dev/null +++ b/api/db/apps-queries.test.ts @@ -0,0 +1,52 @@ +import { DatabaseError } from 'pg-protocol'; +import { v4 as uuid } from 'uuid'; +import { ApiApp } from '../shapes/app.js'; +import { getAllApps, getAppById, insertApp } from './apps-queries.js'; +import { closeDbPool, pool, transaction } from './index.js'; + +const appId = 'apps-queries-test-app'; +const app: ApiApp = { + id: 'apps-queries-test-app', + bungieApiKey: 'foo', + origin: 'https://localhost', + dimApiKey: uuid(), +}; + +beforeEach(async () => pool.query({ text: 'delete from apps where id = $1', values: [appId] })); + +afterAll(async () => closeDbPool()); + +it('can create a new app', async () => { + await transaction(async (client) => { + expect(await getAppById(client, appId)).toBeNull(); + + await insertApp(client, app); + + const fetchedApp = await getAppById(client, appId); + expect(fetchedApp?.dimApiKey).toEqual(app.dimApiKey); + }); +}); + +it('cannot create a new app with the same name as an existing one', async () => { + await transaction(async (client) => { + await insertApp(client, app); + try { + await insertApp(client, app); + } catch (e) { + if (!(e instanceof DatabaseError)) { + fail('should have thrown a DatabaseError'); + } + expect(e.code).toBe('23505'); + } + }); +}); + +it('can get all apps', async () => { + await transaction(async (client) => { + await insertApp(client, app); + + const apps = await getAllApps(client); + expect(apps.length).toBeGreaterThanOrEqual(1); + expect(apps.find((a) => a.id === appId)?.dimApiKey).toBe(app.dimApiKey); + }); +}); diff --git a/api/db/apps-queries.ts b/api/db/apps-queries.ts new file mode 100644 index 0000000..685545d --- /dev/null +++ b/api/db/apps-queries.ts @@ -0,0 +1,51 @@ +import { ClientBase, QueryResult } from 'pg'; +import { ApiApp } from '../shapes/app.js'; +import { camelize, KeysToSnakeCase, TypesForKeys } 
from '../utils.js'; +import { transaction } from './index.js'; + +/** + * Get all registered apps. + */ +export async function getAllApps(client: ClientBase): Promise { + const results = await client.query>({ + name: 'get_all_apps', + text: 'SELECT * FROM apps', + }); + return results.rows.map((row) => camelize(row)); +} + +export async function addAllApps(apps: ApiApp[]): Promise { + await transaction(async (client) => { + for (const app of apps) { + await insertApp(client, app); + } + }); +} + +/** + * Get an app by its ID. + */ +export async function getAppById(client: ClientBase, id: string): Promise { + const results = await client.query>({ + name: 'get_apps', + text: 'SELECT * FROM apps where id = $1', + values: [id], + }); + if (results.rows.length > 0) { + return camelize(results.rows[0]); + } else { + return null; + } +} + +/** + * Insert a new app into the list of registered apps. + */ +export async function insertApp(client: ClientBase, app: ApiApp): Promise { + return client.query>({ + name: 'insert_app', + text: `insert into apps (id, bungie_api_key, dim_api_key, origin) +values ($1, $2, $3, $4)`, + values: [app.id, app.bungieApiKey, app.dimApiKey, app.origin], + }); +} diff --git a/api/db/dbmigrate-permissions.sql b/api/db/dbmigrate-permissions.sql new file mode 100644 index 0000000..fd58731 --- /dev/null +++ b/api/db/dbmigrate-permissions.sql @@ -0,0 +1,12 @@ +-- Grant permissions to dbmigrate user for defaultdb schema +GRANT USAGE ON SCHEMA public TO dbmigrate; + +-- Grant CREATE privilege on schema to allow table creation +GRANT CREATE ON SCHEMA public TO dbmigrate; + +-- Set default privileges for future tables +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO dbmigrate; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO dimapi; + +-- Set default privileges for future sequences +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO dbmigrate; diff --git 
a/api/db/global-settings-queries.ts b/api/db/global-settings-queries.ts new file mode 100644 index 0000000..8d2db9c --- /dev/null +++ b/api/db/global-settings-queries.ts @@ -0,0 +1,23 @@ +import { GlobalSettings } from '../shapes/global-settings.js'; +import { pool } from './index.js'; + +export async function getGlobalSettingsQuery(flavor: string) { + return pool.query<{ settings: GlobalSettings }>({ + name: 'get_global_settings', + text: 'SELECT * FROM global_settings where flavor = $1 LIMIT 1', + values: [flavor], + }); +} + +export async function setGlobalSettings(flavor: string, settings: Partial) { + return pool.query({ + name: 'set_global_settings', + text: ` + INSERT INTO global_settings (flavor, settings, updated_at) + VALUES ($1, $2, NOW()) + ON CONFLICT (flavor) + DO UPDATE SET settings = (global_settings.settings || $2) + `, + values: [flavor, settings], + }); +} diff --git a/api/db/index.test.ts b/api/db/index.test.ts new file mode 100644 index 0000000..1a78f5f --- /dev/null +++ b/api/db/index.test.ts @@ -0,0 +1,96 @@ +import { closeDbPool, pool, readTransaction, transaction } from './index.js'; + +beforeEach(async () => { + try { + await pool.query(`DROP TABLE transaction_test`); + } catch {} + await pool.query(`CREATE TABLE transaction_test ( + id int PRIMARY KEY NOT NULL, + test text + )`); +}); + +interface TransactionTestRow { + id: number; + test: string; +} + +afterAll(async () => { + try { + await pool.query(`DROP TABLE transaction_test`); + } catch {} + await closeDbPool(); +}); + +describe('transaction', () => { + it('rolls back on errors', async () => { + await pool.query("insert into transaction_test (id, test) values (1, 'testing')"); + + try { + await transaction(async (client) => { + await client.query("insert into transaction_test (id, test) values (2, 'testing')"); + throw new Error('oops'); + }); + fail('should have thrown an error'); + } catch (e) { + expect((e as Error).message).toBe('oops'); + } + + const result = await 
pool.query('select * from transaction_test'); + expect(result.rows.length).toBe(1); + expect(result.rows[0].id).toBe(1); + }); + + it('commits automatically', async () => { + await transaction(async (client) => { + await client.query("insert into transaction_test (id, test) values (3, 'testing commits')"); + }); + + const result = await pool.query('select * from transaction_test'); + expect(result.rows.length).toBe(1); + expect(result.rows[0].test).toBe('testing commits'); + }); +}); + +describe('readTransaction', () => { + it('has read-committed isolation', async () => { + await pool.query("insert into transaction_test (id, test) values (1, 'testing')"); + + await readTransaction(async (client) => { + // In a different client, update a row + const otherClient = await pool.connect(); + try { + await otherClient.query('BEGIN'); + + await otherClient.query("update transaction_test set test = 'updated' where id = 1"); + + // Now request that info from our original client. + // should be read-committed, so we shouldn't see that update + const result = await client.query( + 'select * from transaction_test where id = 1', + ); + expect(result.rows[0].test).toBe('testing'); + + // Commit the update + await otherClient.query('COMMIT'); + } catch (e) { + await otherClient.query('ROLLBACK'); + throw e; + } finally { + otherClient.release(); + } + + // once that other transaction commits, we'll see its update + const result = await client.query( + 'select * from transaction_test where id = 1', + ); + expect(result.rows[0].test).toBe('updated'); + }); + + // outside, we should still see the transactional update + const result = await pool.query( + 'select * from transaction_test where id = 1', + ); + expect(result.rows[0].test).toBe('updated'); + }); +}); diff --git a/api/db/index.ts b/api/db/index.ts new file mode 100644 index 0000000..588295e --- /dev/null +++ b/api/db/index.ts @@ -0,0 +1,75 @@ +import pg, { ClientBase } from 'pg'; +import { metrics } from 
'../metrics/index.js'; + +// pools will use environment variables +// for connection information (from .env or a ConfigMap) +export const pool = new pg.Pool({ + max: 8, + ssl: process.env.PGSSL ? process.env.PGSSL === 'true' : { rejectUnauthorized: false }, + connectionTimeoutMillis: 500, + // Statement query is at the Postgres side, times out any individual query + statement_timeout: 750, + // Query timeout is on the NodeJS side, it times out the an operation on the client + query_timeout: 1000, +}); + +pool.on('connect', () => { + metrics.increment('db.pool.connect.count'); +}); +pool.on('acquire', () => { + metrics.increment('db.pool.acquire.count'); +}); +pool.on('error', (e: Error) => { + metrics.increment('db.pool.error.count'); + metrics.increment(`db.pool.error.${e.name}.count`); +}); +pool.on('remove', () => { + metrics.increment('db.pool.remove.count'); +}); + +const metricsInterval = setInterval(() => { + metrics.gauge('db.pool.total', pool.totalCount); + metrics.gauge('db.pool.idle', pool.idleCount); + metrics.gauge('db.pool.waiting', pool.waitingCount); +}, 10000); + +export async function closeDbPool() { + clearInterval(metricsInterval); + return pool.end(); +} + +/** + * A helper that gets a connection from the pool and then executes fn within a transaction. + */ +export async function transaction(fn: (client: ClientBase) => Promise) { + const client = await pool.connect(); + try { + await client.query('BEGIN'); + + const result = await fn(client); + + await client.query('COMMIT'); + + return result; + } catch (e) { + await client.query('ROLLBACK'); + throw e; + } finally { + client.release(); + } +} + +/** + * A helper that gets a connection from the pool and then executes fn within a transaction that's only meant for reads. + */ +export async function readTransaction(fn: (client: ClientBase) => Promise) { + const client = await pool.connect(); + try { + // We used to wrap multiple reads in a transaction but I'm not sure it matters all that much. 
+ // await client.query('BEGIN'); + return await fn(client); + } finally { + // await client.query('ROLLBACK'); + client.release(); + } +} diff --git a/api/db/item-annotations-queries.test.ts b/api/db/item-annotations-queries.test.ts new file mode 100644 index 0000000..de1c4c7 --- /dev/null +++ b/api/db/item-annotations-queries.test.ts @@ -0,0 +1,134 @@ +import { closeDbPool, transaction } from './index.js'; +import { + deleteAllItemAnnotations, + deleteItemAnnotation, + deleteItemAnnotationList, + getItemAnnotationsForProfile, + updateItemAnnotation, +} from './item-annotations-queries.js'; + +const platformMembershipId = '213512057'; +const bungieMembershipId = 4321; + +beforeEach(() => + transaction(async (client) => { + await deleteAllItemAnnotations(client, bungieMembershipId); + }), +); + +afterAll(async () => closeDbPool()); + +it('can insert tags where none exist before', async () => { + await transaction(async (client) => { + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + + const annotations = await getItemAnnotationsForProfile(client, platformMembershipId, 2); + expect(annotations[0]).toEqual({ + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + }); +}); + +it('can update tags where none exist before', async () => { + await transaction(async (client) => { + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'junk', + notes: 'the worst', + }); + + const annotations = await getItemAnnotationsForProfile(client, platformMembershipId, 2); + expect(annotations[0]).toEqual({ + id: '123456', + tag: 'junk', + notes: 'the worst', + }); + }); +}); + +it('can update tags clearing value', async () => { + await transaction(async (client) => { + await 
updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: null, + }); + + const annotations = await getItemAnnotationsForProfile(client, platformMembershipId, 2); + expect(annotations[0]).toEqual({ + id: '123456', + notes: 'the best', + }); + }); +}); + +it('can delete tags', async () => { + await transaction(async (client) => { + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + + await deleteItemAnnotation(client, platformMembershipId, '123456'); + + const annotations = await getItemAnnotationsForProfile(client, platformMembershipId, 2); + expect(annotations).toEqual([]); + }); +}); + +it('can delete tags by setting both values to null/empty', async () => { + await transaction(async (client) => { + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: null, + notes: '', + }); + + const annotations = await getItemAnnotationsForProfile(client, platformMembershipId, 2); + expect(annotations).toEqual([]); + }); +}); + +it('can clear tags', async () => { + await transaction(async (client) => { + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '123456', + tag: 'favorite', + notes: 'the best', + }); + await updateItemAnnotation(client, bungieMembershipId, platformMembershipId, 2, { + id: '654321', + tag: 'junk', + notes: 'the worst', + }); + + await deleteItemAnnotationList(client, platformMembershipId, ['123456', '654321']); + + const annotations = await getItemAnnotationsForProfile(client, platformMembershipId, 2); + expect(annotations).toEqual([]); + 
}); +}); diff --git a/api/db/item-annotations-queries.ts b/api/db/item-annotations-queries.ts new file mode 100644 index 0000000..b1bbc7c --- /dev/null +++ b/api/db/item-annotations-queries.ts @@ -0,0 +1,183 @@ +import { ClientBase, QueryResult } from 'pg'; +import { metrics } from '../metrics/index.js'; +import { DestinyVersion } from '../shapes/general.js'; +import { ItemAnnotation, TagValue } from '../shapes/item-annotations.js'; + +interface ItemAnnotationRow { + inventory_item_id: string; + tag: TagValue | null; + notes: string | null; + crafted_date: Date | null; +} + +// eslint-disable-next-line no-restricted-syntax +export enum TagValueEnum { + clear = 0, + favorite = 1, + keep = 2, + infuse = 3, + junk = 4, + archive = 5, +} + +/** + * Get all of the item annotations for a particular platform_membership_id and destiny_version. + */ +export async function getItemAnnotationsForProfile( + client: ClientBase, + platformMembershipId: string, + destinyVersion: DestinyVersion, +): Promise { + const results = await client.query({ + name: 'get_item_annotations', + text: 'SELECT inventory_item_id, tag, notes, crafted_date FROM item_annotations WHERE platform_membership_id = $1 and destiny_version = $2 and deleted_at IS NULL', + values: [platformMembershipId, destinyVersion], + }); + return results.rows.map(convertItemAnnotation); +} + +/** + * Get ALL of the item annotations for a particular user across all platforms. + */ +export async function getAllItemAnnotationsForUser( + client: ClientBase, + bungieMembershipId: number, +): Promise< + { + platformMembershipId: string; + destinyVersion: DestinyVersion; + annotation: ItemAnnotation; + }[] +> { + // TODO: this isn't indexed! 
+ const results = await client.query< + ItemAnnotationRow & { platform_membership_id: string; destiny_version: DestinyVersion } + >({ + name: 'get_all_item_annotations', + text: 'SELECT platform_membership_id, destiny_version, inventory_item_id, tag, notes, crafted_date FROM item_annotations WHERE inventory_item_id != 0 and platform_membership_id = $1 and deleted_at IS NULL', + values: [bungieMembershipId], + }); + return results.rows.map((row) => ({ + platformMembershipId: row.platform_membership_id, + destinyVersion: row.destiny_version, + annotation: convertItemAnnotation(row), + })); +} + +function convertItemAnnotation(row: ItemAnnotationRow): ItemAnnotation { + const result: ItemAnnotation = { + id: row.inventory_item_id, + }; + if (row.tag) { + result.tag = TagValueEnum[row.tag] as unknown as TagValue; + } + if (row.notes) { + result.notes = row.notes; + } + if (row.crafted_date) { + result.craftedDate = row.crafted_date.getTime() / 1000; + } + return result; +} + +/** + * Insert or update (upsert) a single item annotation. 
+ */ +export async function updateItemAnnotation( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + destinyVersion: DestinyVersion, + itemAnnotation: ItemAnnotation, +): Promise { + const tagValue = clearValue(itemAnnotation.tag); + const notesValue = clearValue(itemAnnotation.notes); + + if (tagValue === 'clear' && notesValue === 'clear') { + return deleteItemAnnotation(client, platformMembershipId, itemAnnotation.id); + } + const response = await client.query({ + name: 'upsert_item_annotation', + text: `insert INTO item_annotations (membership_id, platform_membership_id, destiny_version, inventory_item_id, tag, notes, crafted_date) +values ($1, $2, $3, $4, (CASE WHEN $5 = 0 THEN NULL ELSE $5 END), (CASE WHEN $6 = 'clear' THEN NULL ELSE $6 END), $7) +on conflict (platform_membership_id, inventory_item_id) +do update set (tag, notes, crafted_date, deleted_at) = ((CASE WHEN $5 = 0 THEN NULL WHEN $5 IS NULL THEN item_annotations.tag ELSE $5 END), (CASE WHEN $6 = 'clear' THEN NULL WHEN $6 IS NULL THEN item_annotations.notes ELSE $6 END), $7, null)`, + values: [ + bungieMembershipId, // $1 + platformMembershipId, // $2 + destinyVersion, // $3 + itemAnnotation.id, // $4 + tagValue === null ? null : TagValueEnum[tagValue], // $5 + notesValue, // $6 + itemAnnotation.craftedDate ? new Date(itemAnnotation.craftedDate * 1000) : null, // $7 + ], + }); + + if (response.rowCount! < 1) { + // This should never happen! + metrics.increment('db.itemAnnotations.noRowUpdated.count', 1); + throw new Error('tags - No row was updated'); + } + + return response; +} + +/** + * If the value is explicitly set to null or empty string, we return "clear" which will remove the value from the database. + * If it's undefined we return null, which will preserve the existing value. + * If it's set, we'll return the input which will update the existing value. 
+ */ +function clearValue(val: T | null | undefined): T | 'clear' | null { + if (val === null || val?.length === 0) { + return 'clear'; + } else if (!val) { + return null; + } else { + return val; + } +} + +/** + * Delete an item annotation. + */ +export async function deleteItemAnnotation( + client: ClientBase, + platformMembershipId: string, + inventoryItemId: string, +): Promise { + return client.query({ + name: 'delete_item_annotation', + text: `update item_annotations set (tag, notes, deleted_at) = (null, null, now()) where platform_membership_id = $1 and inventory_item_id = $2`, + values: [platformMembershipId, inventoryItemId], + }); +} + +/** + * Delete a list of annotations. + */ +export async function deleteItemAnnotationList( + client: ClientBase, + platformMembershipId: string, + inventoryItemIds: string[], +): Promise { + return client.query({ + name: 'delete_item_annotation_list', + text: `update item_annotations set (tag, notes, deleted_at) = (null, null, now()) where platform_membership_id = $1 and inventory_item_id::bigint = ANY($2::bigint[])`, + values: [platformMembershipId, inventoryItemIds], + }); +} + +/** + * Delete all item annotations for a user (on all platforms). 
+ * @deprecated + */ +export async function deleteAllItemAnnotations( + client: ClientBase, + bungieMembershipId: number, +): Promise { + return client.query({ + name: 'delete_all_item_annotations', + text: `delete from item_annotations where membership_id = $1`, + values: [bungieMembershipId], + }); +} diff --git a/api/db/item-hash-tags-queries.test.ts b/api/db/item-hash-tags-queries.test.ts new file mode 100644 index 0000000..4998e78 --- /dev/null +++ b/api/db/item-hash-tags-queries.test.ts @@ -0,0 +1,113 @@ +import { closeDbPool, transaction } from './index.js'; +import { + deleteAllItemHashTags, + deleteItemHashTag, + getItemHashTagsForProfile, + updateItemHashTag, +} from './item-hash-tags-queries.js'; + +const bungieMembershipId = 4321; +const platformMembershipId = '213512057'; + +beforeEach(() => + transaction(async (client) => { + await deleteAllItemHashTags(client, platformMembershipId); + }), +); + +afterAll(async () => closeDbPool()); + +it('can insert item hash tags where none exist before', async () => { + await transaction(async (client) => { + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: 'favorite', + notes: 'the best', + }); + + const annotations = await getItemHashTagsForProfile(client, platformMembershipId); + expect(annotations[0]).toEqual({ + hash: 2926662838, + tag: 'favorite', + notes: 'the best', + }); + }); +}); + +it('can update item hash tags where none exist before', async () => { + await transaction(async (client) => { + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: 'favorite', + notes: 'the best', + }); + + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: 'junk', + notes: 'the worst', + }); + + const annotations = await getItemHashTagsForProfile(client, platformMembershipId); + expect(annotations[0]).toEqual({ + hash: 2926662838, + tag: 'junk', + notes: 'the worst', + 
}); + }); +}); + +it('can update item hash tags clearing value', async () => { + await transaction(async (client) => { + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: 'favorite', + notes: 'the best', + }); + + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: null, + }); + + const annotations = await getItemHashTagsForProfile(client, platformMembershipId); + expect(annotations[0]).toEqual({ + hash: 2926662838, + notes: 'the best', + }); + }); +}); + +it('can delete item hash tags', async () => { + await transaction(async (client) => { + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: 'favorite', + notes: 'the best', + }); + + await deleteItemHashTag(client, platformMembershipId, 2926662838); + + const annotations = await getItemHashTagsForProfile(client, platformMembershipId); + expect(annotations).toEqual([]); + }); +}); + +it('can delete item hash tags by setting both values to null/empty', async () => { + await transaction(async (client) => { + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: 'favorite', + notes: 'the best', + }); + + await updateItemHashTag(client, bungieMembershipId, platformMembershipId, { + hash: 2926662838, + tag: null, + notes: '', + }); + + const annotations = await getItemHashTagsForProfile(client, platformMembershipId); + expect(annotations).toEqual([]); + }); +}); diff --git a/api/db/item-hash-tags-queries.ts b/api/db/item-hash-tags-queries.ts new file mode 100644 index 0000000..361aab3 --- /dev/null +++ b/api/db/item-hash-tags-queries.ts @@ -0,0 +1,122 @@ +import { ClientBase, QueryResult } from 'pg'; +import { metrics } from '../metrics/index.js'; +import { ItemHashTag, TagValue } from '../shapes/item-annotations.js'; +import { TagValueEnum } from './item-annotations-queries.js'; + +interface ItemHashTagRow { + 
item_hash: string; + tag: TagValue | null; + notes: string | null; +} + +/** + * Get all of the hash tags for a particular platform_membership_id and destiny_version. + */ +export async function getItemHashTagsForProfile( + client: ClientBase, + platformMembershipId: string, +): Promise { + const results = await client.query({ + name: 'get_item_hash_tags', + text: 'SELECT item_hash, tag, notes FROM item_hash_tags WHERE platform_membership_id = $1 and deleted_at IS NULL', + values: [platformMembershipId], + }); + return results.rows.map(convertItemHashTag); +} + +function convertItemHashTag(row: ItemHashTagRow): ItemHashTag { + const result: ItemHashTag = { + hash: parseInt(row.item_hash, 10), + }; + if (row.tag) { + result.tag = TagValueEnum[row.tag] as unknown as TagValue; + } + if (row.notes) { + result.notes = row.notes; + } + return result; +} + +/** + * Insert or update (upsert) a single item annotation. Loadouts are totally replaced when updated. + */ +export async function updateItemHashTag( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + itemHashTag: ItemHashTag, +): Promise { + const tagValue = clearValue(itemHashTag.tag); + const notesValue = clearValue(itemHashTag.notes); + + if (tagValue === 'clear' && notesValue === 'clear') { + return deleteItemHashTag(client, platformMembershipId, itemHashTag.hash); + } + + const response = await client.query({ + name: 'upsert_hash_tag', + text: `insert INTO item_hash_tags (membership_id, platform_membership_id, item_hash, tag, notes) +values ($1, $2, $3, (CASE WHEN $4 = 0 THEN NULL ELSE $4 END), (CASE WHEN $5 = 'clear' THEN NULL ELSE $5 END)) +on conflict (platform_membership_id, item_hash) +do update set (tag, notes, deleted_at) = ((CASE WHEN $4 = 0 THEN NULL WHEN $4 IS NULL THEN item_hash_tags.tag ELSE $4 END), (CASE WHEN $5 = 'clear' THEN NULL WHEN $5 IS NULL THEN item_hash_tags.notes ELSE $5 END), null)`, + values: [ + bungieMembershipId, + platformMembershipId, + 
itemHashTag.hash, + tagValue === null ? null : TagValueEnum[tagValue], + notesValue, + ], + }); + + if (response.rowCount! < 1) { + // This should never happen! + metrics.increment('db.itemHashTags.noRowUpdated.count', 1); + throw new Error('hash tags - No row was updated'); + } + + return response; +} + +/** + * If the value is explicitly set to null or empty string, we return "clear" which will remove the value from the database. + * If it's undefined we return null, which will preserve the existing value. + * If it's set, we'll return the input which will update the existing value. + */ +function clearValue(val: T | null | undefined): T | 'clear' | null { + if (val === null || val?.length === 0) { + return 'clear'; + } else if (!val) { + return null; + } else { + return val; + } +} + +/** + * Delete an item hash tag. + */ +export async function deleteItemHashTag( + client: ClientBase, + platformMembershipId: string, + itemHash: number, +): Promise { + return client.query({ + name: 'delete_item_hash_tag', + text: `update item_hash_tags set (tag, notes, deleted_at) = (null, null, now()) where platform_membership_id = $1 and item_hash = $2`, + values: [platformMembershipId, itemHash], + }); +} + +/** + * Delete all item hash tags for a user. 
+ */ +export async function deleteAllItemHashTags( + client: ClientBase, + platformMembershipId: string, +): Promise { + return client.query({ + name: 'delete_all_item_hash_tags', + text: `delete from item_hash_tags where platform_membership_id = $1`, + values: [platformMembershipId], + }); +} diff --git a/api/db/loadout-share-queries.test.ts b/api/db/loadout-share-queries.test.ts new file mode 100644 index 0000000..ca964dc --- /dev/null +++ b/api/db/loadout-share-queries.test.ts @@ -0,0 +1,68 @@ +import { v4 as uuid } from 'uuid'; +import { Loadout, LoadoutItem } from '../shapes/loadouts.js'; +import { closeDbPool, transaction } from './index.js'; +import { addLoadoutShare, getLoadoutShare, recordAccess } from './loadout-share-queries.js'; + +const bungieMembershipId = 4321; +const platformMembershipId = '213512057'; + +const shareID = 'ABCDEFG'; + +beforeEach(() => + transaction(async (client) => { + await client.query("delete from loadout_shares where id = 'ABCDEFG'"); + }), +); + +afterAll(() => closeDbPool()); + +const loadout: Loadout = { + id: uuid(), + name: 'Test Loadout', + classType: 1, + equipped: [ + { + hash: 100, + id: '1234', + socketOverrides: { 7: 9 }, + }, + ], + unequipped: [ + // This item has an extra property which shouldn't be saved + { + hash: 200, + id: '5678', + amount: 10, + fizbuzz: 11, + } as any as LoadoutItem, + ], +}; + +it('can record a shared loadout', async () => { + await transaction(async (client) => { + await addLoadoutShare(client, bungieMembershipId, platformMembershipId, shareID, loadout); + + const sharedLoadout = await getLoadoutShare(client, shareID); + + expect(sharedLoadout?.name).toBe(loadout.name); + }); +}); + +it('rejects multiple shares with the same ID', async () => { + await transaction(async (client) => { + await addLoadoutShare(client, bungieMembershipId, platformMembershipId, shareID, loadout); + + try { + await addLoadoutShare(client, bungieMembershipId, platformMembershipId, shareID, loadout); + 
fail('Expected this to throw an error'); + } catch {} + }); +}); + +it('can record visits', async () => { + await transaction(async (client) => { + await addLoadoutShare(client, bungieMembershipId, platformMembershipId, shareID, loadout); + + await recordAccess(client, shareID); + }); +}); diff --git a/api/db/loadout-share-queries.ts b/api/db/loadout-share-queries.ts new file mode 100644 index 0000000..87f70e2 --- /dev/null +++ b/api/db/loadout-share-queries.ts @@ -0,0 +1,80 @@ +import { ClientBase, QueryResult } from 'pg'; +import { metrics } from '../metrics/index.js'; +import { Loadout } from '../shapes/loadouts.js'; +import { cleanItem, convertLoadout, LoadoutRow } from './loadouts-queries.js'; + +/** + * Get a specific loadout share by its share ID. + */ +export async function getLoadoutShare( + client: ClientBase, + shareId: string, +): Promise { + const results = await client.query({ + name: 'get_loadout_share', + text: 'SELECT id, name, notes, class_type, items, parameters, created_at FROM loadout_shares WHERE id = $1', + values: [shareId], + }); + if (results.rowCount === 1) { + return convertLoadout(results.rows[0]); + } else { + return undefined; + } +} + +/** + * Create a new loadout share. These are intended to be immutable. + */ +export async function addLoadoutShare( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + shareId: string, + loadout: Loadout, +): Promise { + const response = await client.query({ + name: 'add_loadout_share', + text: `insert into loadout_shares (id, membership_id, platform_membership_id, name, notes, class_type, items, parameters) +values ($1, $2, $3, $4, $5, $6, $7, $8)`, + values: [ + shareId, + bungieMembershipId, + platformMembershipId, + loadout.name, + loadout.notes, + loadout.classType, + { + equipped: loadout.equipped.map(cleanItem), + unequipped: loadout.unequipped.map(cleanItem), + }, + loadout.parameters, + ], + }); + + if (response.rowCount! < 1) { + // This should never happen! 
+ metrics.increment('db.loadoutShares.noRowUpdated.count', 1); + throw new Error('loadout share - No row was updated'); + } + + return response; +} + +/** + * Touch the last_accessed_at and visits fields to keep track of access. + */ +export async function recordAccess(client: ClientBase, shareId: string): Promise { + const response = await client.query({ + name: 'loadout_share_record_access', + text: `update loadout_shares set last_accessed_at = current_timestamp, view_count = view_count + 1 where id = $1`, + values: [shareId], + }); + + if (response.rowCount! < 1) { + // This should never happen! + metrics.increment('db.loadoutShares.noRowUpdated.count', 1); + throw new Error('loadout share - No row was updated'); + } + + return response; +} diff --git a/api/db/loadouts-queries.test.ts b/api/db/loadouts-queries.test.ts new file mode 100644 index 0000000..e6999d5 --- /dev/null +++ b/api/db/loadouts-queries.test.ts @@ -0,0 +1,94 @@ +import { v4 as uuid } from 'uuid'; +import { Loadout, LoadoutItem } from '../shapes/loadouts.js'; +import { closeDbPool, transaction } from './index.js'; +import { deleteLoadout, getLoadoutsForProfile, updateLoadout } from './loadouts-queries.js'; + +const bungieMembershipId = 4321; +const platformMembershipId = '213512057'; + +beforeEach(() => + transaction(async (client) => { + await client.query(`delete from loadouts where membership_id = ${bungieMembershipId}`); + }), +); + +afterAll(async () => closeDbPool()); + +const loadout: Loadout = { + id: uuid(), + name: 'Test Loadout', + classType: 1, + equipped: [ + { + hash: 100, + id: '1234', + socketOverrides: { 7: 9 }, + }, + { + hash: 200, + id: '4567', + craftedDate: 1000, + }, + ], + unequipped: [ + // This item has an extra property which shouldn't be saved + { + hash: 200, + id: '5678', + amount: 10, + fizbuzz: 11, + } as any as LoadoutItem, + ], +}; + +it('can record a loadout', async () => { + await transaction(async (client) => { + await updateLoadout(client, 
bungieMembershipId, platformMembershipId, 2, loadout); + + const loadouts = await getLoadoutsForProfile(client, platformMembershipId, 2); + + expect(loadouts.length).toBe(1); + + const firstLoadout = loadouts[0]; + expect(firstLoadout.createdAt).toBeDefined(); + delete firstLoadout.createdAt; + expect(firstLoadout.lastUpdatedAt).toBeDefined(); + delete firstLoadout.lastUpdatedAt; + expect(firstLoadout.unequipped.length).toBe(1); + expect((firstLoadout.unequipped[0] as { fizbuzz?: number }).fizbuzz).toBeUndefined(); + (firstLoadout.unequipped[0] as { fizbuzz?: number }).fizbuzz = 11; + expect(firstLoadout).toEqual(loadout); + }); +}); + +it('can update a loadout', async () => { + await transaction(async (client) => { + await updateLoadout(client, bungieMembershipId, platformMembershipId, 2, loadout); + + await updateLoadout(client, bungieMembershipId, platformMembershipId, 2, { + ...loadout, + name: 'Updated', + unequipped: [], + }); + + const loadouts = await getLoadoutsForProfile(client, platformMembershipId, 2); + + expect(loadouts.length).toBe(1); + expect(loadouts[0].name).toEqual('Updated'); + expect(loadouts[0].unequipped.length).toBe(0); + expect(loadouts[0].equipped).toEqual(loadout.equipped); + }); +}); + +it('can delete a loadout', async () => { + await transaction(async (client) => { + await updateLoadout(client, bungieMembershipId, platformMembershipId, 2, loadout); + + const success = await deleteLoadout(client, platformMembershipId, loadout.id); + expect(success).toBe(true); + + const loadouts = await getLoadoutsForProfile(client, platformMembershipId, 2); + + expect(loadouts.length).toBe(0); + }); +}); diff --git a/api/db/loadouts-queries.ts b/api/db/loadouts-queries.ts new file mode 100644 index 0000000..0e8d5d5 --- /dev/null +++ b/api/db/loadouts-queries.ts @@ -0,0 +1,187 @@ +import { ClientBase, QueryResult } from 'pg'; +import { metrics } from '../metrics/index.js'; +import { DestinyVersion } from '../shapes/general.js'; +import { Loadout, 
LoadoutItem } from '../shapes/loadouts.js'; +import { isValidItemId, KeysToSnakeCase } from '../utils.js'; + +export interface LoadoutRow + extends KeysToSnakeCase< + Omit + > { + created_at: Date; + last_updated_at: Date | null; + items: { equipped: LoadoutItem[]; unequipped: LoadoutItem[] }; +} + +/** + * Get all of the loadouts for a particular platform_membership_id and destiny_version. + */ +export async function getLoadoutsForProfile( + client: ClientBase, + platformMembershipId: string, + destinyVersion: DestinyVersion, +): Promise { + const results = await client.query({ + name: 'get_loadouts_for_platform_membership_id', + text: 'SELECT id, name, notes, class_type, items, parameters, created_at, last_updated_at FROM loadouts WHERE platform_membership_id = $1 and destiny_version = $2 and deleted_at IS NULL', + values: [platformMembershipId, destinyVersion], + }); + return results.rows.map(convertLoadout); +} + +/** + * Get ALL of loadouts for a particular user across all platforms. 
+ * @deprecated + */ +export async function getAllLoadoutsForUser( + client: ClientBase, + bungieMembershipId: number, +): Promise< + { + platformMembershipId: string; + destinyVersion: DestinyVersion; + loadout: Loadout; + }[] +> { + const results = await client.query< + LoadoutRow & { platform_membership_id: string; destiny_version: DestinyVersion } + >({ + name: 'get_all_loadouts_for_user', + text: 'SELECT membership_id, platform_membership_id, destiny_version, id, name, notes, class_type, items, parameters, created_at, last_updated_at FROM loadouts WHERE membership_id = $1', + values: [bungieMembershipId], + }); + return results.rows.map((row) => { + const loadout = convertLoadout(row); + return { + platformMembershipId: row.platform_membership_id, + destinyVersion: row.destiny_version, + loadout, + }; + }); +} + +export function convertLoadout(row: LoadoutRow): Loadout { + const loadout: Loadout = { + id: row.id, + name: row.name, + classType: row.class_type, + equipped: row.items.equipped || [], + unequipped: row.items.unequipped || [], + createdAt: row.created_at.getTime(), + lastUpdatedAt: row.last_updated_at?.getTime(), + }; + if (row.notes) { + loadout.notes = row.notes; + } + if (row.parameters) { + loadout.parameters = row.parameters; + } + return loadout; +} + +/** + * Insert or update (upsert) a loadout. Loadouts are totally replaced when updated. 
+ */ +export async function updateLoadout( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + destinyVersion: DestinyVersion, + loadout: Loadout, +): Promise { + const response = await client.query({ + name: 'upsert_loadout', + text: `insert into loadouts (id, membership_id, platform_membership_id, destiny_version, name, notes, class_type, items, parameters) +values ($1, $2, $3, $4, $5, $6, $7, $8, $9) +on conflict (platform_membership_id, id) +do update set (name, notes, class_type, items, parameters) = ($5, $6, $7, $8, $9)`, + values: [ + loadout.id, + bungieMembershipId, + platformMembershipId, + destinyVersion, + loadout.name, + loadout.notes, + loadout.classType, + { + equipped: loadout.equipped.map(cleanItem), + unequipped: loadout.unequipped.map(cleanItem), + }, + loadout.parameters, + ], + }); + + if (response.rowCount! < 1) { + // This should never happen! + metrics.increment('db.loadouts.noRowUpdated.count', 1); + throw new Error('loadouts - No row was updated'); + } + + return response; +} + +/** + * Make sure items are stored minimally and extra properties don't sneak in + */ +export function cleanItem(item: LoadoutItem): LoadoutItem { + const hash = item.hash; + if (!Number.isFinite(hash)) { + throw new Error('hash must be a number'); + } + + const result: LoadoutItem = { + hash, + }; + + if (item.amount && Number.isFinite(item.amount)) { + result.amount = item.amount; + } + + if (item.id) { + if (!isValidItemId(item.id)) { + throw new Error(`item ID ${item.id} is not in the right format`); + } + result.id = item.id; + } + + if (item.socketOverrides) { + result.socketOverrides = item.socketOverrides; + } + + if (item.craftedDate && Number.isFinite(item.craftedDate)) { + result.craftedDate = item.craftedDate; + } + + return result; +} + +/** + * Delete a loadout. Loadouts are totally replaced when updated. 
+ */ +export async function deleteLoadout( + client: ClientBase, + platformMembershipId: string, + loadoutId: string, +): Promise { + const response = await client.query({ + name: 'delete_loadout', + text: `update loadouts set deleted_at = now() where platform_membership_id = $1 and id = $2`, + values: [platformMembershipId, loadoutId], + }); + + return response.rowCount! >= 1; +} + +/** + * Delete all loadouts for a user (on all platforms). + */ +export async function deleteAllLoadouts( + client: ClientBase, + platformMembershipId: string, +): Promise { + return client.query({ + name: 'delete_all_loadouts', + text: `delete from loadouts where platform_membership_id = $1`, + values: [platformMembershipId], + }); +} diff --git a/api/db/migration-state-queries.ts b/api/db/migration-state-queries.ts new file mode 100644 index 0000000..0ce9107 --- /dev/null +++ b/api/db/migration-state-queries.ts @@ -0,0 +1,255 @@ +import { ClientBase } from 'pg'; +import { metrics } from '../metrics/index.js'; +import { transaction } from './index.js'; + +export const MAX_MIGRATION_ATTEMPTS = 3; + +export const enum MigrationState { + Invalid = 0, + Stately = 1, + MigratingToPostgres = 2, + Postgres = 3, +} + +export interface MigrationStateInfo { + platformMembershipId: string; + bungieMembershipId: number; + state: MigrationState; + lastStateChangeAt: number; + attemptCount: number; + lastError?: string; +} + +interface MigrationStateRow { + membership_id: number; + platform_membership_id: string; + state: number; + last_state_change_at: Date; + attempt_count: number; + last_error: string | null; +} + +export async function getUsersToMigrate(client: ClientBase): Promise { + const results = await client.query({ + name: 'get_users_to_migrate', + text: 'select membership_id from migration_state where state != 3 limit 1000', + }); + return results.rows.map((row) => row.membership_id); +} + +export async function getMigrationState( + client: ClientBase, + platformMembershipId: string, 
+): Promise { + const results = await client.query({ + name: 'get_migration_state', + text: 'SELECT membership_id, platform_membership_id, state, last_state_change_at, attempt_count, last_error FROM migration_state WHERE platform_membership_id = $1', + values: [platformMembershipId], + }); + if (results.rows.length > 0) { + return convert(results.rows[0]); + } else { + return { + bungieMembershipId: 0, + platformMembershipId, + state: MigrationState.Stately, + lastStateChangeAt: 0, + attemptCount: 0, + }; + } +} + +function convert(row: MigrationStateRow): MigrationStateInfo { + return { + bungieMembershipId: row.membership_id, + platformMembershipId: row.platform_membership_id, + state: row.state, + lastStateChangeAt: row.last_state_change_at.getTime(), + attemptCount: row.attempt_count, + lastError: row.last_error ?? undefined, + }; +} + +export function startMigrationToPostgres( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, +): Promise { + return updateMigrationState( + client, + bungieMembershipId, + platformMembershipId, + MigrationState.MigratingToPostgres, + MigrationState.Stately, + true, + ); +} + +export function finishMigrationToPostgres( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, +): Promise { + return updateMigrationState( + client, + bungieMembershipId, + platformMembershipId, + MigrationState.Postgres, + MigrationState.MigratingToPostgres, + false, + ); +} + +export function abortMigrationToPostgres( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + err: string, +): Promise { + return updateMigrationState( + client, + bungieMembershipId, + platformMembershipId, + MigrationState.Stately, + MigrationState.MigratingToPostgres, + false, + err, + ); +} + +async function updateMigrationState( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + state: MigrationState, + expectedState: MigrationState, + 
incrementAttempt = true, + err?: string, +): Promise { + // Postgres upserts are awkward but nice to have + const response = await client.query({ + name: 'update_migration_state', + text: `insert into migration_state (platform_membership_id, membership_id, state, last_state_change_at, attempt_count, last_error) VALUES ($1, $2, $3, current_timestamp, $4, $5) +on conflict (platform_membership_id) +do update set state = $2, last_state_change_at = current_timestamp, attempt_count = migration_state.attempt_count + $3, last_error = coalesce($4, migration_state.last_error) +where migration_state.state = $5`, + values: [ + platformMembershipId, + bungieMembershipId, + state, + incrementAttempt ? 1 : 0, + err ?? null, + expectedState, + ], + }); + if (response.rowCount === 0) { + throw new Error('Migration state was not in expected state'); + } +} + +// Mostly for tests and delete-my-data +export async function deleteMigrationState( + client: ClientBase, + platformMembershipId: string, +): Promise { + await client.query({ + name: 'delete_migration_state', + text: 'DELETE FROM migration_state WHERE platform_membership_id = $1', + values: [platformMembershipId], + }); +} + +// const forcePostgresMembershipIds = new Set([ +// // Ben +// 7094, +// // Test user +// 1234, +// ]); + +// const dialPercentage = 1.0; // 0 - 1.0 + +// This would be better as a uniform hash but this is good enough for now +// function isUserDialedIn(bungieMembershipId: number) { +// return (bungieMembershipId % 10000) / 10000 < dialPercentage; +// } + +export async function getDesiredMigrationState(_migrationState: MigrationStateInfo) { + return MigrationState.Stately; + + // TODO: we'll handle this later + + // // TODO: use a uniform hash and a percentage dial to control this + // const desiredState = + // forceStatelyMembershipIds.has(migrationState.bungieMembershipId) || + // isUserDialedIn(migrationState.bungieMembershipId) + // ? 
MigrationState.Stately + // : MigrationState.Postgres; + + // if (desiredState === migrationState.state) { + // return migrationState.state; + // } + + // if ( + // desiredState === MigrationState.Stately && + // migrationState.state === MigrationState.Postgres && + // migrationState.attemptCount >= MAX_MIGRATION_ATTEMPTS + // ) { + // return MigrationState.Postgres; + // } + + // if ( + // migrationState.state === MigrationState.MigratingToStately && + // // If we've been in this state for more than 15 minutes, just move on + // migrationState.lastStateChangeAt < Date.now() - 1000 * 60 * 15 + // ) { + // await transaction(async (client) => { + // abortMigrationToStately(client, migrationState.bungieMembershipId, 'Migration timed out'); + // }); + // return MigrationState.Postgres; + // } + + // if (migrationState.state === MigrationState.MigratingToStately) { + // throw new Error('Unable to update - please wait a bit and try again.'); + // } + + // return desiredState; +} + +/** + * Wrap the migration process - start a migration, run fn(), finish the + * migration. Abort on failure. + */ +export async function doMigration( + bungieMembershipId: number, + platformMembershipId: string, + fn: () => Promise, + onBeforeFinish?: (client: ClientBase) => Promise, +): Promise { + try { + metrics.increment('migration.start.count'); + await transaction(async (client) => { + await startMigrationToPostgres(client, bungieMembershipId, platformMembershipId); + }); + await fn(); + await transaction(async (client) => { + await onBeforeFinish?.(client); + await finishMigrationToPostgres(client, bungieMembershipId, platformMembershipId); + }); + metrics.increment('migration.finish.count'); + } catch (e) { + console.error( + `Stately migration failed for ${platformMembershipId} (${bungieMembershipId})`, + e, + ); + await transaction(async (client) => { + await abortMigrationToPostgres( + client, + bungieMembershipId, + platformMembershipId, + e instanceof Error ? 
e.message : 'Unknown error', + ); + }); + metrics.increment('migration.abort.count'); + throw e; + } +} diff --git a/api/db/searches-queries.test.ts b/api/db/searches-queries.test.ts new file mode 100644 index 0000000..e5ae657 --- /dev/null +++ b/api/db/searches-queries.test.ts @@ -0,0 +1,244 @@ +import { SearchType } from '../shapes/search.js'; +import { closeDbPool, transaction } from './index.js'; +import { + deleteAllSearches, + deleteSearch, + getSearchesForProfile, + getSearchesForUser, + importSearch, + saveSearch, + updateUsedSearch, +} from './searches-queries.js'; + +const bungieMembershipId = 4321; +const platformMembershipId = '213512057'; + +beforeEach(() => + transaction(async (client) => { + await deleteAllSearches(client, platformMembershipId); + }), +); + +afterAll(async () => closeDbPool()); + +it('can record a used search where none was recorded before', async () => { + await transaction(async (client) => { + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + ); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches[0].query).toBe('tag:junk'); + expect(searches[0].saved).toBe(false); + expect(searches[0].usageCount).toBe(1); + }); +}); + +it('can track search multiple times', async () => { + await transaction(async (client) => { + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + ); + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + ); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches[0].query).toBe('tag:junk'); + expect(searches[0].saved).toBe(false); + expect(searches[0].usageCount).toBe(2); + }); +}); + +it('can mark a search as favorite', async () => { + await 
transaction(async (client) => { + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + ); + await saveSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + true, + ); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches[0].query).toBe('tag:junk'); + expect(searches[0].saved).toBe(true); + expect(searches[0].usageCount).toBe(1); + + await saveSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + false, + ); + + const searches2 = await getSearchesForProfile(client, bungieMembershipId, 2); + expect(searches2[0].query).toBe('tag:junk'); + expect(searches2[0].saved).toBe(false); + // Save/unsave doesn't modify usage count + expect(searches2[0].usageCount).toBe(1); + expect(searches2[0].lastUsage).toBe(searches2[0].lastUsage); + }); +}); +it('can mark a search as favorite even when it hasnt been used', async () => { + await transaction(async (client) => { + await saveSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + true, + ); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches[0].query).toBe('tag:junk'); + expect(searches[0].saved).toBe(true); + expect(searches[0].usageCount).toBe(1); + }); +}); + +it('can get all searches across profiles', async () => { + await transaction(async (client) => { + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + ); + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 1, + 'is:tagged', + SearchType.Item, + ); + + const searches = await getSearchesForUser(client, bungieMembershipId); + expect(searches.length).toEqual(2); + }); +}); + +it('can increment 
usage for one of the built-in searches', async () => { + await transaction(async (client) => { + const searches = await getSearchesForProfile(client, bungieMembershipId, 2); + const query = searches[searches.length - 1].query; + + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + query, + SearchType.Item, + ); + + const searches2 = await getSearchesForProfile(client, bungieMembershipId, 2); + const search = searches2.find((s) => s.query === query); + expect(search?.usageCount).toBe(1); + expect(searches2.length).toBe(searches.length); + }); +}); + +it('can delete a search', async () => { + await transaction(async (client) => { + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + SearchType.Item, + ); + await deleteSearch(client, platformMembershipId, 2, 'tag:junk', SearchType.Item); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches.length).toBe(0); + }); +}); + +it('can import a search', async () => { + await transaction(async (client) => { + await importSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'tag:junk', + true, + 1598199188576, + 5, + SearchType.Item, + ); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches[0].query).toBe('tag:junk'); + expect(searches[0].saved).toBe(true); + expect(searches[0].usageCount).toBe(5); + }); +}); + +it('can record searches for loadouts', async () => { + await transaction(async (client) => { + await updateUsedSearch( + client, + bungieMembershipId, + platformMembershipId, + 2, + 'subclass:void', + SearchType.Loadout, + ); + + const searches = (await getSearchesForProfile(client, bungieMembershipId, 2)).filter( + (s) => s.usageCount > 0, + ); + expect(searches[0].query).toBe('subclass:void'); + expect(searches[0].saved).toBe(false); + 
expect(searches[0].usageCount).toBe(1); + expect(searches[0].type).toBe(SearchType.Loadout); + }); +}); diff --git a/api/db/searches-queries.ts b/api/db/searches-queries.ts new file mode 100644 index 0000000..2d0fcf9 --- /dev/null +++ b/api/db/searches-queries.ts @@ -0,0 +1,229 @@ +import { uniqBy } from 'es-toolkit'; +import { ClientBase, QueryResult } from 'pg'; +import { metrics } from '../metrics/index.js'; +import { ExportResponse } from '../shapes/export.js'; +import { DestinyVersion } from '../shapes/general.js'; +import { Search, SearchType } from '../shapes/search.js'; +import { KeysToSnakeCase } from '../utils.js'; + +interface SearchRow extends KeysToSnakeCase> { + last_updated_at: Date; + search_type: SearchType; +} + +/* + * These "canned searches" get sent to everyone as a "starter pack" of example searches that'll show up in the recent search dropdown and autocomplete. + */ +const cannedSearchesForD2: Search[] = [ + 'is:blue is:haspower -is:maxpower', + '-is:equipped is:haspower is:incurrentchar', + '-is:exotic -is:locked -is:maxpower -is:tagged stat:total:<55', +].map((query) => ({ + query, + saved: false, + usageCount: 0, + lastUsage: 0, + type: SearchType.Item, +})); + +const cannedSearchesForD1: Search[] = ['-is:equipped is:haslight is:incurrentchar'].map( + (query) => ({ + query, + saved: false, + usageCount: 0, + lastUsage: 0, + type: SearchType.Item, + }), +); +/* + * Searches are stored in a single table, scoped by Bungie.net account and destiny version (D1 searches are separate from D2 searches). + * Favorites and recent searches are stored the same - there's just a favorite flag for saved searches. There is also a usage count + * and a last_updated_at time, so we can order by both frequency and recency (or a combination of both) and we can age out less-used + * searches. For the best results, searches should be normalized so they match up more often. + * + * We can merge this with a list of global suggested searches to avoid an empty menu. 
+ */ + +/** + * Get all of the searches for a particular destiny_version. + */ +export async function getSearchesForProfile( + client: ClientBase, + bungieMembershipId: number, + destinyVersion: DestinyVersion, +): Promise { + const results = await client.query({ + name: 'get_searches', + // TODO: order by frecency + text: 'SELECT query, saved, usage_count, search_type, last_updated_at FROM searches WHERE membership_id = $1 and destiny_version = $2 order by last_updated_at DESC, usage_count DESC LIMIT 500', + values: [bungieMembershipId, destinyVersion], + }); + return uniqBy( + results.rows + .map(convertSearch) + .concat(destinyVersion === 2 ? cannedSearchesForD2 : cannedSearchesForD1), + (s) => s.query, + ); +} + +/** + * Get ALL of the searches for a particular user across all destiny versions. + */ +export async function getSearchesForUser( + client: ClientBase, + bungieMembershipId: number, +): Promise { + // TODO: this isn't indexed! + const results = await client.query({ + name: 'get_all_searches', + text: 'SELECT destiny_version, query, saved, usage_count, search_type, last_updated_at FROM searches WHERE membership_id = $1', + values: [bungieMembershipId], + }); + return results.rows.map((row) => ({ + destinyVersion: row.destiny_version, + search: convertSearch(row), + })); +} + +function convertSearch(row: SearchRow): Search { + return { + query: row.query, + usageCount: row.usage_count, + saved: row.saved, + lastUsage: row.last_updated_at.getTime(), + type: row.search_type, + }; +} + +/** + * Insert or update (upsert) a single search. 
+ * + * It's a bit odd that saving/unsaving a search counts as a "usage" but that's probably OK + */ +export async function updateUsedSearch( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + destinyVersion: DestinyVersion, + query: string, + type: SearchType, +): Promise { + const response = await client.query({ + name: 'upsert_search', + text: `insert INTO searches (membership_id, platform_membership_id, destiny_version, query, search_type) +values ($1, $2, $3, $4, $5) +on conflict (platform_membership_id, destiny_version, qhash) +do update set (usage_count, last_used, deleted_at) = (searches.usage_count + 1, current_timestamp, null)`, + values: [bungieMembershipId, platformMembershipId, destinyVersion, query, type], + }); + + if (response.rowCount! < 1) { + // This should never happen! + metrics.increment('db.searches.noRowUpdated.count', 1); + throw new Error('searches - No row was updated'); + } + + return response; +} + +/** + * Save/unsave a search. This assumes the search exists. + */ +export async function saveSearch( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + destinyVersion: DestinyVersion, + query: string, + type: SearchType, + saved?: boolean, +): Promise { + const response = await client.query({ + name: 'save_search', + text: `UPDATE searches SET saved = $4 WHERE platform_membership_id = $1 AND destiny_version = $2 AND qhash = decode(md5($3), 'hex') AND query = $3 and search_type = $5`, + values: [platformMembershipId, destinyVersion, query, saved, type], + }); + + if (response.rowCount! < 1) { + // Someone saved a search they haven't used! 
+ metrics.increment('db.searches.noRowUpdated.count', 1); + const insertSavedResponse = await client.query({ + name: 'insert_search_fallback', + text: `insert INTO searches (membership_id, platform_membership_id, destiny_version, query, search_type, saved) + values ($1, $2, $3, $4, $5, true)`, + values: [bungieMembershipId, platformMembershipId, destinyVersion, query, type], + }); + return insertSavedResponse; + } + + return response; +} +/** + * Insert a single search as part of an import. + */ +export async function importSearch( + client: ClientBase, + bungieMembershipId: number, + platformMembershipId: string, + destinyVersion: DestinyVersion, + query: string, + saved: boolean, + lastUsage: number, + usageCount: number, + type: SearchType, +): Promise { + const response = await client.query({ + name: 'insert_search', + text: `insert INTO searches (membership_id, platform_membership_id, destiny_version, query, saved, search_type, usage_count, last_used) +values ($1, $2, $3, $4, $5, $6, $7, $8)`, + values: [ + bungieMembershipId, + platformMembershipId, + destinyVersion, + query, + saved, + type, + usageCount, + new Date(lastUsage), + ], + }); + + if (response.rowCount! < 1) { + // This should never happen! + metrics.increment('db.searches.noRowUpdated.count', 1); + throw new Error('searches - No row was updated'); + } + + return response; +} + +/** + * Delete a single search + */ +export async function deleteSearch( + client: ClientBase, + platformMembershipId: string, + destinyVersion: DestinyVersion, + query: string, + type: SearchType, +): Promise { + return client.query({ + name: 'delete_search', + text: `update searches set deleted_at = now(), usage_count = 0, last_used = now() where platform_membership_id = $1 and destiny_version = $2 and qhash = decode(md5($3), 'hex') and query = $3 and search_type = $4`, + values: [platformMembershipId, destinyVersion, query, type], + }); +} + +/** + * Delete all searches for a user (for all destiny versions). 
+ */ +export async function deleteAllSearches( + client: ClientBase, + platformMembershipId: string, +): Promise { + return client.query({ + name: 'delete_all_searches', + text: `delete from searches where platform_membership_id = $1`, + values: [platformMembershipId], + }); +} diff --git a/api/db/settings-queries.test.ts b/api/db/settings-queries.test.ts new file mode 100644 index 0000000..c026d9a --- /dev/null +++ b/api/db/settings-queries.test.ts @@ -0,0 +1,53 @@ +import { closeDbPool, transaction } from './index.js'; +import { getSettings, setSetting } from './settings-queries.js'; + +const bungieMembershipId = 4321; + +afterAll(async () => closeDbPool()); + +it('can insert settings where none exist before', async () => { + await transaction(async (client) => { + await setSetting(client, bungieMembershipId, { + showNewItems: true, + }); + + const settings = await getSettings(client, bungieMembershipId); + expect(settings.showNewItems).toBe(true); + }); +}); + +it('can update settings', async () => { + await transaction(async (client) => { + await setSetting(client, bungieMembershipId, { + showNewItems: true, + }); + + const settings = await getSettings(client, bungieMembershipId); + expect(settings.showNewItems).toBe(true); + + await setSetting(client, bungieMembershipId, { + showNewItems: false, + }); + + const settings2 = await getSettings(client, bungieMembershipId); + expect(settings2.showNewItems).toBe(false); + }); +}); + +it('can partially update settings', async () => { + await transaction(async (client) => { + await setSetting(client, bungieMembershipId, { + showNewItems: true, + }); + + const settings = await getSettings(client, bungieMembershipId); + expect(settings.showNewItems).toBe(true); + + await setSetting(client, bungieMembershipId, { + singleCharacter: true, + }); + + const settings2 = await getSettings(client, bungieMembershipId); + expect(settings2.showNewItems).toBe(true); + }); +}); diff --git a/api/db/settings-queries.ts 
b/api/db/settings-queries.ts new file mode 100644 index 0000000..1c30bc6 --- /dev/null +++ b/api/db/settings-queries.ts @@ -0,0 +1,69 @@ +import { ClientBase, QueryResult } from 'pg'; +import { Settings } from '../shapes/settings.js'; + +/** + * Get settings for a particular account. + */ +export async function getSettings( + client: ClientBase, + bungieMembershipId: number, +): Promise> { + const results = await client.query<{ settings: Settings }>({ + name: 'get_settings', + text: 'SELECT settings FROM settings WHERE membership_id = $1 and deleted_at IS NULL', + values: [bungieMembershipId], + }); + return results.rows.length > 0 ? results.rows[0].settings : {}; +} + +/** + * Insert or update (upsert) an entire settings tree, totally replacing whatever's there. + */ +export async function replaceSettings( + client: ClientBase, + bungieMembershipId: number, + settings: Partial, +): Promise { + const result = await client.query({ + name: 'upsert_settings', + text: `insert into settings (membership_id, settings) +values ($1, $2) +on conflict (membership_id) +do update set settings = $2, deleted_at = null`, + values: [bungieMembershipId, settings], + }); + return result; +} + +/** + * Update specific key/value pairs within settings, leaving the rest alone. Creates the settings row if it doesn't exist. + */ +export async function setSetting( + client: ClientBase, + bungieMembershipId: number, + settings: Partial, +): Promise { + return client.query({ + name: 'set_setting', + text: `insert into settings (membership_id, settings) +values ($1, $2) +on conflict (membership_id) +do update set settings = (settings.settings || $2), deleted_at = null`, + // The `||` operator merges two JSONB objects, with the right-hand object's keys taking precedence. + values: [bungieMembershipId, settings], + }); +} + +/** + * Delete the settings row for a particular user. 
+ */ +export async function deleteSettings( + client: ClientBase, + bungieMembershipId: number, +): Promise { + return client.query({ + name: 'delete_settings', + text: `update settings set deleted_at = now(), settings = '{}'::jsonb WHERE membership_id = $1`, + values: [bungieMembershipId], + }); +} diff --git a/api/db/triumphs-queries.test.ts b/api/db/triumphs-queries.test.ts new file mode 100644 index 0000000..7cc3a27 --- /dev/null +++ b/api/db/triumphs-queries.test.ts @@ -0,0 +1,62 @@ +import { closeDbPool, transaction } from './index.js'; +import { + deleteAllTrackedTriumphs, + getAllTrackedTriumphsForUser, + getTrackedTriumphsForProfile, + trackTriumph, + unTrackTriumph, +} from './triumphs-queries.js'; + +const platformMembershipId = '213512057'; +const bungieMembershipId = 4321; + +beforeEach(() => + transaction(async (client) => { + await deleteAllTrackedTriumphs(client, bungieMembershipId); + }), +); + +afterAll(async () => closeDbPool()); + +it('can track a triumph where none was tracked before', async () => { + await transaction(async (client) => { + await trackTriumph(client, bungieMembershipId, platformMembershipId, 3851137658); + + const triumphs = await getTrackedTriumphsForProfile(client, platformMembershipId); + expect(triumphs[0]).toEqual(3851137658); + }); +}); + +it('can track a triumph that was already tracked', async () => { + await transaction(async (client) => { + await trackTriumph(client, bungieMembershipId, platformMembershipId, 3851137658); + + await trackTriumph(client, bungieMembershipId, platformMembershipId, 3851137658); + + const triumphs = await getTrackedTriumphsForProfile(client, platformMembershipId); + expect(triumphs[0]).toEqual(3851137658); + }); +}); + +it('can untrack a triumph', async () => { + await transaction(async (client) => { + await trackTriumph(client, bungieMembershipId, platformMembershipId, 3851137658); + + await unTrackTriumph(client, platformMembershipId, 3851137658); + + const triumphs = await 
getTrackedTriumphsForProfile(client, platformMembershipId); + expect(triumphs.length).toEqual(0); + }); +}); + +it('can get all tracked triumphs across profiles', async () => { + await transaction(async (client) => { + await trackTriumph(client, bungieMembershipId, platformMembershipId, 3851137658); + await trackTriumph(client, bungieMembershipId, '54321', 3851137658); + + await trackTriumph(client, bungieMembershipId, platformMembershipId, 3851137658); + + const triumphs = await getAllTrackedTriumphsForUser(client, bungieMembershipId); + expect(triumphs.length).toEqual(2); + }); +}); diff --git a/api/db/triumphs-queries.ts b/api/db/triumphs-queries.ts new file mode 100644 index 0000000..7b8cb2f --- /dev/null +++ b/api/db/triumphs-queries.ts @@ -0,0 +1,105 @@ +import { ClientBase, QueryResult } from 'pg'; +import { metrics } from '../metrics/index.js'; + +/** + * Get all of the tracked triumphs for a particular platform_membership_id. + */ +export async function getTrackedTriumphsForProfile( + client: ClientBase, + platformMembershipId: string, +): Promise { + const results = await client.query<{ record_hash: string }>({ + name: 'get_tracked_triumphs', + text: 'SELECT record_hash FROM tracked_triumphs WHERE platform_membership_id = $1 and deleted_at IS NULL', + values: [platformMembershipId], + }); + return results.rows.map((row) => parseInt(row.record_hash, 10)); +} + +/** + * Get ALL of the tracked triumphs for a particular user across all platforms. + * @deprecated + */ +// TODO: get rid of this! 
+export async function getAllTrackedTriumphsForUser(
+  client: ClientBase,
+  bungieMembershipId: number,
+): Promise<
+  {
+    platformMembershipId: string;
+    triumphs: number[];
+  }[]
+> {
+  const results = await client.query<{ platform_membership_id: string; record_hash: string }>({
+    name: 'get_all_tracked_triumphs',
+    text: 'SELECT platform_membership_id, record_hash FROM tracked_triumphs WHERE membership_id = $1 and deleted_at IS NULL',
+    values: [bungieMembershipId],
+  });
+
+  const triumphsByAccount: { [platformMembershipId: string]: number[] } = {};
+
+  for (const row of results.rows) {
+    (triumphsByAccount[row.platform_membership_id] ||= []).push(parseInt(row.record_hash, 10));
+  }
+
+  return Object.entries(triumphsByAccount).map(([platformMembershipId, triumphs]) => ({
+    platformMembershipId,
+    triumphs,
+  }));
+}
+
+/**
+ * Add a tracked triumph.
+ */
+export async function trackTriumph(
+  client: ClientBase,
+  bungieMembershipId: number,
+  platformMembershipId: string,
+  recordHash: number,
+): Promise<QueryResult> {
+  const response = await client.query({
+    name: 'insert_tracked_triumph',
+    text: `insert INTO tracked_triumphs (membership_id, platform_membership_id, record_hash)
+values ($1, $2, $3)
+on conflict (platform_membership_id, record_hash) do update set deleted_at = null, membership_id = $1`,
+    values: [bungieMembershipId, platformMembershipId, recordHash],
+  });
+
+  return response;
+}
+
+/**
+ * Remove a tracked triumph.
+ */
+export async function unTrackTriumph(
+  client: ClientBase,
+  platformMembershipId: string,
+  recordHash: number,
+): Promise<QueryResult> {
+  const response = await client.query({
+    name: 'delete_tracked_triumph',
+    text: `update tracked_triumphs set deleted_at = now() where platform_membership_id = $1 and record_hash = $2`,
+    values: [platformMembershipId, recordHash],
+  });
+
+  if (response.rowCount!
< 1) {
+    // This should never happen but it's OK
+    metrics.increment('db.triumphs.noRowDeleted.count', 1);
+  }
+
+  return response;
+}
+
+/**
+ * Delete all tracked triumphs for a user (on all platforms).
+ */
+export async function deleteAllTrackedTriumphs(
+  client: ClientBase,
+  bungieMembershipId: number,
+): Promise<QueryResult> {
+  return client.query({
+    name: 'delete_all_tracked_triumphs',
+    text: `delete from tracked_triumphs where membership_id = $1`,
+    values: [bungieMembershipId],
+  });
+}
diff --git a/api/index.ts b/api/index.ts
index 164634c..fabeae1 100644
--- a/api/index.ts
+++ b/api/index.ts
@@ -6,6 +6,7 @@ import http from 'http';
 import morgan from 'morgan';
 import vhost from 'vhost';
 import { refreshApps, stopAppsRefresh } from './apps/index.js';
+import { closeDbPool } from './db/index.js';
 import { app as dimGgApp } from './dim-gg/server.js';
 import { metrics } from './metrics/index.js';
 import { app as dimApiApp } from './server.js';
@@ -123,6 +124,7 @@ createTerminus(server, {
   onShutdown: async () => {
     console.log('Shutting down');
     stopAppsRefresh();
+    closeDbPool();
   },
 });
diff --git a/api/migrations/20251210022848-settings-table.js b/api/migrations/20251210022848-settings-table.js
new file mode 100644
index 0000000..0141581
--- /dev/null
+++ b/api/migrations/20251210022848-settings-table.js
@@ -0,0 +1,41 @@
+'use strict';
+
+var dbm;
+var type;
+var seed;
+
+/**
+ * We receive the dbmigrate dependency from dbmigrate initially.
+ * This enables us to not have to rely on NODE_PATH.
+ */
+exports.setup = function (options, seedLink) {
+  dbm = options.dbmigrate;
+  type = dbm.dataType;
+  seed = seedLink;
+};
+
+/**
+ * The settings table stores its data in a single JSONB column. This allows us to easily
+ * add and remove settings, and to only store settings that differ from the defaults.
+ */
+exports.up = function (db, callback) {
+  // TODO: Maybe users should be linked to memberships? Add a profile IDs array column?
Or just have a mapping table... + db.runSql( + `CREATE TABLE settings ( + membership_id int PRIMARY KEY NOT NULL, + settings jsonb NOT NULL default '{}'::jsonb, + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp /* soft delete timestamp, applies to the whole account */ + )`, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('settings', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251210023226-global-settings-table.js b/api/migrations/20251210023226-global-settings-table.js new file mode 100644 index 0000000..6c1b388 --- /dev/null +++ b/api/migrations/20251210023226-global-settings-table.js @@ -0,0 +1,39 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. + */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + // It's a single-row table (for now at least) to hold global settings as a JSON blob. 
+ db.runSql( + `CREATE TABLE global_settings ( + flavor text PRIMARY KEY NOT NULL default 'app', + settings jsonb NOT NULL default '{}'::jsonb + ); + + INSERT INTO global_settings (flavor, settings) VALUES ('app', '{}'::jsonb); + INSERT INTO global_settings (flavor, settings) VALUES ('dev', '{}'::jsonb); + INSERT INTO global_settings (flavor, settings) VALUES ('beta', '{}'::jsonb); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('global_settings', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251210023442-apps-table.js b/api/migrations/20251210023442-apps-table.js new file mode 100644 index 0000000..5970e29 --- /dev/null +++ b/api/migrations/20251210023442-apps-table.js @@ -0,0 +1,37 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. + */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + db.runSql( + `CREATE TABLE apps ( + id text PRIMARY KEY NOT NULL, + bungie_api_key text NOT NULL, + dim_api_key UUID NOT NULL default gen_random_uuid(), + origin text NOT NULL, + created_at timestamp NOT NULL default current_timestamp + ); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('apps', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251210024101-loadouts-table.js b/api/migrations/20251210024101-loadouts-table.js new file mode 100644 index 0000000..cd67012 --- /dev/null +++ b/api/migrations/20251210024101-loadouts-table.js @@ -0,0 +1,65 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + db.runSql( + `CREATE TABLE loadouts ( + id UUID NOT NULL, /* loadout ID, primary key but not indexed because who cares */ + platform_membership_id bigint NOT NULL, + destiny_version smallint NOT NULL default 2, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + + name text NOT NULL, + notes text, + class_type smallint NOT NULL default 3, + /* Items in a loadout are just JSON */ + items jsonb NOT NULL default '{}'::jsonb, + parameters jsonb, + + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp, /* soft delete timestamp */ + + /* loadouts are unique by platform_membership_id ID and loadout ID - effectively they're scoped by user */ + PRIMARY KEY(platform_membership_id, id) + ); + + /* This can be reused on other tables as well. */ + CREATE FUNCTION sync_lastmod() RETURNS trigger AS $$ + BEGIN + NEW.last_updated_at := NOW(); + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + + CREATE TRIGGER + loadouts_last_updated + BEFORE UPDATE ON + loadouts + FOR EACH ROW EXECUTE PROCEDURE + sync_lastmod(); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('loadouts', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251210025824-item-annotations-table.js b/api/migrations/20251210025824-item-annotations-table.js new file mode 100644 index 0000000..9e01b6c --- /dev/null +++ b/api/migrations/20251210025824-item-annotations-table.js @@ -0,0 +1,58 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +// TODO: Would it be better to have a separate index that includes last_updated_at? Should we handle uniqueness automatically? +exports.up = function (db, callback) { + db.runSql( + ` + CREATE TABLE item_annotations ( + inventory_item_id bigint NOT NULL, /* uint64 inventory item ID from Bungie */ + platform_membership_id bigint NOT NULL, + destiny_version smallint NOT NULL default 2, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + + tag smallint, /* nullable tag enum defined in shapes - null means no tag */ + notes text, /* nullable user notes - null means no notes */ + crafted_date timestamp, /* Items get reissued with a new ID when they are recrafted */ + + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp, /* soft delete timestamp */ + + /* tags are unique by platform_membership_id ID and inventory item ID - effectively they're scoped by user. */ + PRIMARY KEY(platform_membership_id, inventory_item_id) + ); + + CREATE TRIGGER + item_annotations_last_updated + BEFORE UPDATE ON + item_annotations + FOR EACH ROW EXECUTE PROCEDURE + sync_lastmod(); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('item_annotations', () => { + db.runSql('drop type item_tag', callback); + }); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251217192118-triumphs-table.js b/api/migrations/20251217192118-triumphs-table.js new file mode 100644 index 0000000..3023f52 --- /dev/null +++ b/api/migrations/20251217192118-triumphs-table.js @@ -0,0 +1,55 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +/** + * Entries for each triumph tracked by a user. Presence in this table indicates the triumph + * is tracked - otherwise it is simply missing. + */ +exports.up = function (db, callback) { + db.runSql( + ` + CREATE TABLE tracked_triumphs ( + record_hash bigint NOT NULL, + platform_membership_id bigint NOT NULL, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + /* triumphs are only for D2 so we don't need a destiny_version column */ + + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp, /* soft delete timestamp */ + + /* Tracked triumphs can be different for different profiles */ + PRIMARY KEY(platform_membership_id, record_hash) + ); + + CREATE TRIGGER + tracked_triumphs_last_updated + BEFORE UPDATE ON + tracked_triumphs + FOR EACH ROW EXECUTE PROCEDURE + sync_lastmod(); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('tracked_triumphs', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251217192708-searches-table.js b/api/migrations/20251217192708-searches-table.js new file mode 100644 index 0000000..50c1680 --- /dev/null +++ b/api/migrations/20251217192708-searches-table.js @@ -0,0 +1,49 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + db.runSql( + ` + CREATE TABLE searches ( + platform_membership_id bigint NOT NULL, + destiny_version smallint NOT NULL default 2, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + + query text NOT NULL, + qhash bytea GENERATED ALWAYS AS (decode(md5(query), 'hex')) STORED, + saved boolean NOT NULL default false, + usage_count int NOT NULL default 1, + last_used timestamp NOT NULL default current_timestamp, + search_type smallint NOT NULL DEFAULT 1, + + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp, /* soft delete timestamp */ + + PRIMARY KEY(platform_membership_id, qhash, destiny_version) + ); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('searches', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251217193412-item-hash-tags-table.js b/api/migrations/20251217193412-item-hash-tags-table.js new file mode 100644 index 0000000..364156e --- /dev/null +++ b/api/migrations/20251217193412-item-hash-tags-table.js @@ -0,0 +1,53 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + db.runSql( + ` + CREATE TABLE item_hash_tags ( + item_hash bigint NOT NULL, + platform_membership_id bigint NOT NULL, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + + tag smallint, /* nullable tag enum defined in shapes - null means no tag */ + notes text, /* nullable user notes - null means no notes */ + + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp, /* soft delete timestamp */ + + /* tags are unique by platform_membership_id ID and item hash - effectively they're scoped by user. */ + PRIMARY KEY(platform_membership_id, item_hash) + ); + + CREATE TRIGGER + item_hash_tags_last_updated + BEFORE UPDATE ON + item_hash_tags + FOR EACH ROW EXECUTE PROCEDURE + sync_lastmod(); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('item_hash_tags', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251217194620-loadout-share-table.js b/api/migrations/20251217194620-loadout-share-table.js new file mode 100644 index 0000000..391793e --- /dev/null +++ b/api/migrations/20251217194620-loadout-share-table.js @@ -0,0 +1,65 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. + */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +// TODO: Index on platform_membership_id to allow us to quickly find all loadouts for a given user. 
+exports.up = function (db, callback) { + db.runSql( + `CREATE TABLE loadout_shares ( + /* + * A globally unique short random string to be used when sharing the loadout, but which is hard to guess. + * This is essentially 35 random bits encoded via base32 into a 7-character string. It'd be neat if we could + * support that, with a parameterizable string length. + */ + id text NOT NULL, + platform_membership_id bigint NOT NULL, + destiny_version smallint NOT NULL default 2, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + + name text NOT NULL, + notes text, + class_type smallint NOT NULL default 3, + /* Items in a loadout are just JSON */ + items jsonb NOT NULL default '{}'::jsonb, + parameters jsonb, + + view_count int NOT NULL default 0, + last_accessed_at timestamp, + + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp, + deleted_at timestamp, /* soft delete timestamp */ + + PRIMARY KEY(id) + ); + + CREATE TRIGGER + loadout_shares_last_updated + BEFORE UPDATE ON + loadout_shares + FOR EACH ROW EXECUTE PROCEDURE + sync_lastmod(); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('loadout_shares', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/20251230003453-migration-state-table.js b/api/migrations/20251230003453-migration-state-table.js new file mode 100644 index 0000000..07da89e --- /dev/null +++ b/api/migrations/20251230003453-migration-state-table.js @@ -0,0 +1,47 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + db.runSql( + `CREATE TABLE migration_state ( + platform_membership_id bigint NOT NULL, + membership_id int NOT NULL, /* Not especially useful but good to keep track of for emergencies */ + state smallint NOT NULL default 1, + last_state_change_at timestamp NOT NULL default current_timestamp, + attempt_count int NOT NULL default 0, + last_error text, + created_at timestamp NOT NULL default current_timestamp, + last_updated_at timestamp NOT NULL default current_timestamp + ); + + CREATE TRIGGER + migration_state_last_updated + BEFORE UPDATE ON + migration_state + FOR EACH ROW EXECUTE PROCEDURE + sync_lastmod(); + `, + callback, + ); +}; + +exports.down = function (db, callback) { + db.dropTable('migration_state', callback); +}; + +exports._meta = { + version: 1, +}; diff --git a/api/migrations/package.json b/api/migrations/package.json new file mode 100644 index 0000000..878e6bf --- /dev/null +++ b/api/migrations/package.json @@ -0,0 +1,6 @@ +{ + "name": "dim-api-migrations", + "private": true, + "description": "https://github.com/db-migrate/node-db-migrate/pull/724/files", + "type": "commonjs" +} diff --git a/api/routes/create-app.ts b/api/routes/create-app.ts index 812b0c9..1a4fc79 100644 --- a/api/routes/create-app.ts +++ b/api/routes/create-app.ts @@ -1,7 +1,10 @@ import asyncHandler from 'express-async-handler'; +import { DatabaseError } from 'pg-protocol'; import { v4 as uuid } from 'uuid'; +import { getAppById, insertApp as insertAppPostgres } from '../db/apps-queries.js'; +import { transaction } from '../db/index.js'; import { ApiApp, CreateAppRequest } from '../shapes/app.js'; -import { insertApp } from '../stately/apps-queries.js'; +import { insertApp as insertAppStately } from '../stately/apps-queries.js'; import { badRequest } from '../utils.js'; const localHosts = @@ -47,7 +50,26 @@ 
export const createAppHandler = asyncHandler(async (req, res) => { }; // Put it in StatelyDB - app = await insertApp(app); + app = await insertAppStately(app); + + // Put it in Postgres + await transaction(async (client) => { + try { + await insertAppPostgres(client, app); + } catch (e) { + // This is a unique constraint violation, so just get the app! + if (e instanceof DatabaseError && e.code === '23505') { + await client.query('ROLLBACK'); + + const existingApp = await getAppById(client, request.id); + if (existingApp) { + app = existingApp; + } + } else { + throw e; + } + } + }); // Only return the recovered app if it's for the same origin and key if (app.origin === originUrl.origin && app.bungieApiKey === request.bungieApiKey) { diff --git a/api/routes/delete-all-data.ts b/api/routes/delete-all-data.ts index 3adb688..e7210d6 100644 --- a/api/routes/delete-all-data.ts +++ b/api/routes/delete-all-data.ts @@ -8,10 +8,53 @@ import { deleteAllDataForUser } from '../stately/bulk-queries.js'; export const deleteAllDataHandler = asyncHandler(async (req, res) => { const { bungieMembershipId, profileIds } = req.user as UserInfo; + // const migrationState = await readTransaction(async (client) => + // getMigrationState(client, bungieMembershipId), + // ); + const result = await deleteAllDataForUser(bungieMembershipId, profileIds); + // switch (migrationState.state) { + // case MigrationState.Postgres: + // // Also delete from Stately, just to honor the "no data left here" promise + // try { + // await deleteAllDataForUser(bungieMembershipId, profileIds); + // } catch (e) { + // console.error('Error deleting data from Stately', e); + // } + // result = await transaction(async (client) => deleteAllData(client, bungieMembershipId)); + // break; + // case MigrationState.Stately: + // // Also delete from Postgres, just to honor the "no data left here" promise + // try { + // await transaction(async (client) => deleteAllData(client, bungieMembershipId)); + // } catch (e) { + 
// console.error('Error deleting data from Postgres', e); + // } + // result = await deleteAllDataForUser(bungieMembershipId, profileIds); + // break; + // default: + // // We're in the middle of a migration + // throw new Error(`Unable to delete data - please wait a bit and try again.`); + // } + // default 200 OK res.status(200).send({ deleted: result, }); }); + +// /** Postgres delete-all-data implementation just individually deletes from each table */ +// export async function deleteAllData( +// client: ClientBase, +// bungieMembershipId: number, +// ): Promise { +// return { +// settings: (await deleteSettings(client, bungieMembershipId)).rowCount!, +// loadouts: (await deleteAllLoadouts(client, bungieMembershipId)).rowCount!, +// tags: (await deleteAllItemAnnotations(client, bungieMembershipId)).rowCount!, +// itemHashTags: (await deleteAllItemHashTags(client, bungieMembershipId)).rowCount!, +// triumphs: (await deleteAllTrackedTriumphs(client, bungieMembershipId)).rowCount!, +// searches: (await deleteAllSearches(client, bungieMembershipId)).rowCount!, +// }; +// } diff --git a/api/routes/export.ts b/api/routes/export.ts index 9476528..c55ede2 100644 --- a/api/routes/export.ts +++ b/api/routes/export.ts @@ -5,9 +5,48 @@ import { exportDataForUser } from '../stately/bulk-queries.js'; export const exportHandler = asyncHandler(async (req, res) => { const { bungieMembershipId, profileIds } = req.user as UserInfo; + // const migrationState = await readTransaction(async (client) => + // getMigrationState(client, bungieMembershipId), + // ); + const response = await exportDataForUser(bungieMembershipId, profileIds); + // switch (migrationState.state) { + // case MigrationState.Postgres: + // case MigrationState.MigratingToStately: // in-progress migration is the same as PG + // response = await pgExport(bungieMembershipId); + // break; + // case MigrationState.Stately: + // response = await exportDataForUser(bungieMembershipId, profileIds); + // break; + // 
default: + // // invalid state + // throw new Error(`Unable to export data - please wait a bit and try again.`); + // } + // Instruct CF not to cache this res.set('Cache-Control', 'no-cache, no-store, max-age=0'); res.send(response); }); + +// export async function pgExport(bungieMembershipId: number): Promise { +// const response = await readTransaction(async (client) => { +// const settings = await getSettings(client, bungieMembershipId); +// const loadouts = await getAllLoadoutsForUser(client, bungieMembershipId); +// const itemAnnotations = await getAllItemAnnotationsForUser(client, bungieMembershipId); +// const itemHashTags = await getItemHashTagsForProfile(client, bungieMembershipId); +// const triumphs = await getAllTrackedTriumphsForUser(client, bungieMembershipId); +// const searches = await getSearchesForUser(client, bungieMembershipId); + +// const response: ExportResponse = { +// settings, +// loadouts, +// tags: itemAnnotations, +// itemHashTags, +// triumphs, +// searches, +// }; +// return response; +// }); +// return response; +// } diff --git a/api/routes/import.ts b/api/routes/import.ts index 92cd0f0..b80d84c 100644 --- a/api/routes/import.ts +++ b/api/routes/import.ts @@ -40,16 +40,38 @@ export const importHandler = asyncHandler(async (req, res) => { return; } - const numTriumphs = await statelyImport( - bungieMembershipId, - profileIds, - settings, - loadouts, - itemAnnotations, - triumphs, - searches, - itemHashTags, - ); + // const migrationState = await readTransaction(async (client) => + // getMigrationState(client, bungieMembershipId), + // ); + + let numTriumphs = 0; + const importToStately = async () => { + numTriumphs = await statelyImport( + bungieMembershipId, + profileIds, + settings, + loadouts, + itemAnnotations, + triumphs, + searches, + itemHashTags, + ); + }; + + await importToStately(); + + // switch (migrationState.state) { + // case MigrationState.Postgres: + // await doMigration(bungieMembershipId, importToStately); + // 
break; + // case MigrationState.Stately: + // await importToStately(); + // break; + // default: + // // in-progress migration + // badRequest(res, `Unable to import data - please wait a bit and try again.`); + // return; + // } const response: ImportResponse = { loadouts: loadouts.length, diff --git a/api/routes/loadout-share.ts b/api/routes/loadout-share.ts index 3e714ce..bb2d8cb 100644 --- a/api/routes/loadout-share.ts +++ b/api/routes/loadout-share.ts @@ -115,10 +115,25 @@ export const getLoadoutShareHandler = asyncHandler(async (req, res) => { }); export async function loadLoadoutShare(shareId: string) { - const loadout = await getLoadoutShareStately(shareId); - if (loadout) { - // Record when this was viewed and increment the view counter. Not using it much for now but I'd like to know. - await recordAccessStately(shareId); - return loadout; + // First look in Stately + try { + const loadout = await getLoadoutShareStately(shareId); + if (loadout) { + // Record when this was viewed and increment the view counter. Not using it much for now but I'd like to know. + await recordAccessStately(shareId); + return loadout; + } + } catch (e) { + console.error('Failed to load loadout share from Stately', e); } + + // // Fall back to Postgres + // return transaction(async (client) => { + // const loadout = await getLoadoutShare(client, shareId); + // if (loadout) { + // // Record when this was viewed and increment the view counter. Not using it much for now but I'd like to know. 
+ // await recordAccess(client, shareId); + // } + // return loadout; + // }); } diff --git a/api/routes/platform-info.ts b/api/routes/platform-info.ts index 86f68ba..494c786 100644 --- a/api/routes/platform-info.ts +++ b/api/routes/platform-info.ts @@ -1,12 +1,25 @@ import asyncHandler from 'express-async-handler'; -import { defaultGlobalSettings } from '../shapes/global-settings.js'; +import { getGlobalSettingsQuery } from '../db/global-settings-queries.js'; +import { defaultGlobalSettings, GlobalSettings } from '../shapes/global-settings.js'; import { getGlobalSettings } from '../stately/global-settings.js'; export const platformInfoHandler = asyncHandler(async (req, res) => { const flavor = (req.query.flavor as string) ?? 'app'; - // Try StatelyDB first, then fall back to Postgres - const settings = (await getGlobalSettings(flavor)) ?? defaultGlobalSettings; + let settings: GlobalSettings | undefined = undefined; + + // Try Postgres first, then fall back to StatelyDB + + try { + const result = await getGlobalSettingsQuery(flavor); + if (result.rowCount! > 0) { + settings = { ...defaultGlobalSettings, ...result.rows[0].settings }; + } else { + settings = defaultGlobalSettings; + } + } catch { + settings = (await getGlobalSettings(flavor)) ?? 
defaultGlobalSettings; + } // Instruct CF to cache for 15 minutes res.set('Cache-Control', 'public, max-age=900'); diff --git a/api/routes/update.ts b/api/routes/update.ts index a1d1ee1..22526d6 100644 --- a/api/routes/update.ts +++ b/api/routes/update.ts @@ -88,6 +88,15 @@ export const updateHandler = asyncHandler(async (req, res) => { return; } + // const migrationState = await readTransaction(async (client) => + // getMigrationState(client, bungieMembershipId), + // ); + + // const desiredMigrationState = await getDesiredMigrationState(migrationState); + // const shouldMigrateToStately = + // desiredMigrationState === MigrationState.Stately && + // migrationState.state !== desiredMigrationState; + const results: ProfileUpdateResult[] = validateUpdates(req, updates, platformMembershipId, appId); // Only attempt updates that pass validation const updatesToApply = updates.filter((_, index) => results[index].status === 'Success'); @@ -99,6 +108,64 @@ export const updateHandler = asyncHandler(async (req, res) => { destinyVersion, ); + // const importToStately = async () => { + // // Export from Postgres + // const exportResponse = await pgExport(bungieMembershipId); + + // const { settings, loadouts, itemAnnotations, triumphs, searches, itemHashTags } = + // extractImportData(exportResponse); + + // if ( + // isEmpty(settings) && + // loadouts.length === 0 && + // itemAnnotations.length === 0 && + // triumphs.length === 0 && + // searches.length === 0 + // ) { + // // Nothing to import! 
+ // return; + // } + // await statelyImport( + // bungieMembershipId, + // profileIds, + // settings, + // loadouts, + // itemAnnotations, + // triumphs, + // searches, + // itemHashTags, + // ); + // }; + + // switch (migrationState.state) { + // case MigrationState.Postgres: + // if (shouldMigrateToStately) { + // // For now let's leave the old data in Postgres as a backup + // await doMigration(bungieMembershipId, importToStately); + // await statelyUpdate( + // updatesToApply, + // bungieMembershipId, + // platformMembershipId ?? profileIds[0], + // destinyVersion, + // ); + // } else { + // await pgUpdate(updatesToApply, bungieMembershipId, platformMembershipId, destinyVersion); + // } + // break; + // case MigrationState.Stately: + // await statelyUpdate( + // updatesToApply, + // bungieMembershipId, + // platformMembershipId ?? profileIds[0], + // destinyVersion, + // ); + // break; + // default: + // // in-progress migration + // badRequest(res, `Unable to import data - please wait a bit and try again.`); + // return; + // } + res.send({ results, }); @@ -311,6 +378,118 @@ async function statelyUpdate( } } +// async function pgUpdate( +// updates: ProfileUpdate[], +// bungieMembershipId: number, +// platformMembershipId: string | undefined, +// destinyVersion: DestinyVersion, +// ) { +// return transaction(async (client) => { +// for (const update of updates) { +// switch (update.action) { +// case 'setting': +// await updateSetting(client, bungieMembershipId, update.payload); +// break; + +// case 'loadout': +// await updateLoadout( +// client, +// bungieMembershipId, +// platformMembershipId!, +// destinyVersion, +// update.payload, +// ); +// break; + +// case 'delete_loadout': +// await deleteLoadout(client, platformMembershipId!, update.payload); +// break; + +// case 'tag': +// await updateItemAnnotation( +// client, +// bungieMembershipId, +// platformMembershipId!, +// destinyVersion, +// update.payload, +// ); +// break; + +// case 'tag_cleanup': 
+// await tagCleanup(client, platformMembershipId!, update.payload); +// break; + +// case 'item_hash_tag': +// await updateItemHashTag( +// client, +// bungieMembershipId, +// platformMembershipId!, +// update.payload, +// ); +// break; + +// case 'track_triumph': +// await trackTriumph(client, bungieMembershipId, platformMembershipId!, update.payload); +// break; + +// case 'search': +// await recordSearch( +// client, +// bungieMembershipId, +// platformMembershipId!, +// destinyVersion, +// update.payload, +// ); +// break; + +// case 'save_search': +// await saveSearch( +// client, +// bungieMembershipId, +// platformMembershipId!, +// destinyVersion, +// update.payload, +// ); +// break; + +// case 'delete_search': +// await deleteSearch(client, platformMembershipId!, destinyVersion, update.payload); +// break; +// } +// } +// }); +// } + +// async function updateSetting( +// client: ClientBase, +// bungieMembershipId: number, +// settings: Partial, +// ): Promise { +// // TODO: how do we set settings back to the default? Maybe just load and replace the whole settings object. + +// const start = new Date(); +// await setSettingInDb(client, bungieMembershipId, settings); +// metrics.timing('update.setting', start); +// } + +// async function updateLoadout( +// client: ClientBase, +// bungieMembershipId: number, +// platformMembershipId: string, +// destinyVersion: DestinyVersion, +// loadout: Loadout, +// ): Promise { +// const start = new Date(); +// await updateLoadoutInDb( +// client, +// bungieMembershipId, +// platformMembershipId, +// destinyVersion, +// loadout, +// ); +// metrics.timing('update.loadout', start); +// } + function validateUpdateLoadout(loadout: Loadout): ProfileUpdateResult { return validateLoadout('update', loadout) ?? 
{ status: 'Success' }; } @@ -400,6 +579,34 @@ export function validateLoadout(metricPrefix: string, loadout: Loadout) { return undefined; } +// async function deleteLoadout( +// client: ClientBase, +// platformMembershipId: string, +// loadoutId: string, +// ): Promise { +// const start = new Date(); +// await deleteLoadoutInDb(client, platformMembershipId, loadoutId); +// metrics.timing('update.deleteLoadout', start); +// } + +// async function updateItemAnnotation( +// client: ClientBase, +// bungieMembershipId: number, +// platformMembershipId: string, +// destinyVersion: DestinyVersion, +// itemAnnotation: ItemAnnotation, +// ): Promise { +// const start = new Date(); +// await updateItemAnnotationInDb( +// client, +// bungieMembershipId, +// platformMembershipId, +// destinyVersion, +// itemAnnotation, +// ); +// metrics.timing('update.tag', start); +// } + function validateUpdateItemAnnotation(itemAnnotation: ItemAnnotation): ProfileUpdateResult { if (!isValidItemId(itemAnnotation.id)) { metrics.increment('update.validation.badItemId.count'); @@ -460,6 +667,74 @@ function validateUpdateItemHashTag(itemAnnotation: ItemHashTag): ProfileUpdateRe return { status: 'Success' }; } +// async function tagCleanup( +// client: ClientBase, +// platformMembershipId: string, +// inventoryItemIds: string[], +// ): Promise { +// const start = new Date(); +// await deleteItemAnnotationList( +// client, +// platformMembershipId, +// inventoryItemIds.filter(isValidItemId), +// ); +// metrics.timing('update.tagCleanup', start); + +// return { status: 'Success' }; +// } + +// async function trackTriumph( +// client: ClientBase, +// bungieMembershipId: number, +// platformMembershipId: string, +// payload: TrackTriumphUpdate['payload'], +// ): Promise { +// const start = new Date(); +// payload.tracked +// ? 
await trackTriumphInDb(client, bungieMembershipId, platformMembershipId, payload.recordHash) +// : await unTrackTriumph(client, platformMembershipId, payload.recordHash); +// metrics.timing('update.trackTriumph', start); +// } + +// async function recordSearch( +// client: ClientBase, +// bungieMembershipId: number, +// platformMembershipId: string, +// destinyVersion: DestinyVersion, +// payload: UsedSearchUpdate['payload'], +// ): Promise { +// const start = new Date(); +// await updateUsedSearch( +// client, +// bungieMembershipId, +// platformMembershipId, +// destinyVersion, +// payload.query, +// payload.type ?? SearchType.Item, +// ); +// metrics.timing('update.recordSearch', start); +// } + +// async function saveSearch( +// client: ClientBase, +// bungieMembershipId: number, +// platformMembershipId: string, +// destinyVersion: DestinyVersion, +// payload: SavedSearchUpdate['payload'], +// ): Promise { +// const start = new Date(); +// await saveSearchInDb( +// client, +// bungieMembershipId, +// platformMembershipId, +// destinyVersion, +// payload.query, +// payload.type ?? SearchType.Item, +// payload.saved, +// ); +// metrics.timing('update.saveSearch', start); +// } + function validateSearch(payload: UsedSearchUpdate['payload']): ProfileUpdateResult { if (payload.query.length > 2048) { metrics.increment('update.validation.searchTooLong.count'); @@ -477,6 +752,34 @@ function validateSearch(payload: UsedSearchUpdate['payload']): ProfileUpdateResu return { status: 'Success' }; } +// async function deleteSearch( +// client: ClientBase, +// platformMembershipId: string, +// destinyVersion: DestinyVersion, +// payload: DeleteSearchUpdate['payload'], +// ): Promise { +// const start = new Date(); +// await deleteSearchInDb( +// client, +// platformMembershipId, +// destinyVersion, +// payload.query, +// payload.type ?? 
SearchType.Item, +// ); +// metrics.timing('update.deleteSearch', start); +// } + +// async function updateItemHashTag( +// client: ClientBase, +// bungieMembershipId: number, +// platformMembershipId: string, +// payload: ItemHashTagUpdate['payload'], +// ): Promise { +// const start = new Date(); +// await updateItemHashTagInDb(client, bungieMembershipId, platformMembershipId, payload); +// metrics.timing('update.updateItemHashTag', start); +// } + function consolidateSearchUpdates( updates: (UsedSearchUpdate | SavedSearchUpdate | DeleteSearchUpdate)[], ) { diff --git a/api/server.test.ts b/api/server.test.ts index 215a38a..80e30d7 100644 --- a/api/server.test.ts +++ b/api/server.test.ts @@ -3,7 +3,8 @@ import jwt from 'jsonwebtoken'; import { makeFetch } from 'supertest-fetch'; import { promisify } from 'util'; import { v4 as uuid } from 'uuid'; -import { refreshApps } from './apps/index.js'; +import { refreshApps, stopAppsRefresh } from './apps/index.js'; +import { closeDbPool } from './db/index.js'; import { app } from './server.js'; import { ApiApp } from './shapes/app.js'; import { DeleteAllResponse } from './shapes/delete-all.js'; @@ -58,6 +59,11 @@ beforeAll(async () => { await client.putBatch(...globalSettings); }); +afterAll(async () => { + stopAppsRefresh(); + await closeDbPool(); +}); + it('returns basic info from GET /', async () => { // Sends GET Request to / endpoint const response = await fetch('/'); @@ -267,7 +273,7 @@ describe('profile', () => { }); describe('settings', () => { - beforeEach(() => postRequestAuthed('/delete_all_data').expect(200)); + beforeEach(async () => postRequestAuthed('/delete_all_data').expect(200)); it('returns default settings', async () => { const profileResponse = (await getRequestAuthed('/profile?components=settings') @@ -304,7 +310,6 @@ const loadout: Loadout = { id: uuid(), name: 'Test Loadout', classType: 1, - clearSpace: false, equipped: [ { hash: 100, @@ -324,7 +329,7 @@ const loadout: Loadout = { }; 
describe('loadouts', () => { - beforeEach(() => postRequestAuthed('/delete_all_data').expect(200)); + beforeEach(async () => postRequestAuthed('/delete_all_data').expect(200)); it('can add a loadout', async () => { const request: ProfileUpdateRequest = { @@ -456,7 +461,7 @@ describe('loadouts', () => { }); describe('tags', () => { - beforeEach(() => postRequestAuthed('/delete_all_data').expect(200)); + beforeEach(async () => postRequestAuthed('/delete_all_data').expect(200)); it('can add a tag', async () => { const request: ProfileUpdateRequest = { @@ -703,7 +708,7 @@ describe('tags', () => { }); describe('item hash tags', () => { - beforeEach(() => postRequestAuthed('/delete_all_data').expect(200)); + beforeEach(async () => postRequestAuthed('/delete_all_data').expect(200)); it('can add an item hash tag', async () => { const request: ProfileUpdateRequest = { @@ -891,7 +896,7 @@ describe('item hash tags', () => { }); describe('triumphs', () => { - beforeEach(() => postRequestAuthed('/delete_all_data').expect(200)); + beforeEach(async () => postRequestAuthed('/delete_all_data').expect(200)); it('can add a tracked triumph', async () => { const request: ProfileUpdateRequest = { @@ -1032,7 +1037,7 @@ describe('triumphs', () => { }); describe('searches', () => { - beforeEach(() => postRequestAuthed('/delete_all_data').expect(200)); + beforeEach(async () => postRequestAuthed('/delete_all_data').expect(200)); it('can add a recent search', async () => { const request: ProfileUpdateRequest = { diff --git a/api/shapes/global-settings.ts b/api/shapes/global-settings.ts index cc14422..b078588 100644 --- a/api/shapes/global-settings.ts +++ b/api/shapes/global-settings.ts @@ -23,7 +23,7 @@ export const defaultGlobalSettings: GlobalSettings = { destinyProfileRefreshInterval: 120, autoRefresh: true, refreshProfileOnVisible: true, - dimProfileMinimumRefreshInterval: 600, + dimProfileMinimumRefreshInterval: 1, showIssueBanner: false, lastUpdated: 0, }; diff --git 
a/api/shapes/item-annotations.ts b/api/shapes/item-annotations.ts index 8371e21..0bff8fa 100644 --- a/api/shapes/item-annotations.ts +++ b/api/shapes/item-annotations.ts @@ -1,10 +1,5 @@ export type TagValue = 'favorite' | 'keep' | 'infuse' | 'junk' | 'archive'; -export const enum TagVariant { - PVP = 1, - PVE = 2, -} - /** Any extra info added by the user to individual items - tags, notes, etc. */ export interface ItemAnnotation extends Annotation { /** The item instance ID for an individual item */ @@ -28,12 +23,4 @@ interface Annotation { tag?: TagValue | null; /** Optional text notes on the item. */ notes?: string | null; - /** - * An optional "variant" for the tag that only has meaning if the tag is set. - * This provides a backwards and forwards compatible way to say a roll is - * "Keep-PVP" or "Keep-PVE". Clients that don't understand this flag will - * simply show "Keep". This is only really meant to be used with the "keep" - * tag. - */ - v?: TagVariant; } diff --git a/api/shapes/loadouts.ts b/api/shapes/loadouts.ts index 4c63d07..a5a6e23 100644 --- a/api/shapes/loadouts.ts +++ b/api/shapes/loadouts.ts @@ -41,19 +41,6 @@ export interface Loadout { * to. This is optional (set to Unknown for loadouts that can be used anywhere). */ classType: DestinyClass; - /** - * DestinyInventoryItemDefinition hash of an emblem to use as - * an icon for this loadout. - * - * @deprecated this was added for Little Light but never used by DIM. - */ - emblemHash?: number; - /** - * Whether to clear out other items when applying this loadout - * @deprecated in favor of parameters.clearWeapons and parameters.clearArmor - */ - /** Whether to clear out other items when applying this loadout */ - clearSpace?: boolean; /** List of equipped items in the loadout */ equipped: LoadoutItem[]; /** List of unequipped items in the loadout */ @@ -64,16 +51,6 @@ export interface Loadout { createdAt?: number; /** When was this Loadout last changed? 
Tracked automatically by the API - when saving a loadout this field is ignored. */ lastUpdatedAt?: number; - /** - * Automatically added stat mods. These are separate from the manually chosen - * mods in parameters.mods, and are saved here to avoid having to figure them - * out all over again every time (especially since our algorithm might - * change). Combine this list and parameters.mods when displaying or actually - * applying the loadout. - * - * @deprecated we just throw away stat mods when using LO auto stats - */ - autoStatMods?: number[]; } /** Whether armor of this type will have assumed masterworked stats in the Loadout Optimizer. */ diff --git a/api/stately/apps-queries.test.ts b/api/stately/apps-queries.test.ts index 3f21007..4daf6aa 100644 --- a/api/stately/apps-queries.test.ts +++ b/api/stately/apps-queries.test.ts @@ -10,7 +10,7 @@ const app: ApiApp = { dimApiKey: uuid(), }; -beforeEach(() => deleteApp(appId)); +beforeEach(async () => deleteApp(appId)); it('can create a new app', async () => { expect(await getAppById(appId)).toBeUndefined(); diff --git a/api/stately/generated/.gitattributes b/api/stately/generated/.gitattributes new file mode 100644 index 0000000..9343183 --- /dev/null +++ b/api/stately/generated/.gitattributes @@ -0,0 +1,7 @@ +index.js linguist-generated=true +index.d.ts linguist-generated=true +stately_pb.js linguist-generated=true +stately_pb.d.ts linguist-generated=true +stately_item_types.js linguist-generated=true +stately_item_types.d.ts linguist-generated=true +README.md linguist-generated=true diff --git a/api/stately/init/migrate-loadout-shares.ts b/api/stately/init/migrate-loadout-shares.ts new file mode 100644 index 0000000..a5c8108 --- /dev/null +++ b/api/stately/init/migrate-loadout-shares.ts @@ -0,0 +1,6 @@ +// import { migrateLoadoutShareChunk } from '../migrator/loadout-shares.js'; + +// while (true) { +// await migrateLoadoutShareChunk(); +// console.log('Migrated loadout shares'); +// } diff --git 
a/api/stately/init/migrate-users.ts b/api/stately/init/migrate-users.ts new file mode 100644 index 0000000..41fcd20 --- /dev/null +++ b/api/stately/init/migrate-users.ts @@ -0,0 +1,34 @@ +// import { chunk } from 'es-toolkit'; +// import { readTransaction } from '../../db/index.js'; +// import { getUsersToMigrate } from '../../db/migration-state-queries.js'; +// import { delay } from '../../utils.js'; +// import { migrateUser } from '../migrator/user.js'; + +// while (true) { +// try { +// const bungieMembershipIds = await readTransaction(async (client) => getUsersToMigrate(client)); +// if (bungieMembershipIds.length === 0) { +// console.log('No users to migrate'); +// break; +// } +// for (const idChunk of chunk(bungieMembershipIds, 10)) { +// await Promise.all( +// idChunk.map(async (bungieMembershipId) => { +// try { +// await migrateUser(bungieMembershipId); +// console.log(`Migrated user ${bungieMembershipId}`); +// } catch (e) { +// if (e instanceof Error) { +// console.error(`Error migrating user ${bungieMembershipId}: ${e}`); +// } +// } +// }), +// ); +// } +// } catch (e) { +// if (e instanceof Error) { +// console.error(`Error getting users to migrate: ${e}`); +// } +// await delay(1000); +// } +// } diff --git a/api/stately/loadout-share-queries.test.ts b/api/stately/loadout-share-queries.test.ts index e534ade..9dffcf7 100644 --- a/api/stately/loadout-share-queries.test.ts +++ b/api/stately/loadout-share-queries.test.ts @@ -11,13 +11,12 @@ const platformMembershipId = '213512057'; const shareID = 'ABCDEFG'; -beforeEach(() => deleteLoadoutShare(shareID)); +beforeEach(async () => deleteLoadoutShare(shareID)); const loadout: Loadout = { id: uuid(), name: 'Test Loadout', classType: 1, - clearSpace: false, equipped: [ { hash: 100, diff --git a/api/stately/loadout-share-queries.ts b/api/stately/loadout-share-queries.ts index b7fc52b..86c1ab0 100644 --- a/api/stately/loadout-share-queries.ts +++ b/api/stately/loadout-share-queries.ts @@ -1,4 +1,4 @@ -import { 
keyPath, StatelyError, WithPutOptions } from '@stately-cloud/client'; +import { keyPath, StatelyError } from '@stately-cloud/client'; import { Loadout } from '../shapes/loadouts.js'; import { delay } from '../utils.js'; import { client } from './client.js'; @@ -57,28 +57,6 @@ export async function addLoadoutShare( } } -/** - * Put loadout shares - this is meant for migrations. - */ -export async function addLoadoutSharesForMigration( - shares: { - platformMembershipId: string; - shareId: string; - loadout: Loadout; - }[], -): Promise { - const statelyShares = shares.map( - ({ platformMembershipId, shareId, loadout }): WithPutOptions => ({ - item: convertLoadoutShareToStately(loadout, platformMembershipId, shareId), - // Preserve the original timestamps - overwriteMetadataTimestamps: true, - }), - ); - - // We overwrite here - shares are immutable, so this is fine. - await client.putBatch(...statelyShares); -} - /** * Touch the last_accessed_at and visits fields to keep track of access. */ diff --git a/api/stately/migrator/user.ts b/api/stately/migrator/user.ts new file mode 100644 index 0000000..2b725f4 --- /dev/null +++ b/api/stately/migrator/user.ts @@ -0,0 +1,44 @@ +// import { isEmpty } from 'es-toolkit/compat'; +// import { doMigration } from '../../db/migration-state-queries.js'; +// import { pgExport } from '../../routes/export.js'; +// import { extractImportData, statelyImport } from '../../routes/import.js'; + +// export async function migrateUser(bungieMembershipId: number): Promise { +// const importToStately = async () => { +// // Export from Postgres +// const exportResponse = await pgExport(bungieMembershipId); + +// const { settings, loadouts, itemAnnotations, triumphs, searches, itemHashTags } = +// extractImportData(exportResponse); + +// const profileIds = new Set(); +// exportResponse.loadouts.forEach((l) => profileIds.add(l.platformMembershipId)); +// exportResponse.tags.forEach((t) => profileIds.add(t.platformMembershipId)); +// 
exportResponse.triumphs.forEach((t) => profileIds.add(t.platformMembershipId)); + +// if ( +// isEmpty(settings) && +// loadouts.length === 0 && +// itemAnnotations.length === 0 && +// triumphs.length === 0 && +// searches.length === 0 +// ) { +// // Nothing to import! +// return; +// } +// await statelyImport( +// bungieMembershipId, +// [...profileIds], +// settings, +// loadouts, +// itemAnnotations, +// triumphs, +// searches, +// itemHashTags, +// false, +// ); +// }; + +// // For now let's leave the old data in Postgres as a backup +// await doMigration(bungieMembershipId, importToStately); +// } diff --git a/api/test/postgres.setup.mjs b/api/test/postgres.setup.mjs new file mode 100644 index 0000000..0f4bfd0 --- /dev/null +++ b/api/test/postgres.setup.mjs @@ -0,0 +1,46 @@ +/* eslint-disable */ +import DBMigrate from 'db-migrate'; +import { execSync } from 'node:child_process'; + +export default async function setupDatabase() { + try { + execSync('docker stop dim-api-postgres', { + stdio: 'inherit', + sterr: 'inherit', + }); + } catch {} + + try { + execSync('docker rm dim-api-postgres', { + stdio: 'inherit', + sterr: 'inherit', + }); + } catch {} + + try { + execSync( + 'docker run --name dim-api-postgres -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=travis_ci_test -p 5432:5432 -d postgres', + { + stdio: 'inherit', + sterr: 'inherit', + }, + ); + } catch (error) { + console.error('Failed to start Docker container:', error.message); + throw error; + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); + + const dbmigrate = DBMigrate.getInstance(true, { + config: './api/database.json', + cmdOptions: { + 'migrations-dir': './api/migrations', + }, + env: 'test', + }); + + await dbmigrate.up(); + + return true; +} diff --git a/api/test/postgres.teardown.mjs b/api/test/postgres.teardown.mjs new file mode 100644 index 0000000..f5b6587 --- /dev/null +++ b/api/test/postgres.teardown.mjs @@ -0,0 +1,21 @@ +import { execSync } from 
'node:child_process'; + +export default async function teardownDatabase() { + try { + execSync('docker stop dim-api-postgres', { + stdio: 'inherit', + }); + } catch (error) { + console.error('Failed to stop Docker container:', error.message); + throw error; + } + + try { + execSync('docker rm dim-api-postgres', { + stdio: 'inherit', + }); + } catch (error) { + console.error('Failed to remove Docker container:', error.message); + throw error; + } +} diff --git a/build/.env.travis b/build/.env.travis index 2336de6..dd9fb3d 100644 --- a/build/.env.travis +++ b/build/.env.travis @@ -1 +1,7 @@ +PGHOST=localhost +PGPORT=5432 +PGDATABASE=travis_ci_test +PGUSER=postgres +PGPASSWORD=postgres JWT_SECRET=dummysecret +PGSSL=false \ No newline at end of file diff --git a/eslint.config.js b/eslint.config.js index b3c24ee..b6c6588 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -22,7 +22,12 @@ export default tseslint.config( { name: 'sonarjs/recommended', ...sonarjs.configs.recommended }, { name: 'global ignores', - ignores: ['*.test.ts', 'api/migrations/*', 'api/stately/generated/*.js'], + ignores: [ + '*.test.ts', + 'api/migrations/*', + 'api/stately/generated/*.js', + 'api/test/postgres.*.mjs', + ], }, { name: 'dim-api-custom', diff --git a/jest.config.js b/jest.config.js index 1f6452d..165420f 100644 --- a/jest.config.js +++ b/jest.config.js @@ -23,4 +23,6 @@ export default { }, modulePathIgnorePatterns: ['/dist/'], setupFiles: ['dotenv/config'], + globalSetup: '/api/test/postgres.setup.mjs', + globalTeardown: '/api/test/postgres.teardown.mjs', }; diff --git a/kubernetes/dim-api-configmap.yaml b/kubernetes/dim-api-configmap.yaml index de76d63..e2e588c 100644 --- a/kubernetes/dim-api-configmap.yaml +++ b/kubernetes/dim-api-configmap.yaml @@ -5,5 +5,10 @@ metadata: labels: app: dim-api data: + PGHOST: placeholder + PGPORT: placeholder + PGDATABASE: placeholder + PGUSER: placeholder + PGSSLMODE: require SENTRY_DSN: placeholder - STATELY_STORE_ID: placeholder + 
STATELY_STORE_ID: placeholder diff --git a/kubernetes/dim-api-deployment.yaml b/kubernetes/dim-api-deployment.yaml index 1418290..1ac44b4 100644 --- a/kubernetes/dim-api-deployment.yaml +++ b/kubernetes/dim-api-deployment.yaml @@ -49,6 +49,11 @@ spec: cpu: "150m" memory: "50Mi" env: + - name: PGPASSWORD + valueFrom: + secretKeyRef: + name: dim-api-secret + key: pg_password - name: JWT_SECRET valueFrom: secretKeyRef: diff --git a/package.json b/package.json index 60c6c37..dfdec26 100644 --- a/package.json +++ b/package.json @@ -46,10 +46,13 @@ "@types/jest": "^29.5.14", "@types/jsonwebtoken": "^9.0.10", "@types/morgan": "^1.9.10", + "@types/pg": "^8.11.10", "@types/uuid": "^10.0.0", "@types/vhost": "3.0.7", "@typescript-eslint/eslint-plugin": "^8.48.1", "@typescript-eslint/parser": "^8.48.1", + "db-migrate": "^0.11.14", + "db-migrate-pg": "^1.5.2", "eslint": "^9.39.1", "eslint-plugin-regexp": "^2.10.0", "eslint-plugin-sonarjs": "^1.0.4", @@ -85,9 +88,11 @@ "hot-shots": "^10.2.1", "jsonwebtoken": "^9.0.3", "morgan": "^1.10.1", + "pg": "8.13.1", + "pg-protocol": "1.7.0", "slugify": "^1.6.6", "uuid": "^11.1.0", "vhost": "^3.0.2" }, "packageManager": "pnpm@8.9.0+sha256.8f5264ad1d100da11a6add6bb8a94c6f1e913f9e9261b2a551fabefad2ec0fec" -} +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e8fec30..2de29d5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -59,6 +59,12 @@ dependencies: morgan: specifier: ^1.10.1 version: 1.10.1 + pg: + specifier: 8.13.1 + version: 8.13.1 + pg-protocol: + specifier: 1.7.0 + version: 1.7.0 slugify: specifier: ^1.6.6 version: 1.6.6 @@ -121,6 +127,9 @@ devDependencies: '@types/morgan': specifier: ^1.9.10 version: 1.9.10 + '@types/pg': + specifier: ^8.11.10 + version: 8.15.6 '@types/uuid': specifier: ^10.0.0 version: 10.0.0 @@ -133,6 +142,12 @@ devDependencies: '@typescript-eslint/parser': specifier: ^8.48.1 version: 8.48.1(eslint@9.39.1)(typescript@5.9.3) + db-migrate: + specifier: ^0.11.14 + version: 0.11.14 + 
db-migrate-pg: + specifier: ^1.5.2 + version: 1.5.2 eslint: specifier: ^9.39.1 version: 9.39.1 @@ -1423,6 +1438,11 @@ packages: /@bufbuild/protobuf@2.10.1: resolution: {integrity: sha512-ckS3+vyJb5qGpEYv/s1OebUHDi/xSNtfgw1wqKZo7MR9F2z+qXr0q5XagafAG/9O0QPVIUfST0smluYSTpYFkg==} + /@colors/colors@1.5.0: + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + dev: true + /@connectrpc/connect-node@2.1.1(@bufbuild/protobuf@2.10.1)(@connectrpc/connect@2.1.1): resolution: {integrity: sha512-s3TfsI1XF+n+1z6MBS9rTnFsxxR4Rw5wmdEnkQINli81ESGxcsfaEet8duzq8LVuuCupmhUsgpRo0Nv9pZkufg==} engines: {node: '>=20'} @@ -2650,6 +2670,14 @@ packages: dependencies: undici-types: 7.16.0 + /@types/pg@8.15.6: + resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==} + dependencies: + '@types/node': 24.10.1 + pg-protocol: 1.7.0 + pg-types: 2.2.0 + dev: true + /@types/qs@6.14.0: resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==} dev: true @@ -2953,6 +2981,22 @@ packages: resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} dev: false + /asn1@0.2.6: + resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} + dependencies: + safer-buffer: 2.1.2 + dev: true + + /async@2.6.4: + resolution: {integrity: sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==} + dependencies: + lodash: 4.17.21 + dev: true + + /async@3.2.3: + resolution: {integrity: sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==} + dev: true + /async@3.2.6: resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} dev: false 
@@ -3083,6 +3127,12 @@ packages: safe-buffer: 5.1.2 dev: false + /bcrypt-pbkdf@1.0.2: + resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} + dependencies: + tweetnacl: 0.14.5 + dev: true + /binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} @@ -3096,6 +3146,10 @@ packages: dev: false optional: true + /bluebird@3.7.2: + resolution: {integrity: sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==} + dev: true + /body-parser@1.20.4: resolution: {integrity: sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} @@ -3250,6 +3304,14 @@ packages: resolution: {integrity: sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==} dev: true + /cliui@6.0.0: + resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 6.2.0 + dev: true + /cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} @@ -3279,6 +3341,11 @@ packages: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} dev: true + /colors@1.0.3: + resolution: {integrity: sha512-pFGrxThWcWQ2MsAz6RtgeWe4NK2kUE1WfsrvvlctdII745EW9I0yflqhe7++M5LEc7bV2c/9/5zc8sFcpL0Drw==} + engines: {node: '>=0.1.90'} + dev: true + /comment-parser@1.4.1: resolution: {integrity: sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==} engines: {node: '>= 12.0.0'} @@ -3327,6 +3394,15 @@ packages: vary: 1.1.2 dev: false + /cpu-features@0.0.2: + 
resolution: {integrity: sha512-/2yieBqvMcRj8McNzkycjW2v3OIUOibBfd2dLEJ0nWts8NobAxwiyw9phVNS6oDL8x8tz9F7uNVFEVpJncQpeA==} + engines: {node: '>=8.0.0'} + requiresBuild: true + dependencies: + nan: 2.24.0 + dev: true + optional: true + /create-jest@29.7.0(@types/node@24.10.1)(ts-node@10.9.2): resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -3359,6 +3435,56 @@ packages: which: 2.0.2 dev: true + /cycle@1.0.3: + resolution: {integrity: sha512-TVF6svNzeQCOpjCqsy0/CSy8VgObG3wXusJ73xW2GbG5rGx7lC8zxDSURicsXI2UsGdi2L0QNRCi745/wUDvsA==} + engines: {node: '>=0.4.0'} + dev: true + + /db-migrate-base@2.3.1: + resolution: {integrity: sha512-HewYQ3HPmy7NOWmhhMLg9TzN1StEtSqGL3w8IbBRCxEsJ+oM3bDUQ/z5fqpYKfIUK07mMXieCmZYwFpwWkSHDw==} + dependencies: + bluebird: 3.7.2 + dev: true + + /db-migrate-pg@1.5.2: + resolution: {integrity: sha512-agbT9biJi43E7wld9JgnpMKadYgIobMlRXdtRO8JLRWHI1Jc7mObl9pM7iv4AQ4UTLDgjtkqUqtXlfeWtRuRbA==} + dependencies: + bluebird: 3.7.2 + db-migrate-base: 2.3.1 + pg: 8.13.1 + semver: 7.7.3 + transitivePeerDependencies: + - pg-native + dev: true + + /db-migrate-shared@1.2.0: + resolution: {integrity: sha512-65k86bVeHaMxb2L0Gw3y5V+CgZSRwhVQMwDMydmw5MvIpHHwD6SmBciqIwHsZfzJ9yzV/yYhdRefRM6FV5/siw==} + dev: true + + /db-migrate@0.11.14: + resolution: {integrity: sha512-8e+/YsIlM3d69hj+cb6qG6WyubR8nwXfDf9gGLWl4AxHpI6mTomcqhRLNfPrs7Le7AZv2eEsgK8hkXDSQqfIvg==} + engines: {node: '>=8.0.0'} + hasBin: true + dependencies: + balanced-match: 1.0.2 + bluebird: 3.7.2 + db-migrate-shared: 1.2.0 + deep-extend: 0.6.0 + dotenv: 5.0.1 + final-fs: 1.6.1 + inflection: 1.13.4 + mkdirp: 0.5.6 + parse-database-url: 0.3.0 + prompt: 1.3.0 + rc: 1.2.8 + resolve: 1.22.11 + semver: 5.7.2 + tunnel-ssh: 4.1.6 + yargs: 15.4.1 + transitivePeerDependencies: + - supports-color + dev: true + /debug@2.6.9: resolution: {integrity: 
sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} peerDependencies: @@ -3368,7 +3494,6 @@ packages: optional: true dependencies: ms: 2.0.0 - dev: false /debug@4.4.3: resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} @@ -3382,6 +3507,11 @@ packages: ms: 2.1.3 dev: true + /decamelize@1.2.0: + resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} + engines: {node: '>=0.10.0'} + dev: true + /dedent@1.7.0: resolution: {integrity: sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==} peerDependencies: @@ -3391,6 +3521,11 @@ packages: optional: true dev: true + /deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + dev: true + /deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} dev: true @@ -3430,6 +3565,11 @@ packages: engines: {node: '>=12'} dev: false + /dotenv@5.0.1: + resolution: {integrity: sha512-4As8uPrjfwb7VXC+WnLCbXK7y+Ueb2B3zgNCePYfhxS1PYeaO1YTeplffTEcbfLhvFNGLAz90VvJs9yomG7bow==} + engines: {node: '>=4.6.0'} + dev: true + /dunder-proto@1.0.1: resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} engines: {node: '>= 0.4'} @@ -3799,6 +3939,11 @@ packages: - supports-color dev: false + /eyes@0.1.8: + resolution: {integrity: sha512-GipyPsXO1anza0AOZdy69Im7hGFCNB7Y/NGjDlZGJ3GJJLtwNSb2vrzYrTYJRrRloVx7pl+bhUaTB8yiccPvFQ==} + engines: {node: '> 0.1.90'} + dev: true + /fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} dev: true @@ -3860,6 +4005,13 @@ packages: to-regex-range: 5.0.1 dev: 
true + /final-fs@1.6.1: + resolution: {integrity: sha512-r5dgz23H8qh1LxKVJK84zet2PhWSWkIOgbLVUd5PlNFAULD/kCDBH9JEMwJt9dpdTnLsSD4rEqS56p2MH7Wbvw==} + dependencies: + node-fs: 0.1.7 + when: 2.0.1 + dev: true + /finalhandler@1.3.2: resolution: {integrity: sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==} engines: {node: '>= 0.8'} @@ -4156,6 +4308,11 @@ packages: engines: {node: '>=0.8.19'} dev: true + /inflection@1.13.4: + resolution: {integrity: sha512-6I/HUDeYFfuNCVS3td055BaXBwKYuzw7K3ExVMStBowKo9oOAMJIXIHvdyR3iboTCp1b+1i5DSkIZTcwIktuDw==} + engines: {'0': node >= 0.4.0} + dev: true + /inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. @@ -4167,6 +4324,10 @@ packages: /inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + /ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + dev: true + /ipaddr.js@1.9.1: resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} engines: {node: '>= 0.10'} @@ -4230,6 +4391,10 @@ packages: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true + /isstream@0.1.2: + resolution: {integrity: sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==} + dev: true + /istanbul-lib-coverage@3.2.2: resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} engines: {node: '>=8'} @@ -4851,6 +5016,10 @@ 
packages: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} dev: true + /lodash.defaults@4.2.0: + resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} + dev: true + /lodash.includes@4.3.0: resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} dev: false @@ -4887,6 +5056,10 @@ packages: resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} dev: false + /lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + dev: true + /lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} dependencies: @@ -4988,12 +5161,24 @@ packages: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} dev: true + /mkdirp@0.5.6: + resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} + hasBin: true + dependencies: + minimist: 1.2.8 + dev: true + /mkdirp@1.0.4: resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} engines: {node: '>=10'} hasBin: true dev: true + /mongodb-uri@0.9.7: + resolution: {integrity: sha512-s6BdnqNoEYfViPJgkH85X5Nw5NpzxN8hoflKLweNa7vBxt2V7kaS06d74pAtqDxde8fn4r9h4dNdLiFGoNV0KA==} + engines: {node: '>= 0.6.0'} + dev: true + /morgan@1.10.1: resolution: {integrity: sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==} engines: {node: '>= 0.8.0'} @@ -5009,15 +5194,17 @@ packages: /ms@2.0.0: resolution: {integrity: 
sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} - dev: false /ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + /mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + dev: true + /nan@2.24.0: resolution: {integrity: sha512-Vpf9qnVW1RaDkoNKFUvfxqAbtI8ncb8OJlqZ9wwpXzWPEsvsB1nvdUi6oYrHIkQ1Y/tMDnr1h4nczS0VB9Xykg==} requiresBuild: true - dev: false optional: true /natural-compare@1.4.0: @@ -5045,6 +5232,12 @@ packages: whatwg-url: 5.0.0 dev: true + /node-fs@0.1.7: + resolution: {integrity: sha512-XqDBlmUKgDGe76+lZ/0sRBF3XW2vVcK07+ZPvdpUTK8jrvtPahUd0aBqJ9+ZjB01ANjZLuvK3O/eoMVmz62rpA==} + engines: {node: '>=0.1.97'} + os: [linux, darwin, freebsd, win32, smartos, sunos] + dev: true + /node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true @@ -5163,6 +5356,13 @@ packages: callsites: 3.1.0 dev: true + /parse-database-url@0.3.0: + resolution: {integrity: sha512-YRxDoVBAUk3ksGF9pud+aqWwXmThZzhX9Z1PPxKU03BB3/gu2RcgyMA4rktMYhkIJ9KxwW7lIj00U+TSNz80wg==} + engines: {node: '>= 0.6'} + dependencies: + mongodb-uri: 0.9.7 + dev: true + /parse-json@5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} @@ -5201,6 +5401,60 @@ packages: resolution: {integrity: sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==} dev: false + /pg-cloudflare@1.2.7: + resolution: {integrity: sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==} + requiresBuild: true + optional: true + + /pg-connection-string@2.9.1: + resolution: {integrity: 
sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + + /pg-int8@1.0.1: + resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} + engines: {node: '>=4.0.0'} + + /pg-pool@3.10.1(pg@8.13.1): + resolution: {integrity: sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==} + peerDependencies: + pg: '>=8.0' + dependencies: + pg: 8.13.1 + + /pg-protocol@1.7.0: + resolution: {integrity: sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ==} + + /pg-types@2.2.0: + resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} + engines: {node: '>=4'} + dependencies: + pg-int8: 1.0.1 + postgres-array: 2.0.0 + postgres-bytea: 1.0.0 + postgres-date: 1.0.7 + postgres-interval: 1.2.0 + + /pg@8.13.1: + resolution: {integrity: sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==} + engines: {node: '>= 8.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + dependencies: + pg-connection-string: 2.9.1 + pg-pool: 3.10.1(pg@8.13.1) + pg-protocol: 1.7.0 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.2.7 + + /pgpass@1.0.5: + resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} + dependencies: + split2: 4.2.0 + /picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -5226,6 +5480,24 @@ packages: find-up: 4.1.0 dev: true + /postgres-array@2.0.0: + resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} + engines: {node: '>=4'} + + /postgres-bytea@1.0.0: + resolution: {integrity: 
sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} + engines: {node: '>=0.10.0'} + + /postgres-date@1.0.7: + resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} + engines: {node: '>=0.10.0'} + + /postgres-interval@1.2.0: + resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} + engines: {node: '>=0.10.0'} + dependencies: + xtend: 4.0.2 + /prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -5265,6 +5537,17 @@ packages: engines: {node: '>=0.4.0'} dev: true + /prompt@1.3.0: + resolution: {integrity: sha512-ZkaRWtaLBZl7KKAKndKYUL8WqNT+cQHKRZnT4RYYms48jQkFw3rrBL+/N5K/KtdEveHkxs982MX2BkDKub2ZMg==} + engines: {node: '>= 6.0.0'} + dependencies: + '@colors/colors': 1.5.0 + async: 3.2.3 + read: 1.0.7 + revalidator: 0.1.8 + winston: 2.4.7 + dev: true + /prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} @@ -5316,10 +5599,27 @@ packages: unpipe: 1.0.0 dev: false + /rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + dev: true + /react-is@18.3.1: resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} dev: true + /read@1.0.7: + resolution: {integrity: sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==} + engines: {node: '>=0.8'} + dependencies: + mute-stream: 0.0.8 + dev: true + /readdirp@3.6.0: resolution: {integrity: 
sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -5381,6 +5681,10 @@ packages: engines: {node: '>=0.10.0'} dev: true + /require-main-filename@2.0.0: + resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} + dev: true + /resolve-cwd@3.0.0: resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} engines: {node: '>=8'} @@ -5417,6 +5721,11 @@ packages: supports-preserve-symlinks-flag: 1.0.0 dev: true + /revalidator@0.1.8: + resolution: {integrity: sha512-xcBILK2pA9oh4SiinPEZfhP8HfrB/ha+a2fTMyl7Om2WjlDVrOQy99N2MXXlUHqGJz4qEu2duXxHJjDWuK/0xg==} + engines: {node: '>= 0.4.0'} + dev: true + /rimraf@2.7.1: resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} deprecated: Rimraf versions prior to v4 are no longer supported @@ -5467,7 +5776,6 @@ packages: /safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - dev: false /scslre@0.3.0: resolution: {integrity: sha512-3A6sD0WYP7+QrjbfNA2FN3FsOaGGFoekCVgTyypy53gPxhbkCIjtO6YWgdrfM+n/8sI8JeXZOIxsHjMTNxQ4nQ==} @@ -5478,6 +5786,11 @@ packages: regexp-ast-analysis: 0.7.1 dev: true + /semver@5.7.2: + resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} + hasBin: true + dev: true + /semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -5542,6 +5855,10 @@ packages: - supports-color dev: false + /set-blocking@2.0.0: + resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} + dev: true + /setprototypeof@1.2.0: resolution: {integrity: 
sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} dev: false @@ -5635,10 +5952,30 @@ packages: engines: {node: '>=0.10.0'} dev: true + /split2@4.2.0: + resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} + engines: {node: '>= 10.x'} + /sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} dev: true + /ssh2@1.4.0: + resolution: {integrity: sha512-XvXwcXKvS452DyQvCa6Ct+chpucwc/UyxgliYz+rWXJ3jDHdtBb9xgmxJdMmnIn5bpgGAEV3KaEsH98ZGPHqwg==} + engines: {node: '>=10.16.0'} + requiresBuild: true + dependencies: + asn1: 0.2.6 + bcrypt-pbkdf: 1.0.2 + optionalDependencies: + cpu-features: 0.0.2 + nan: 2.24.0 + dev: true + + /stack-trace@0.0.10: + resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==} + dev: true + /stack-utils@2.0.6: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} @@ -5905,6 +6242,20 @@ packages: fsevents: 2.3.3 dev: true + /tunnel-ssh@4.1.6: + resolution: {integrity: sha512-y7+x+T3F3rkx2Zov5Tk9DGfeEBVAdWU3A/91E0Dk5rrZ/VFIlpV2uhhRuaISJUdyG0N+Lcp1fXZMXz+ovPt5vA==} + dependencies: + debug: 2.6.9 + lodash.defaults: 4.2.0 + ssh2: 1.4.0 + transitivePeerDependencies: + - supports-color + dev: true + + /tweetnacl@0.14.5: + resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} + dev: true + /type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -6074,6 +6425,14 @@ packages: webidl-conversions: 3.0.1 dev: true + /when@2.0.1: + resolution: {integrity: 
sha512-h0l57vFJ4YQe1/U+C+oqBfAoopxXABUm6VqWM0x2gg4pARru4IUWo/PAxyawWgbGtndXrZbA41EzsfxacZVEXQ==} + dev: true + + /which-module@2.0.1: + resolution: {integrity: sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==} + dev: true + /which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} @@ -6082,6 +6441,18 @@ packages: isexe: 2.0.0 dev: true + /winston@2.4.7: + resolution: {integrity: sha512-vLB4BqzCKDnnZH9PHGoS2ycawueX4HLqENXQitvFHczhgW2vFpSOn31LZtVr1KU8YTw7DS4tM+cqyovxo8taVg==} + engines: {node: '>= 0.10.0'} + dependencies: + async: 2.6.4 + colors: 1.0.3 + cycle: 1.0.3 + eyes: 0.1.8 + isstream: 0.1.2 + stack-trace: 0.0.10 + dev: true + /word-wrap@1.2.5: resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} engines: {node: '>=0.10.0'} @@ -6091,6 +6462,15 @@ packages: resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} dev: true + /wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + dev: true + /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} @@ -6115,6 +6495,9 @@ packages: /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + + /y18n@4.0.3: + resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==} dev: true /y18n@5.0.8: @@ -6126,11 +6509,36 @@ packages: resolution: {integrity: 
sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} dev: true + /yargs-parser@18.1.3: + resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} + engines: {node: '>=6'} + dependencies: + camelcase: 5.3.1 + decamelize: 1.2.0 + dev: true + /yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} dev: true + /yargs@15.4.1: + resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==} + engines: {node: '>=8'} + dependencies: + cliui: 6.0.0 + decamelize: 1.2.0 + find-up: 4.1.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + require-main-filename: 2.0.0 + set-blocking: 2.0.0 + string-width: 4.2.3 + which-module: 2.0.1 + y18n: 4.0.3 + yargs-parser: 18.1.3 + dev: true + /yargs@17.7.2: resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} diff --git a/tsconfig.json b/tsconfig.json index 3daffc9..458b95a 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -2,6 +2,7 @@ "compilerOptions": { "strict": true, "esModuleInterop": true, + "isolatedModules": true, "sourceMap": true, "module": "NodeNext", "target": "ESNext",