Skip to content

Commit

Permalink
Typed representation of serialized SQLite JSON/JSONB
Browse files Browse the repository at this point in the history
Ref #659

Better enforces the difference between a column that is `NULL` and a
column that contains JSON containing `null`, and removes the need for
unsafe manual casting.
  • Loading branch information
goto-bus-stop committed Nov 26, 2024
1 parent f65fdd6 commit c9b06be
Show file tree
Hide file tree
Showing 10 changed files with 197 additions and 86 deletions.
10 changes: 8 additions & 2 deletions src/controllers/search.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import lodash from 'lodash';
import { SourceNotFoundError } from '../errors/index.js';
import toListResponse from '../utils/toListResponse.js';
import { json, jsonb } from '../utils/sqlite.js';

const { isEqual } = lodash;

Expand Down Expand Up @@ -47,7 +48,7 @@ function updateSourceData(uw, updates) {
for (const [id, sourceData] of updates.entries()) {
await tx.updateTable('media')
.where('id', '=', id)
.set({ sourceData })
.set({ sourceData: sourceData == null ? null : jsonb(sourceData) })
.executeTakeFirst();
}
});
Expand Down Expand Up @@ -91,7 +92,12 @@ async function search(req) {
const mediasNeedSourceDataUpdate = new Map();

const mediasInSearchResults = await db.selectFrom('media')
.select(['id', 'sourceType', 'sourceID', 'sourceData'])
.select([
'id',
'sourceType',
'sourceID',
(eb) => json(eb.fn.coalesce(eb.ref('sourceData'), jsonb(null))).as('sourceData'),
])
.where('sourceType', '=', sourceName)
.where('sourceID', 'in', Array.from(searchResultsByID.keys()))
.execute();
Expand Down
14 changes: 9 additions & 5 deletions src/plugins/acl.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
import { sql } from 'kysely';
import defaultRoles from '../config/defaultRoles.js';
import routes from '../routes/acl.js';
import { isForeignKeyError, jsonb, jsonEach } from '../utils/sqlite.js';
import {
isForeignKeyError,
fromJson,
json,
jsonb,
jsonEach,
} from '../utils/sqlite.js';
import { RoleNotFoundError } from '../errors/index.js';

/**
Expand Down Expand Up @@ -75,14 +80,13 @@ class Acl {
* @returns {Promise<Record<string, Permission[]>>}
*/
async getAllRoles(tx = this.#uw.db) {
// TODO: `json()` should be strongly typed
const list = await tx.selectFrom('roles')
.select(['id', sql`json(permissions)`.as('permissions')])
.select(['id', (eb) => json(eb.ref('permissions')).as('permissions')])
.execute();

const roles = Object.fromEntries(list.map((role) => [
role.id,
/** @type {Permission[]} */ (JSON.parse(/** @type {string} */ (role.permissions))),
fromJson(role.permissions),
]));

return roles;
Expand Down
33 changes: 20 additions & 13 deletions src/plugins/booth.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import RedLock from 'redlock';
import { EmptyPlaylistError, PlaylistItemNotFoundError } from '../errors/index.js';
import routes from '../routes/booth.js';
import { randomUUID } from 'node:crypto';
import { jsonb } from '../utils/sqlite.js';
import { fromJson, jsonb, jsonGroupArray } from '../utils/sqlite.js';

/**
* @typedef {import('../schema.js').UserID} UserID
Expand Down Expand Up @@ -136,17 +136,17 @@ class Booth {
(eb) => eb.selectFrom('feedback')
.where('historyEntryID', '=', eb.ref('historyEntries.id'))
.where('vote', '=', 1)
.select((eb) => eb.fn.agg('json_group_array', ['userID']).as('userIDs'))
.select((eb) => jsonGroupArray(eb.ref('userID')).as('userIDs'))
.as('upvotes'),
(eb) => eb.selectFrom('feedback')
.where('historyEntryID', '=', eb.ref('historyEntries.id'))
.where('vote', '=', -1)
.select((eb) => eb.fn.agg('json_group_array', ['userID']).as('userIDs'))
.select((eb) => jsonGroupArray(eb.ref('userID')).as('userIDs'))
.as('downvotes'),
(eb) => eb.selectFrom('feedback')
.where('historyEntryID', '=', eb.ref('historyEntries.id'))
.where('favorite', '=', 1)
.select((eb) => eb.fn.agg('json_group_array', ['userID']).as('userIDs'))
.select((eb) => jsonGroupArray(eb.ref('userID')).as('userIDs'))
.as('favorites'),
])
.where('historyEntries.id', '=', historyID)
Expand Down Expand Up @@ -179,9 +179,9 @@ class Booth {
end: entry.end,
createdAt: entry.createdAt,
},
upvotes: /** @type {UserID[]} */ (JSON.parse(entry.upvotes)),
downvotes: /** @type {UserID[]} */ (JSON.parse(entry.downvotes)),
favorites: /** @type {UserID[]} */ (JSON.parse(entry.favorites)),
upvotes: entry.upvotes != null ? fromJson(entry.upvotes) : [],
downvotes: entry.downvotes != null ? fromJson(entry.downvotes) : [],
favorites: entry.favorites != null ? fromJson(entry.favorites) : [],
} : null;
}

Expand Down Expand Up @@ -236,6 +236,7 @@ class Booth {
end: playlistItem.end,
/** @type {null | JsonObject} */
sourceData: null,
createdAt: new Date(),
},
};
}
Expand Down Expand Up @@ -435,11 +436,12 @@ class Booth {
title: next.playlistItem.title,
}, 'next track');
const sourceData = await this.#getSourceDataForPlayback(next);
if (sourceData) {
next.historyEntry.sourceData = sourceData;
}
const historyEntry = await tx.insertInto('historyEntries')
.returningAll()

// Conservatively, we should take *all* the data from the inserted values.
// But then we need to reparse the source data... It's easier to only take
// the actually generated value from there :')
const { createdAt } = await tx.insertInto('historyEntries')
.returning('createdAt')
.values({
id: next.historyEntry.id,
userID: next.user.id,
Expand All @@ -452,8 +454,13 @@ class Booth {
})
.executeTakeFirstOrThrow();

if (sourceData != null) {
next.historyEntry.sourceData = sourceData;
}
next.historyEntry.createdAt = createdAt;

result = {
historyEntry,
historyEntry: next.historyEntry,
playlist: next.playlist,
user: next.user,
media: next.media,
Expand Down
12 changes: 6 additions & 6 deletions src/plugins/configStore.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import jsonMergePatch from 'json-merge-patch';
import sjson from 'secure-json-parse';
import ValidationError from '../errors/ValidationError.js';
import { sql } from 'kysely';
import { jsonb } from '../utils/sqlite.js';
import { fromJson, json, jsonb } from '../utils/sqlite.js';

/**
* @typedef {import('type-fest').JsonObject} JsonObject
Expand Down Expand Up @@ -114,7 +114,7 @@ class ConfigStore {

const previous = await db.transaction().execute(async (tx) => {
const row = await tx.selectFrom('configuration')
.select(sql`json(value)`.as('value'))
.select((eb) => json(eb.ref('value')).as('value'))
.where('name', '=', name)
.executeTakeFirst();

Expand All @@ -123,7 +123,7 @@ class ConfigStore {
.onConflict((oc) => oc.column('name').doUpdateSet({ value: jsonb(value) }))
.execute();

return row?.value != null ? JSON.parse(/** @type {string} */ (row.value)) : null;
return row?.value != null ? fromJson(row.value) : null;
});

return previous;
Expand All @@ -137,14 +137,14 @@ class ConfigStore {
const { db } = this.#uw;

const row = await db.selectFrom('configuration')
.select(sql`json(value)`.as('value'))
.select((eb) => json(eb.ref('value')).as('value'))
.where('name', '=', key)
.executeTakeFirst();
if (!row) {
if (row == null || row.value == null) {
return null;
}

return JSON.parse(/** @type {string} */ (row.value));
return fromJson(row.value);
}

/**
Expand Down
27 changes: 14 additions & 13 deletions src/plugins/history.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import lodash from 'lodash';
import Page from '../Page.js';
import { fromJson, json, jsonb } from '../utils/sqlite.js';

const { clamp } = lodash;

Expand All @@ -14,7 +15,8 @@ const historyEntrySelection = /** @type {const} */ ([
'historyEntries.title',
'historyEntries.start',
'historyEntries.end',
'historyEntries.sourceData',
/** @param {import('kysely').ExpressionBuilder<Database, 'historyEntries'>} eb */
(eb) => json(eb.fn.coalesce(eb.ref('historyEntries.sourceData'), jsonb(null))).as('sourceData'),
'historyEntries.createdAt as playedAt',
'users.id as user.id',
'users.username as user.username',
Expand All @@ -27,7 +29,8 @@ const historyEntrySelection = /** @type {const} */ ([
'media.duration as media.duration',
'media.sourceType as media.sourceType',
'media.sourceID as media.sourceID',
'media.sourceData as media.sourceData',
/** @param {import('kysely').ExpressionBuilder<Database, 'media'>} eb */
(eb) => json(eb.fn.coalesce(eb.ref('media.sourceData'), jsonb(null))).as('media.sourceData'),
/** @param {import('kysely').ExpressionBuilder<Database, 'historyEntries'>} eb */
(eb) => eb.selectFrom('feedback')
.where('historyEntryID', '=', eb.ref('historyEntries.id'))
Expand Down Expand Up @@ -55,7 +58,7 @@ const historyEntrySelection = /** @type {const} */ ([
* title: string,
* start: number,
* end: number,
* sourceData: import('type-fest').JsonObject | null,
* sourceData: import('../utils/sqlite.js').SerializedJSON<import('type-fest').JsonObject | null>,
* playedAt: Date,
* 'user.id': UserID,
* 'user.username': string,
Expand All @@ -64,14 +67,15 @@ const historyEntrySelection = /** @type {const} */ ([
* 'media.id': MediaID,
* 'media.sourceType': string,
* 'media.sourceID': string,
* 'media.sourceData': import('type-fest').JsonObject | null,
* 'media.sourceData': import('../utils/sqlite.js').SerializedJSON<
* import('type-fest').JsonObject | null>,
* 'media.artist': string,
* 'media.title': string,
* 'media.thumbnail': string,
* 'media.duration': number,
* upvotes: string,
* downvotes: string,
* favorites: string,
* upvotes: import('../utils/sqlite.js').SerializedJSON<UserID>,
* downvotes: import('../utils/sqlite.js').SerializedJSON<UserID>,
* favorites: import('../utils/sqlite.js').SerializedJSON<UserID>,
* }} row
*/
function historyEntryFromRow(row) {
Expand Down Expand Up @@ -101,12 +105,9 @@ function historyEntryFromRow(row) {
duration: row['media.duration'],
},
},
/** @type {UserID[]} */
upvotes: JSON.parse(row.upvotes),
/** @type {UserID[]} */
downvotes: JSON.parse(row.downvotes),
/** @type {UserID[]} */
favorites: JSON.parse(row.favorites),
upvotes: fromJson(row.upvotes),
downvotes: fromJson(row.downvotes),
favorites: fromJson(row.favorites),
};
}

Expand Down
Loading

0 comments on commit c9b06be

Please sign in to comment.