Merge pull request #482 from jsdelivr/new-adopted-table
feat: change adopted probes sql table
Showing 24 changed files with 784 additions and 144 deletions.
@@ -0,0 +1,137 @@
import { createHash } from 'node:crypto';
import type { Knex } from 'knex';
import TTLCache from '@isaacs/ttlcache';
import { scopedLogger } from '../logger.js';
import { client } from '../sql/client.js';

export const GP_TOKENS_TABLE = 'gp_tokens';
export const USERS_TABLE = 'directus_users';

const logger = scopedLogger('auth');

const TOKEN_TTL = 2 * 60 * 1000;

export type Token = {
	user_created: string,
	value: string,
	expire: Date | null,
	origins: string[],
	date_last_used: Date | null
}

type Row = Omit<Token, 'origins'> & {
	origins: string | null,
}

export class Auth {
	private validTokens = new TTLCache<string, Token>({ ttl: TOKEN_TTL });
	private invalidTokens = new TTLCache<string, true>({ ttl: TOKEN_TTL });
	constructor (private readonly sql: Knex) {}

	scheduleSync () {
		setTimeout(() => {
			this.syncTokens()
				.finally(() => this.scheduleSync())
				.catch(error => logger.error(error));
		}, 60_000);
	}

	async syncTokens () {
		const tokens = await this.fetchTokens();
		const newValidTokens = new TTLCache<string, Token>({ ttl: TOKEN_TTL });
		const newInvalidTokens = new TTLCache<string, true>({ ttl: TOKEN_TTL });

		tokens.forEach((token) => {
			if (token.expire && this.isExpired(token.expire)) {
				newInvalidTokens.set(token.value, true);
			} else {
				newValidTokens.set(token.value, token);
			}
		});

		this.validTokens = newValidTokens;
		this.invalidTokens = newInvalidTokens;
	}

	async syncSpecificToken (hash: string) {
		const tokens = await this.fetchTokens({ value: hash });

		if (tokens.length === 0) {
			this.invalidTokens.set(hash, true);
			return undefined;
		}

		const token = tokens[0]!;

		if (token.expire && this.isExpired(token.expire)) {
			this.invalidTokens.set(hash, true);
			return undefined;
		}

		this.validTokens.set(hash, token);
		return token;
	}

	async fetchTokens (filter: Partial<Row> = {}) {
		const rows = await this.sql(GP_TOKENS_TABLE).where(filter)
			.select<Row[]>([ 'user_created', 'value', 'expire', 'origins', 'date_last_used' ]);

		const tokens: Token[] = rows.map(row => ({
			...row,
			origins: (row.origins ? JSON.parse(row.origins) as string[] : []),
		}));

		return tokens;
	}

	async validate (tokenString: string, origin: string) {
		const bytes = Buffer.from(tokenString, 'base64');
		const hash = createHash('sha256').update(bytes).digest('base64');

		if (this.invalidTokens.get(hash)) {
			return null;
		}

		let token = this.validTokens.get(hash);

		if (!token) {
			token = await this.syncSpecificToken(hash);
		}

		if (!token) {
			return null;
		}

		if (!this.isValidOrigin(origin, token.origins)) {
			return null;
		}

		await this.updateLastUsedDate(token);
		return token.user_created;
	}

	private async updateLastUsedDate (token: Token) {
		if (!token.date_last_used || !this.isToday(token.date_last_used)) {
			const date = new Date();
			await this.sql(GP_TOKENS_TABLE).where({ value: token.value }).update({ date_last_used: date });
			token.date_last_used = date;
		}
	}

	private isExpired (date: Date) {
		const currentDate = new Date();
		currentDate.setHours(0, 0, 0, 0);
		return date < currentDate;
	}

	private isValidOrigin (origin: string, validOrigins: string[]) {
		return validOrigins.length > 0 ? validOrigins.includes(origin) : true;
	}

	private isToday (date: Date) {
		const currentDate = new Date();
		return date.toDateString() === currentDate.toDateString();
	}
}

export const auth = new Auth(client);
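For context, the sketch below shows roughly how this class is meant to be used: tokens are random bytes handed to the user in base64, only their SHA-256 hash is stored in gp_tokens, and validate() re-hashes whatever the client presents before checking the caches. The import path, the generateToken helper, the placeholder token and the origin URL are assumptions for illustration, not part of this diff.

// Hypothetical wiring, assuming the module above is exposed as ./lib/auth.js.
import { createHash, randomBytes } from 'node:crypto';
import { auth } from './lib/auth.js';

// Only the SHA-256 hash of a token would live in gp_tokens; the plain base64
// value is shown to the user once and never persisted.
const generateToken = () => {
	const bytes = randomBytes(32);
	return {
		plain: bytes.toString('base64'),
		hashed: createHash('sha256').update(bytes).digest('base64'),
	};
};

// At startup: load all token hashes into the in-memory TTL caches and keep
// refreshing them every 60 seconds.
await auth.syncTokens();
auth.scheduleSync();

// Per request: validate() re-hashes the presented token, checks the caches
// (falling back to the DB for unknown hashes) and returns the owning user id,
// or null if the token is unknown, expired, or used from a disallowed origin.
const userId = await auth.validate('BASE64_TOKEN_FROM_CLIENT', 'https://dash.globalping.io');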
@@ -0,0 +1,30 @@
import { auth } from '../auth.js';
import type { ExtendedMiddleware } from '../../../types.js';

export const authenticate: ExtendedMiddleware = async (ctx, next) => {
	const { headers } = ctx.request;

	if (headers && headers.authorization) {
		const parts = headers.authorization.split(' ');

		if (parts.length !== 2 || parts[0] !== 'Bearer') {
			ctx.status = 401;
			return;
		}

		const token = parts[1]!;
		const origin = ctx.get('Origin');
		const userId = await auth.validate(token, origin);

		if (!userId) {
			ctx.status = 401;
			return;
		}

		ctx.state.userId = userId;
	}

	return next();
};

export type AuthenticateState = { userId?: string };
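Note that requests without an Authorization header still pass through as anonymous; only malformed or invalid Bearer tokens get a 401. A rough sketch of how the middleware could be mounted with @koa/router follows; the route path and import paths are assumptions, not taken from this diff.

// Hypothetical router setup, assuming the middleware file above.
import Koa from 'koa';
import Router from '@koa/router';
import { authenticate } from './lib/http/middleware/authenticate.js';

const app = new Koa();
const router = new Router();

// authenticate only populates ctx.state.userId when a valid Bearer token is
// presented from an allowed origin; anonymous requests continue unchanged.
router.post('/v1/measurements', authenticate, async (ctx) => {
	ctx.body = { authenticated: Boolean(ctx.state.userId) };
});

app.use(router.routes());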
@@ -0,0 +1,59 @@
import config from 'config';
import type { Context } from 'koa';
import { RateLimiterRedis, RateLimiterRes } from 'rate-limiter-flexible';
import requestIp from 'request-ip';
import { createPersistentRedisClient } from './redis/persistent-client.js';
import createHttpError from 'http-errors';
import type { ExtendedContext } from '../types.js';

const redisClient = await createPersistentRedisClient({ legacyMode: true });

export const anonymousRateLimiter = new RateLimiterRedis({
	storeClient: redisClient,
	keyPrefix: 'rate:anon',
	points: config.get<number>('measurement.anonymousRateLimit'),
	duration: config.get<number>('measurement.rateLimitReset'),
});

export const authenticatedRateLimiter = new RateLimiterRedis({
	storeClient: redisClient,
	keyPrefix: 'rate:auth',
	points: config.get<number>('measurement.authenticatedRateLimit'),
	duration: config.get<number>('measurement.rateLimitReset'),
});

export const rateLimit = async (ctx: ExtendedContext, numberOfProbes: number) => {
	if (ctx['isAdmin']) {
		return;
	}

	let rateLimiter: RateLimiterRedis;
	let id: string;

	if (ctx.state.userId) {
		rateLimiter = authenticatedRateLimiter;
		id = ctx.state.userId;
	} else {
		rateLimiter = anonymousRateLimiter;
		id = requestIp.getClientIp(ctx.req) ?? '';
	}

	try {
		const result = await rateLimiter.consume(id, numberOfProbes);
		setRateLimitHeaders(ctx, result, rateLimiter);
	} catch (error) {
		if (error instanceof RateLimiterRes) {
			const result = await rateLimiter.reward(id, numberOfProbes);
			setRateLimitHeaders(ctx, result, rateLimiter);
			throw createHttpError(429, 'Too Many Probes Requested', { type: 'too_many_probes' });
		}

		throw createHttpError(500);
	}
};

const setRateLimitHeaders = (ctx: Context, result: RateLimiterRes, rateLimiter: RateLimiterRedis) => {
	ctx.set('X-RateLimit-Reset', `${Math.round(result.msBeforeNext / 1000)}`);
	ctx.set('X-RateLimit-Limit', `${rateLimiter.points}`);
	ctx.set('X-RateLimit-Remaining', `${result.remainingPoints}`);
};
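To make the contract concrete, a hedged usage sketch follows. rateLimit() picks the bucket based on the ctx.state.userId set by the authenticate middleware above, so authenticated users draw from the higher measurement.authenticatedRateLimit budget; the handler name and import paths below are assumptions, not part of this diff.

// Hypothetical caller, assuming the files above are exposed at these paths.
import type { ExtendedContext } from './types.js';
import { rateLimit } from './lib/ratelimit.js';

const createMeasurementHandler = async (ctx: ExtendedContext) => {
	const requestedProbes = 5;

	// Consumes `requestedProbes` points from either the authenticated bucket
	// (keyed by ctx.state.userId) or the anonymous bucket (keyed by client IP),
	// sets the X-RateLimit-* response headers, and throws a 429 http-error when
	// the budget is exhausted. Admin requests skip the limiter entirely.
	await rateLimit(ctx, requestedProbes);

	// ... create the measurement here ...
};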