-
Notifications
You must be signed in to change notification settings - Fork 26
feat: implement asynchronous audit logging with RabbitMQ and DLQ #100
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,105 @@ | ||
| import { startAuditConsumer } from "./auditConsumer"; | ||
| import { prisma } from "../config/database"; | ||
| import { | ||
| getRabbitMQChannel, | ||
| QUEUES, | ||
| assertQueueWithDLQ, | ||
| } from "../config/rabbitmq"; | ||
|
|
||
| jest.mock("../config/database", () => ({ | ||
| prisma: { | ||
| auditTrail: { | ||
| create: jest.fn(), | ||
| }, | ||
| }, | ||
| })); | ||
|
|
||
| jest.mock("../config/rabbitmq", () => ({ | ||
| getRabbitMQChannel: jest.fn(), | ||
| assertQueueWithDLQ: jest.fn().mockResolvedValue({}), | ||
| QUEUES: { | ||
| AUDIT_LOGS: "audit_logs", | ||
| }, | ||
| })); | ||
|
|
||
| jest.mock("../config/logger", () => ({ | ||
| logger: { | ||
| warn: jest.fn(), | ||
| error: jest.fn(), | ||
| info: jest.fn(), | ||
| }, | ||
| })); | ||
|
|
||
| describe("AuditConsumer", () => { | ||
| const mockChannel = { | ||
| consume: jest.fn(), | ||
| ack: jest.fn(), | ||
| nack: jest.fn(), | ||
| }; | ||
|
|
||
| const entry = { | ||
| eventType: "USER_LOGIN", | ||
| action: "LOGIN", | ||
| performedBy: "user-1", | ||
| timestamp: new Date().toISOString(), | ||
| }; | ||
|
|
||
| const mockMsg = { | ||
| content: Buffer.from(JSON.stringify(entry)), | ||
| }; | ||
|
|
||
| beforeEach(() => { | ||
| jest.clearAllMocks(); | ||
| (getRabbitMQChannel as jest.Mock).mockReturnValue(mockChannel); | ||
| }); | ||
|
|
||
| it("should consume and save audit entry to database", async () => { | ||
| (prisma.auditTrail.create as jest.Mock).mockResolvedValue({ id: "1" }); | ||
|
|
||
| // Trigger the consumer callback manually | ||
| mockChannel.consume.mockImplementation((queue, callback) => { | ||
| callback(mockMsg); | ||
| }); | ||
|
|
||
| await startAuditConsumer(); | ||
|
|
||
| expect(assertQueueWithDLQ).toHaveBeenCalledWith(QUEUES.AUDIT_LOGS, { | ||
| durable: true, | ||
| }); | ||
| expect(prisma.auditTrail.create).toHaveBeenCalledWith( | ||
| expect.objectContaining({ | ||
| data: expect.objectContaining({ | ||
| eventType: entry.eventType, | ||
| action: entry.action, | ||
| }), | ||
| }), | ||
| ); | ||
| expect(mockChannel.ack).toHaveBeenCalledWith(mockMsg); | ||
| }); | ||
|
|
||
| it("should retry on database failure and eventually nack to DLQ", async () => { | ||
| ((prisma.auditTrail.create as jest.Mock).mockRejectedValue( | ||
| new Error("DB connection failed"), | ||
| ), | ||
| // Fast forward timers for retry | ||
| jest.useFakeTimers()); | ||
|
|
||
| mockChannel.consume.mockImplementation((queue, callback) => { | ||
| callback(mockMsg); | ||
| }); | ||
|
|
||
| await startAuditConsumer(); | ||
|
|
||
| // We need to resolve the promises inside the loop | ||
| for (let i = 0; i < 4; i++) { | ||
| await Promise.resolve(); // allow the loop to run | ||
| jest.runAllTimers(); | ||
| await Promise.resolve(); | ||
| } | ||
|
|
||
| expect(prisma.auditTrail.create).toHaveBeenCalledTimes(4); // 0, 1, 2, 3 | ||
| expect(mockChannel.nack).toHaveBeenCalledWith(mockMsg, false, false); | ||
|
|
||
| jest.useRealTimers(); | ||
| }); | ||
| }); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,67 @@ | ||
| import { prisma } from "../config/database"; | ||
| import { logger } from "../config/logger"; | ||
| import { QUEUES, assertQueueWithDLQ } from "../config/rabbitmq"; | ||
| import { getRabbitMQChannel } from "../config/rabbitmq"; | ||
|
|
||
| const MAX_RETRIES = 3; | ||
| const INITIAL_BACKOFF_MS = 1000; | ||
|
|
||
| export async function startAuditConsumer() { | ||
| const channel = getRabbitMQChannel(); | ||
|
|
||
| await assertQueueWithDLQ(QUEUES.AUDIT_LOGS, { durable: true }); | ||
|
|
||
| channel.consume(QUEUES.AUDIT_LOGS, async (msg) => { | ||
| if (!msg) return; | ||
|
|
||
| const content = msg.content.toString(); | ||
| const entry = JSON.parse(content); | ||
|
Comment on lines
+17
to
+18
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Nack malformed messages instead of bypassing the DLQ path.
Suggested fix- const content = msg.content.toString();
- const entry = JSON.parse(content);
+ let entry: any;
+ try {
+ entry = JSON.parse(msg.content.toString());
+ } catch (error: any) {
+ logger.error("Invalid audit payload, moving message to DLQ", {
+ error: error.message || error,
+ });
+ channel.nack(msg, false, false);
+ return;
+ }Also applies to: 39-55 🤖 Prompt for AI Agents |
||
|
|
||
| let attempt = 0; | ||
| let success = false; | ||
|
|
||
| while (attempt <= MAX_RETRIES && !success) { | ||
| try { | ||
| await prisma.auditTrail.create({ | ||
| data: { | ||
| eventType: entry.eventType, | ||
| entityType: entry.entityType ?? null, | ||
| entityId: entry.entityId ?? null, | ||
| action: entry.action, | ||
| oldValue: entry.oldValue ?? (undefined as any), | ||
| newValue: entry.newValue ?? (undefined as any), | ||
| performedBy: entry.performedBy ?? null, | ||
| timestamp: entry.timestamp ? new Date(entry.timestamp) : undefined, | ||
| }, | ||
| }); | ||
| success = true; | ||
| channel.ack(msg); | ||
| } catch (error: any) { | ||
| attempt++; | ||
| if (attempt <= MAX_RETRIES) { | ||
| const backoff = INITIAL_BACKOFF_MS * Math.pow(2, attempt - 1); | ||
| logger.warn( | ||
| `Audit consumer retry ${attempt}/${MAX_RETRIES} in ${backoff}ms`, | ||
| { | ||
| error: error.message || error, | ||
| eventType: entry.eventType, | ||
| }, | ||
| ); | ||
| await new Promise((resolve) => setTimeout(resolve, backoff)); | ||
| } else { | ||
| logger.error( | ||
| "Audit consumer failed after max retries, moving to DLQ", | ||
| { | ||
| error: error.message || error, | ||
| entry, | ||
| }, | ||
| ); | ||
| // Reject to DLQ | ||
| channel.nack(msg, false, false); | ||
| } | ||
| } | ||
| } | ||
| }); | ||
|
|
||
| logger.info("Audit consumer started"); | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,123 @@ | ||
| import { logAudit } from "./auditService"; | ||
| import { logger } from "../../config/logger"; | ||
| import { sendEmail } from "../notification"; | ||
| import fs from "fs"; | ||
| import { config } from "../../config/env"; | ||
| import { getRabbitMQChannel, QUEUES } from "../../config/rabbitmq"; | ||
|
|
||
| jest.mock("../../config/rabbitmq", () => ({ | ||
| getRabbitMQChannel: jest.fn(), | ||
| QUEUES: { | ||
| AUDIT_LOGS: "audit_logs", | ||
| }, | ||
| })); | ||
|
|
||
| jest.mock("../notification", () => ({ | ||
| sendEmail: jest.fn().mockResolvedValue(undefined), | ||
| })); | ||
|
|
||
| jest.mock("../../config/logger", () => ({ | ||
| logger: { | ||
| warn: jest.fn(), | ||
| error: jest.fn(), | ||
| info: jest.fn(), | ||
| debug: jest.fn(), | ||
| verbose: jest.fn(), | ||
| }, | ||
| })); | ||
|
|
||
| describe("AuditService Reliability (RabbitMQ)", () => { | ||
| const entry = { | ||
| eventType: "USER_LOGIN", | ||
| action: "LOGIN", | ||
| performedBy: "user-1", | ||
| }; | ||
|
|
||
| const mockChannel = { | ||
| sendToQueue: jest.fn(), | ||
| }; | ||
|
|
||
| beforeEach(() => { | ||
| jest.clearAllMocks(); | ||
| (getRabbitMQChannel as jest.Mock).mockReturnValue(mockChannel); | ||
| }); | ||
|
|
||
| it("should publish to RabbitMQ successfully", async () => { | ||
| mockChannel.sendToQueue.mockReturnValue(true); | ||
|
|
||
| await logAudit(entry); | ||
|
|
||
| expect(mockChannel.sendToQueue).toHaveBeenCalledWith( | ||
| QUEUES.AUDIT_LOGS, | ||
| expect.any(Buffer), | ||
| { persistent: true }, | ||
| ); | ||
| expect(logger.debug).toHaveBeenCalledWith( | ||
| expect.stringContaining("Audit entry published to queue"), | ||
| expect.anything(), | ||
| ); | ||
| }); | ||
|
|
||
| it("should fall back to file if publish fails (returns false)", async () => { | ||
| mockChannel.sendToQueue.mockReturnValue(false); | ||
|
|
||
| // Mock FS | ||
| const appendFileSyncSpy = jest | ||
| .spyOn(fs, "appendFileSync") | ||
| .mockImplementation(() => {}); | ||
| const mkdirSyncSpy = jest | ||
| .spyOn(fs, "mkdirSync") | ||
| .mockImplementation(() => ""); | ||
| const existsSyncSpy = jest.spyOn(fs, "existsSync").mockReturnValue(true); | ||
|
|
||
| await logAudit(entry); | ||
|
|
||
| expect(logger.warn).toHaveBeenCalledWith( | ||
| expect.stringContaining("RabbitMQ audit publish failed"), | ||
| expect.anything(), | ||
| ); | ||
| expect(appendFileSyncSpy).toHaveBeenCalled(); | ||
|
|
||
| appendFileSyncSpy.mockRestore(); | ||
| mkdirSyncSpy.mockRestore(); | ||
| existsSyncSpy.mockRestore(); | ||
| }); | ||
|
|
||
| it("should fall back to file if RabbitMQ throws error", async () => { | ||
| (getRabbitMQChannel as jest.Mock).mockImplementation(() => { | ||
| throw new Error("RabbitMQ Down"); | ||
| }); | ||
|
|
||
| // Mock FS | ||
| const appendFileSyncSpy = jest | ||
| .spyOn(fs, "appendFileSync") | ||
| .mockImplementation(() => {}); | ||
| const mkdirSyncSpy = jest | ||
| .spyOn(fs, "mkdirSync") | ||
| .mockImplementation(() => ""); | ||
| const existsSyncSpy = jest.spyOn(fs, "existsSync").mockReturnValue(true); | ||
|
|
||
| // Set alert email in config for this test | ||
| const originalAlertEmail = config.notification.alertEmail; | ||
| config.notification.alertEmail = "admin@example.com"; | ||
|
|
||
| await logAudit(entry); | ||
|
|
||
| expect(logger.error).toHaveBeenCalledWith( | ||
| expect.stringContaining("CRITICAL: Audit logging failed"), | ||
| expect.anything(), | ||
| ); | ||
| expect(appendFileSyncSpy).toHaveBeenCalled(); | ||
| expect(sendEmail).toHaveBeenCalledWith( | ||
| "admin@example.com", | ||
| expect.stringContaining("CRITICAL: Audit Log System Failure"), | ||
| expect.anything(), | ||
| ); | ||
|
|
||
| // Restore | ||
| config.notification.alertEmail = originalAlertEmail; | ||
| appendFileSyncSpy.mockRestore(); | ||
| mkdirSyncSpy.mockRestore(); | ||
| existsSyncSpy.mockRestore(); | ||
| }); | ||
| }); |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Isolate audit-consumer startup failure from API startup.
This `await` sits inside the outer `startServer()` try/catch, so any `getRabbitMQChannel()` or queue-assertion failure thrown from `startAuditConsumer()` now takes the whole service down at lines 188-190. That contradicts the earlier "continue without queue-based features" behavior and turns an audit-queue misconfiguration into a full API outage.Suggested fix
📝 Committable suggestion
🤖 Prompt for AI Agents