Skip to content

Commit

Permalink
Merge pull request #199 from nazywam/feature/redis-cache
Browse files Browse the repository at this point in the history
Add optional Redis analysis caching
  • Loading branch information
ninoseki authored Feb 2, 2024
2 parents 611916d + 8bfdb71 commit 122a1a5
Show file tree
Hide file tree
Showing 13 changed files with 131 additions and 10 deletions.
3 changes: 2 additions & 1 deletion backend/api/api.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from fastapi import APIRouter

from backend.api.endpoints import analyze, submit
from backend.api.endpoints import analyze, submit, lookup

api_router = APIRouter()
api_router.include_router(analyze.router, prefix="/analyze", tags=["analyze"])
api_router.include_router(submit.router, prefix="/submit", tags=["submit"])
api_router.include_router(lookup.router, prefix="/lookup", tags=["lookup"])
13 changes: 12 additions & 1 deletion backend/api/endpoints/analyze.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
from redis import StrictRedis
from fastapi import APIRouter, File, HTTPException, status
from fastapi.encoders import jsonable_encoder
from pydantic import ValidationError

from backend.factories.response import ResponseFactory
from backend.schemas.payload import FilePayload, Payload
from backend.schemas.response import Response
from backend.core.settings import REDIS_HOST, REDIS_PASSWORD, REDIS_EXPIRE

router = APIRouter()

Expand All @@ -18,7 +20,16 @@ async def _analyze(file: bytes) -> Response:
detail=jsonable_encoder(exc.errors()),
) from exc

return await ResponseFactory.from_bytes(payload.file)
data = await ResponseFactory.from_bytes(payload.file)

if REDIS_HOST:
redis_conn = StrictRedis(host=REDIS_HOST, password=REDIS_PASSWORD)
redis_conn.hset(data.identifier, "analysis", data.json())

if REDIS_EXPIRE != -1:
redis_conn.expire(name=data.identifier, time=REDIS_EXPIRE)

return data


@router.post(
Expand Down
33 changes: 33 additions & 0 deletions backend/api/endpoints/lookup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import json

from redis import StrictRedis
from fastapi import APIRouter, HTTPException, status

from backend.schemas.response import Response
from backend.core.settings import REDIS_HOST, REDIS_PASSWORD

router = APIRouter()

@router.get(
    "/{identifier}/",
    response_description="Return an analysis result",
    summary="Lookup cached analysis",
    description="Try to fetch existing analysis from database",
    status_code=200,
)
async def lookup(identifier: str) -> Response:
    """Return the cached analysis for *identifier*.

    The identifier is the SHA-256 hex digest of the submitted eml file,
    which the analyze endpoint uses as the Redis hash key.

    Raises:
        HTTPException: 501 when Redis caching is disabled (no REDIS_HOST),
            404 when no cached analysis exists for the identifier.
    """
    if not REDIS_HOST:
        raise HTTPException(
            status_code=status.HTTP_501_NOT_IMPLEMENTED,
            detail="Redis cache is not enabled",
        )

    # Close the per-request connection instead of leaking it on every call.
    redis_conn = StrictRedis(host=REDIS_HOST, password=REDIS_PASSWORD)
    try:
        data = redis_conn.hget(identifier, "analysis")
    finally:
        redis_conn.close()

    if not data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Analysis cache not found",
        )

    # FastAPI re-validates the decoded dict against the Response return
    # annotation, so returning the raw dict is sufficient here.
    return json.loads(data)
4 changes: 4 additions & 0 deletions backend/core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,7 @@
VIRUSTOTAL_API_KEY: Secret = config("VIRUSTOTAL_API_KEY", cast=Secret, default="")

INQUEST_API_KEY: Secret = config("INQUEST_API_KEY", cast=Secret, default="")

# Redis cache configuration. An empty REDIS_HOST disables caching entirely
# (the analyze/lookup endpoints check it before connecting).
REDIS_HOST: str = config("REDIS_HOST", cast=str, default="")
# NOTE(review): "changeme" mirrors the docker-compose default; a real
# password should be supplied via the environment in production.
REDIS_PASSWORD: str = config("REDIS_PASSWORD", cast=str, default="changeme")
# TTL in seconds for cached analyses; -1 means "never expire".
# Annotation corrected to int: the value is cast with cast=int.
REDIS_EXPIRE: int = config("REDIS_EXPIRE", cast=int, default=3600)
2 changes: 2 additions & 0 deletions backend/factories/eml.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from io import BytesIO
from typing import Any
from hashlib import sha256

import arrow
import dateparser
Expand Down Expand Up @@ -37,6 +38,7 @@ def __init__(self, eml_file: bytes):
self.eml_file = eml_file
parser = EmlParser(include_raw_body=True, include_attachment_data=True)
self.parsed = parser.decode_email_bytes(eml_file)
self.parsed["identifier"] = sha256(eml_file).hexdigest()

def _normalize_received_date(self, received: dict):
date = received.get("date", "")
Expand Down
2 changes: 1 addition & 1 deletion backend/factories/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ async def to_model(self) -> Response:
verdicts.append(OleIDVerdictFactory.from_attachments(eml.attachments))
# Add VT verdict

return Response(eml=eml, verdicts=verdicts)
return Response(eml=eml, verdicts=verdicts, identifier=eml.identifier)

@classmethod
async def from_bytes(cls, eml_file: bytes) -> Response:
Expand Down
1 change: 1 addition & 0 deletions backend/schemas/eml.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,3 +65,4 @@ class Eml(APIModel):
attachments: list[Attachment]
bodies: list[Body]
header: Header
identifier: str
1 change: 1 addition & 0 deletions backend/schemas/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@
class Response(APIModel):
    """Top-level analysis result returned by the analyze/lookup endpoints."""

    # Parsed eml content (headers, bodies, attachments).
    eml: Eml
    # Verdicts produced by the configured analyzers.
    verdicts: list[Verdict]
    # SHA-256 hex digest of the submitted eml file; doubles as the
    # Redis cache key for cached analyses.
    identifier: str
6 changes: 6 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,12 @@ services:
- INQUEST_API_KEY=${INQUEST_API_KEY}
- SPAMASSASSIN_HOST=spamassassin
- SPAMASSASSIN_PORT=${SPAMASSASSIN_PORT:-783}
- REDIS_HOST=redis
restart: always
depends_on:
- spamassassin
redis:
image: library/redis
command: /bin/sh -c "redis-server --requirepass ${REDIS_PASSWORD:-changeme}"
ports:
- 6379:6379
4 changes: 4 additions & 0 deletions frontend/src/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,9 @@ export const API = {
}
})
return res.data
},
// Fetch a previously cached analysis from the backend's Redis-backed
// lookup endpoint. `identifier` is the analysis identifier returned by
// analyzeFile (the SHA-256 of the eml file).
async lookupFile(identifier: string) : Promise<Response> {
  const res = await client.get<Response>(`/api/lookup/${identifier}/`);
  return res.data
}
}
40 changes: 34 additions & 6 deletions frontend/src/views/Home.vue
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@

<script lang="ts">
import Vue from "vue"
import { defineComponent, ref } from "vue"
import { onMounted, defineComponent, ref } from "vue"
import { useAsyncTask } from "vue-concurrency"
import { API } from "@/api"
Expand All @@ -58,19 +58,40 @@ export default defineComponent({
const root = ref<HTMLElement>()
const emlFile = ref<File | undefined>(undefined)
const analyzeFileTask = useAsyncTask<Response, [File | undefined]>(
async (_signal, file: File | undefined) => {
return await API.analyzeFile(file)
// One task for both flows: when an identifier is present, look up the
// cached analysis; otherwise analyze the uploaded file.
// Use the primitive `string` type, not the object wrapper `String`:
// `String` is not assignable to API.lookupFile's `string` parameter.
const analyzeFileTask = useAsyncTask<Response, [File | undefined, string | undefined]>(
  async (_signal, file: File | undefined, identifier: string | undefined) => {
    if (identifier) {
      return await API.lookupFile(identifier)
    }
    return await API.analyzeFile(file)
  }
)
// Fetch a cached analysis by identifier, with the same loading/error
// handling UX as the analyze flow.
// Fixes: `fileIdentifier` was implicitly `any`; and `null` was passed
// where the task declares `File | undefined` — use `undefined`.
const lookup = async (fileIdentifier: string) => {
  const loadingComponent = buefy.loading.open({
    container: root.value
  })

  try {
    await analyzeFileTask.perform(undefined, fileIdentifier)
    loadingComponent.close()
  } catch (error) {
    loadingComponent.close()
    const data = (error as any).response.data as ErrorData
    alertError(data, buefy)
  }
}
const analyze = async () => {
const loadingComponent = buefy.loading.open({
container: root.value
})
try {
await analyzeFileTask.perform(emlFile.value)
await analyzeFileTask.perform(emlFile.value, null)
loadingComponent.close()
} catch (error) {
loadingComponent.close()
Expand All @@ -80,7 +101,14 @@ export default defineComponent({
}
}
return { analyze, analyzeFileTask, emlFile, root }
// On page load, a non-empty URL fragment ("#<identifier>") triggers a
// lookup of the cached analysis for that identifier.
onMounted(() => {
  const hash = location.hash
  if (hash) {
    lookup(hash.slice(1))
  }
})
return { analyze, lookup, analyzeFileTask, emlFile, root }
}
})
</script>
Expand Down
31 changes: 30 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ pydantic = "^2.5"
pyhumps = "^3.8.0"
python-magic = "^0.4.27"
python-multipart = "^0.0"
redis = "^5.0.0"
uvicorn = "^0.25"
vt-py = "^0.18"

Expand Down

0 comments on commit 122a1a5

Please sign in to comment.