feat: javascript post requests #38

Merged
2 commits merged on Sep 20, 2023
11 changes: 0 additions & 11 deletions src/aind_data_transfer_service/forms.py

This file was deleted.

63 changes: 2 additions & 61 deletions src/aind_data_transfer_service/server.py
@@ -9,15 +9,12 @@
from fastapi.responses import JSONResponse
from fastapi.templating import Jinja2Templates
from starlette.applications import Starlette
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
from starlette_wtf import CSRFProtectMiddleware, csrf_protect

from aind_data_transfer_service.configs.job_configs import (
BasicUploadJobConfigs,
HpcJobConfigs,
)
from aind_data_transfer_service.forms import JobManifestForm
from aind_data_transfer_service.hpc.client import HpcClient, HpcClientConfigs
from aind_data_transfer_service.hpc.models import (
HpcJobStatusResponse,
@@ -96,7 +93,7 @@ async def submit_basic_jobs(request: Request):
response_json = response.json()
responses.append(response_json)
# Add pause to stagger job requests to the hpc
await sleep(0.05)
await sleep(0.2)
except Exception as e:
logging.error(repr(e))
hpc_errors.append(
@@ -119,60 +116,11 @@ async def submit_basic_jobs(request: Request):
)


@csrf_protect
async def index(request: Request):
"""GET|POST /: form handler"""
hpc_client_conf = HpcClientConfigs()
hpc_client = HpcClient(configs=hpc_client_conf)
job_manifest_form = await JobManifestForm.from_formdata(request)
jobs = []
if job_manifest_form.is_submitted():
upload_csv = job_manifest_form.data.get("upload_csv")
submit_jobs = job_manifest_form.data.get("submit_jobs")

if upload_csv:
file = job_manifest_form["upload_csv"].data
content = await file.read()
data = content.decode("utf-8")

csv_reader = csv.DictReader(io.StringIO(data))
for row in csv_reader:
job = BasicUploadJobConfigs.from_csv_row(
row=row,
aws_param_store_name=os.getenv("HPC_AWS_PARAM_STORE_NAME"),
temp_directory=os.getenv("HPC_STAGING_DIRECTORY"),
)
# Construct hpc job setting most of the vars from the env
hpc_job = HpcJobConfigs(basic_upload_job_configs=job)
jobs.append(hpc_job)

if submit_jobs:
if jobs:
responses = []
for job in jobs:
job_def = job.job_definition
response = hpc_client.submit_job(job_def)
response_json = response.json()
responses.append(response_json)
# Add pause to stagger job requests to the hpc
await sleep(1)

return JSONResponse(
content={
"message": "Successfully submitted job.",
"data": responses,
},
status_code=200,
)
else:
return JSONResponse(
content={"error": "Error collecting csv data."},
status_code=400,
)

return templates.TemplateResponse(
name="index.html",
context=({"request": request, "form": job_manifest_form}),
context=({"request": request}),
)


@@ -217,10 +165,3 @@ async def jobs(request: Request):
]

app = Starlette(routes=routes)
app.add_middleware(
SessionMiddleware, secret_key=os.getenv("APP_SECRET_KEY", "test_app_key")
)
app.add_middleware(
CSRFProtectMiddleware,
csrf_secret=os.getenv("APP_CSRF_SECRET_KEY", "test_csrf_key"),
)
115 changes: 105 additions & 10 deletions src/aind_data_transfer_service/templates/index.html
@@ -1,6 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.6.4/jquery.min.js"></script>
<meta charset="UTF-8">
<title>{% block title %} {% endblock %} AIND Data Transfer Service</title>
<style>
@@ -15,10 +16,6 @@
text-align: left;
padding: 8px;
}

tr:nth-child(even) {
background-color: #dddddd;
}
</style>
</head>
<body>
@@ -27,13 +24,111 @@
<a href="/jobs">Job Status</a>
</nav>
<h2>Submit Jobs</h2>
<form action="/" method="post" enctype="multipart/form-data">
{{ form.csrf_token() }}
<form id="preview_form" method="post" enctype="multipart/form-data">
<label for="file">Please select a CSV file:</label>
{{ form.upload_csv() }}
<div></div>
<div></div>
{{ form.submit_jobs() }}
<input type="file" id="file" name="file"><br><br>
<input type="submit" id="preview" value="preview"><br><br>
</form>
<button type="button" onclick="submitJobs()">Submit</button>
<div id="message"></div><br>
<div id="response"></div>
<script>
var jobs = []
var parsing_errors = []
$(function() {
$("#preview_form").on("submit", function(e) {
e.preventDefault();
var formData = new FormData(this);
$.ajax({
url: "/api/validate_csv",
type: "POST",
data: formData,
cache: false,
contentType: false,
processData: false,
beforeSend: function() {
$("#message").html("sending...");
},
success: function(data) {
$("#message").html("Returned response.");
jobs = data["data"]["jobs"];
parsing_errors = []
let jobsLength = jobs.length;
var table = document.createElement('table'), tr, td, row, cell;
for (row = 0; row < jobsLength; row++) {
let job = JSON.parse(jobs[row]);
tr = document.createElement('tr');
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = job.s3_bucket;
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = job.experiment_type;
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = job.subject_id;
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = job.acq_date;
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = job.acq_time;
table.appendChild(tr);
let modalities = job.modalities;
let modalitiesLength = modalities.length;
for (mRow = 0; mRow < modalitiesLength; mRow++){
let modality = modalities[mRow]
tr = document.createElement('tr');
td = document.createElement('td');
tr.appendChild(td);
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = modality.modality.abbreviation;
td = document.createElement('td');
tr.appendChild(td);
td.innerHTML = modality.source;
td.setAttribute("colspan", "4");
table.appendChild(tr);
}
}
document.getElementById('response').appendChild(table);
},
error: function(data) {
jobs = []
parsing_errors = data.responseJSON["data"]["errors"]
$("#message").html("Returned error");
document.getElementById("response").innerHTML = "";
$("#response").html(parsing_errors);
}
});
});
});
submitJobs = function() {
if(jobs.length > 0 && parsing_errors.length == 0){
$.ajax({
url: "/api/submit_basic_jobs",
type: "POST",
data: JSON.stringify({"jobs": jobs}),
contentType: 'application/json; charset=utf-8',
beforeSend: function() {
$("#message").html("Submitting jobs. Please don't refresh or re-submit...");
},
success: function (data) {
jobs = []
parsing_errors = []
$("#message").html("Submitted Jobs.");
$("#response").html(data);
},
error: function(data) {
jobs = []
$("#message").html("Returned error");
document.getElementById("response").innerHTML = "";
parsing_errors = data.responseJSON["data"]["errors"]
$("#response").html(parsing_errors);
}
});
}
};
</script>
</body>
</html>
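
For reference, the table-building loop above implies a /api/validate_csv response shaped roughly like the sketch below. This is inferred only from the fields the script reads, and every value is a placeholder; note that each entry in "jobs" is itself a JSON string, since the script calls JSON.parse on it.

# Illustrative /api/validate_csv response shape (inferred from the jQuery handlers; values are placeholders)
example_validate_response = {
    "data": {
        "jobs": [
            '{"s3_bucket": "some-bucket", "experiment_type": "ecephys",'
            ' "subject_id": "123456", "acq_date": "2023-01-01", "acq_time": "01-00-00",'
            ' "modalities": [{"modality": {"abbreviation": "ecephys"},'
            ' "source": "/some/source/dir"}]}'
        ]
    }
}
# On failure, the error handler reads responseJSON["data"]["errors"] instead.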
79 changes: 1 addition & 78 deletions tests/test_server.py
@@ -1,14 +1,12 @@
"""Tests server module."""

import io
import json
import os
import unittest
from copy import deepcopy
from pathlib import Path
from unittest.mock import MagicMock, call, patch
from unittest.mock import MagicMock, patch

from bs4 import BeautifulSoup
from fastapi.testclient import TestClient
from requests import Response

@@ -204,81 +202,6 @@ def test_index(self):
self.assertEqual(response.status_code, 200)
self.assertIn("Submit Jobs", response.text)

@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
def test_post_upload_csv(self):
"""Tests that valid csv is posted as expected."""
with TestClient(app) as client:
get_response = client.get("/")
soup = BeautifulSoup(get_response.text, "html.parser")
csrf_token = soup.find("input", attrs={"name": "csrf_token"})[
"value"
]
headers = {"X-CSRF-Token": csrf_token}
response = client.post(
"/",
files={
"upload_csv": (
"resources/sample.csv",
io.BytesIO(self.csv_content.encode("utf-8")),
"text/csv",
)
},
headers=headers,
)
self.assertEqual(response.status_code, 200)
self.assertIn("text/html", response.headers["content-type"])

@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
def test_post_submit_jobs_failure(self):
"""Tests that form fails to submit when there's no data as expected."""
with TestClient(app) as client:
get_response = client.get("/")
soup = BeautifulSoup(get_response.text, "html.parser")
csrf_token = soup.find("input", attrs={"name": "csrf_token"})[
"value"
]
headers = {"X-CSRF-Token": csrf_token}
response = client.post(
"/", data={"submit_jobs": "Submit"}, headers=headers
)
self.assertEqual(response.status_code, 400)
self.assertIn("Error collecting csv data.", response.text)

@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
@patch("requests.post")
@patch("aind_data_transfer_service.server.sleep", return_value=None)
def test_post_submit_jobs_success(
self, mock_sleep: MagicMock, mock_post: MagicMock
):
"""Tests that form successfully submits as expected."""
mock_response = Response()
mock_response.status_code = 200
mock_response._content = b'{"message": "success"}'
mock_post.return_value = mock_response

with TestClient(app) as client:
get_response = client.get("/")
soup = BeautifulSoup(get_response.text, "html.parser")
csrf_token = soup.find("input", attrs={"name": "csrf_token"})[
"value"
]
headers = {"X-CSRF-Token": csrf_token}
response = client.post(
"/",
files={
"upload_csv": (
"resources/sample.csv",
io.BytesIO(self.csv_content.encode("utf-8")),
"text/csv",
)
},
data={"submit_jobs": "Submit"},
headers=headers,
)
self.assertEqual(response.status_code, 200)
self.assertIn("Successfully submitted job.", response.text)
mock_sleep.assert_has_calls([call(1), call(1), call(1)])

@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
@patch("requests.get")
def test_jobs_success(self, mock_get: MagicMock):
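
The deleted tests covered the old CSRF-protected form handler. A replacement test against the JSON endpoint could look roughly like the sketch below; it is written in the style of the remaining tests, but the payload and the example_jobs fixture are assumptions, and the real coverage added for the new endpoints sits outside this hunk.

# Hypothetical replacement test (sketch only, in the style of the existing test class)
@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
@patch("requests.post")
@patch("aind_data_transfer_service.server.sleep", return_value=None)
def test_submit_basic_jobs_success(
    self, mock_sleep: MagicMock, mock_post: MagicMock
):
    """Sketch: jobs posted as JSON should be forwarded to the hpc."""
    mock_response = Response()
    mock_response.status_code = 200
    mock_response._content = b'{"message": "success"}'
    mock_post.return_value = mock_response
    with TestClient(app) as client:
        response = client.post(
            "/api/submit_basic_jobs",
            json={"jobs": self.example_jobs},  # assumed fixture of serialized job configs
        )
    self.assertEqual(response.status_code, 200)
    self.assertIn("message", response.text)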