Commit 9880f76

CameronMcClymont authored and tiborsimko committed
fix(rest): correct response when fetching workflow logs by steps (#660)
Fixes unintentional code indentation introduced in #651 that prevented the endpoint from returning the proper response when fetching workflow logs by steps. Adds a test case to cover this situation. Closes #659
1 parent 5ca4f44 commit 9880f76
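
For context, here is a minimal, hypothetical sketch of the failure mode described in the commit message: when the shared return statement is accidentally indented into the else branch, the if branch builds its response dictionary but never returns it, so the view yields no valid response. The route and names below are illustrative only, not the actual REANA endpoint code.

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route("/logs")
def get_logs():
    # Optional JSON body with a list of step names to filter by.
    steps = request.get_json(silent=True)
    if steps:
        payload = {"job_logs": {step: "..." for step in steps}}
    else:
        payload = {"job_logs": "all logs"}
    # The fix amounts to keeping this return outside the if/else so both
    # branches share it; before the fix it sat one level deeper, inside the
    # else branch, so requests filtering by steps never reached a return.
    return jsonify(payload), 200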

File tree

reana_workflow_controller/rest/workflows_status.py
tests/test_views.py

2 files changed: +100 -22 lines changed


reana_workflow_controller/rest/workflows_status.py

Lines changed: 21 additions & 21 deletions
@@ -150,6 +150,7 @@ def get_workflow_logs(workflow_id_or_name, paginate=None, **kwargs): # noqa
         steps = None
         if request.is_json:
             steps = request.json
+
         if steps:
             workflow_logs = {
                 "workflow_logs": None,
@@ -172,30 +173,29 @@ def get_workflow_logs(workflow_id_or_name, paginate=None, **kwargs): # noqa
             workflow_logs = {
                 "workflow_logs": logs or workflow.logs,
                 "job_logs": build_workflow_logs(workflow, paginate=paginate),
-                "service_logs": {},
+                "service_logs": {
+                    s.name: sorted(
+                        [log.log for log in s.logs],
+                        key=lambda x: x["component"]
+                        != "scheduler",  # scheduler logs first
+                    )
+                    for s in workflow.services
+                },
                 "engine_specific": workflow.engine_specific,
             }
 
-            workflow_logs["service_logs"] = {
-                s.name: sorted(
-                    [log.log for log in s.logs],
-                    key=lambda x: x["component"] != "scheduler",  # scheduler logs first
-                )
-                for s in workflow.services
-            }
-
-            return (
-                jsonify(
-                    {
-                        "workflow_id": workflow.id_,
-                        "workflow_name": get_workflow_name(workflow),
-                        "logs": json.dumps(workflow_logs),
-                        "user": user_uuid,
-                        "live_logs_enabled": REANA_OPENSEARCH_ENABLED,
-                    }
-                ),
-                200,
-            )
+        return (
+            jsonify(
+                {
+                    "workflow_id": workflow.id_,
+                    "workflow_name": get_workflow_name(workflow),
+                    "logs": json.dumps(workflow_logs),
+                    "user": user_uuid,
+                    "live_logs_enabled": REANA_OPENSEARCH_ENABLED,
+                }
+            ),
+            200,
+        )
 
     except ValueError:
         return (
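
For reference, a sketch of how a client might exercise the fixed code path, modeled on the new test added below; the base URL, route shape, and user UUID are assumptions for illustration, not part of this commit.

import requests

BASE_URL = "http://localhost:5000"  # assumed workflow-controller address
WORKFLOW = "myanalysis.1"

# Filter the logs by step names, passed as a JSON list in the request body,
# mirroring the new test case below.
response = requests.get(
    f"{BASE_URL}/api/workflows/{WORKFLOW}/logs",  # assumed route for get_workflow_logs
    params={"user": "00000000-0000-0000-0000-000000000000"},  # user UUID
    json=["gendata", "fitdata"],
)
response.raise_for_status()
print(response.json()["logs"])  # JSON string with workflow_logs, job_logs, ...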

tests/test_views.py

Lines changed: 79 additions & 1 deletion
@@ -1240,7 +1240,85 @@ def test_get_created_workflow_logs(
             ),
         }
         assert response_data == expected_data
-        mock_method.call_count == 2
+        assert mock_method.call_count == 2
+
+
+def test_get_created_workflow_logs_by_steps(
+    app,
+    user0,
+    cwl_workflow_with_name,
+    tmp_shared_volume_path,
+    session,
+):
+    """Test get workflow logs, filtering by steps."""
+    with app.test_client() as client:
+        # Create the workflow
+        res = client.post(
+            url_for("workflows.create_workflow"),
+            query_string={
+                "user": user0.id_,
+                "workspace_root_path": tmp_shared_volume_path,
+            },
+            content_type="application/json",
+            data=json.dumps(cwl_workflow_with_name),
+        )
+
+        # Get the generated workflow UUID
+        response_data = json.loads(res.get_data(as_text=True))
+        workflow_uuid = response_data.get("workflow_id")
+        workflow_name = response_data.get("workflow_name")
+
+        # Create a job for the workflow
+        workflow_job = Job(
+            id_=uuid.UUID("9a22c3a4-6d72-4812-93e7-7e0efdeb985d"),
+            workflow_uuid=workflow_uuid,
+        )
+        workflow_job.status = "running"
+        workflow_job.logs = "test job logs"
+        workflow_job.job_name = "gendata"
+        session.add(workflow_job)
+        session.commit()
+
+        # Call the API to fetch the workflow logs, filtering by steps
+        res = client.get(
+            url_for("statuses.get_workflow_logs", workflow_id_or_name=workflow_uuid),
+            query_string={"user": user0.id_},
+            content_type="application/json",
+            data=json.dumps(["gendata", "fitdata"]),
+        )
+
+        # Expect a successful response
+        assert res.status_code == 200
+
+        # Check the response data is as expected
+        response_data = json.loads(res.get_data(as_text=True))
+        expected_data = {
+            "workflow_id": workflow_uuid,
+            "workflow_name": workflow_name,
+            "user": str(user0.id_),
+            "live_logs_enabled": False,
+            "logs": json.dumps(
+                {
+                    "workflow_logs": None,
+                    "job_logs": {
+                        str(workflow_job.id_): {
+                            "workflow_uuid": str(workflow_job.workflow_uuid),
+                            "job_name": workflow_job.job_name,
+                            "compute_backend": "",
+                            "backend_job_id": "",
+                            "docker_img": "",
+                            "cmd": "",
+                            "status": workflow_job.status.name,
+                            "logs": "test job logs",
+                            "started_at": None,
+                            "finished_at": None,
+                        }
+                    },
+                    "engine_specific": None,
+                }
+            ),
+        }
+        assert response_data == expected_data
 
 
 def test_get_created_workflow_opensearch_disabled(
