Merge pull request #883 from kedhammar/ci-fixes
CI pt. 2: Fixes
kedhammar authored Oct 24, 2024
2 parents 21e4014 + 0ed5291 commit f87693d
Showing 33 changed files with 272 additions and 317 deletions.
6 changes: 3 additions & 3 deletions setup.py
@@ -1,12 +1,12 @@
 #!/usr/bin/env python
 """Setup file and install script SciLife python scripts.
 """
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

 try:
-    with open("requirements.txt", "r") as f:
+    with open("requirements.txt") as f:
         install_requires = [x.strip() for x in f.readlines()]
-except IOError:
+except OSError:
    install_requires = []

 setup(
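The IOError → OSError change in setup.py is behavior-preserving: since Python 3.3, IOError has been an alias of OSError, so the rewritten except clause still covers a missing requirements.txt. A standalone sketch of the same pattern (not taken from the repository):

    # IOError and OSError are the same class on Python 3.
    assert IOError is OSError

    def read_requirements(path="requirements.txt"):
        """Return stripped lines from a requirements file, or [] if it cannot be read."""
        try:
            with open(path) as f:  # "r" is the default mode, so it can be dropped
                return [line.strip() for line in f]
        except OSError:  # also catches FileNotFoundError, a subclass of OSError
            return []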
1 change: 1 addition & 0 deletions status/applications.py
@@ -2,6 +2,7 @@
 """
 import json
 from collections import Counter
+
 from status.util import SafeHandler


11 changes: 5 additions & 6 deletions status/authorization.py
@@ -1,8 +1,9 @@
-import tornado.web
-import tornado.auth
 import json

-from status.util import UnsafeHandler, GoogleUser
+import tornado.auth
+import tornado.web
+
+from status.util import GoogleUser, UnsafeHandler


 class LoginHandler(tornado.web.RequestHandler, tornado.auth.GoogleOAuth2Mixin):
@@ -34,9 +35,7 @@ def get(self):
                 if url is None:
                     url = "/"
             else:
-                url = "/unauthorized?email={0}&contact={1}".format(
-                    user.emails[0], self.application.settings["contact_person"]
-                )
+                url = f"/unauthorized?email={user.emails[0]}&contact={self.application.settings['contact_person']}"
             self.redirect(url)

         else:
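The reshuffled imports in authorization.py follow the usual three-group convention that import sorters such as isort (or Ruff's isort rules) enforce: standard library first, third-party packages second, local modules last, each group alphabetized and separated by one blank line. A generic sketch of the layout (module names are illustrative, not prescribed by this commit):

    # 1) standard library
    import json
    import logging

    # 2) third-party packages
    import tornado.web

    # 3) first-party / local modules
    from status.util import SafeHandler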
13 changes: 7 additions & 6 deletions status/barcode.py
@@ -1,9 +1,10 @@
 """Handlers related to test for barcode printing
 """
+
+import re
 import subprocess

 from status.util import SafeHandler
-import re


 class BarcodeHandler(SafeHandler):
@@ -156,7 +157,7 @@ def make_barcode(label, print_bc):
         xpositionText = "440"  # moves the text position because the bc is longer
         textHeight = "38"
         formattedLabel.append(
-            "^FO{0},27^AFN,{1},{2}^FN1^FS".format(xpositionText, textHeight, ch_size)
+            f"^FO{xpositionText},27^AFN,{textHeight},{ch_size}^FN1^FS"
         )  # AF = assign font F, field number 1 (FN1), print text at position field origin (FO) rel. to home
         formattedLabel.append(
             "^FO80,17^BCN,70,N,N^FN2^FS"
@@ -172,14 +173,14 @@ def make_barcode(label, print_bc):
         yposition = "30"
         # Scalable font ^A0N,32,32 should fit roughly 42 chars on our current labels
         formattedLabel.append(
-            "^FO20,{0}^A0N,{1},{1}^FB640,1,0,C,0^FN1^FS".format(yposition, ch_size)
+            f"^FO20,{yposition}^A0N,{ch_size},{ch_size}^FB640,1,0,C,0^FN1^FS"
         )  # FO = x,y relative field origin; A0N = scalable font height,width; FB = make into one line field block and center
     formattedLabel.append("^XZ")  # end format
     formattedLabel.append("^XA")  # start of label format
     formattedLabel.append("^XFFORMAT^FS")  # label home posision
-    formattedLabel.append("^FN1^FD{}^FS".format(label))  # this is readable
+    formattedLabel.append(f"^FN1^FD{label}^FS")  # this is readable
     if print_bc:
-        formattedLabel.append("^FN2^FD{}^FS".format(label))  # this is the barcode
+        formattedLabel.append(f"^FN2^FD{label}^FS")  # this is the barcode
     formattedLabel.append("^XZ")
     return formattedLabel

@@ -204,5 +205,5 @@ def print_barcode(barcodeFile):
     sp.stdin.write(barcodeFile.encode("utf-8"))
     print("lp command is called for printing.")
     stdout, stderr = sp.communicate()  # Will wait for sp to finish
-    print("lp stdout: {0}".format(stdout))
+    print(f"lp stdout: {stdout}")
     sp.stdin.close()
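The ZPL strings in barcode.py are converted from str.format() to f-strings; the rendered labels are identical, the values are just interpolated inline. A quick standalone check (the label text and sizes are made up):

    label = "P12345_sample_1"
    xpositionText, textHeight, ch_size = "440", "38", "32"

    old_style = "^FO{0},27^AFN,{1},{2}^FN1^FS".format(xpositionText, textHeight, ch_size)
    new_style = f"^FO{xpositionText},27^AFN,{textHeight},{ch_size}^FN1^FS"
    assert old_style == new_style

    assert "^FN1^FD{}^FS".format(label) == f"^FN1^FD{label}^FS"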
11 changes: 5 additions & 6 deletions status/bioinfo_analysis.py
@@ -1,8 +1,9 @@
-import json
 import datetime
-import dateutil
+import json
 import traceback

+import dateutil
+
 from status.util import SafeHandler


@@ -77,12 +78,10 @@ def post(self, project_id):
         # couchdb bulk update
         try:
             save_result = self.application.bioinfo_db.update(to_save)
-        except Exception as err:
+        except Exception:
             self.set_status(400)
             self.finish(
-                "<html><body><p>Could not save bioinfo data. Please try again later.</p><pre>{}</pre></body></html>".format(
-                    traceback.format_exc()
-                )
+                f"<html><body><p>Could not save bioinfo data. Please try again later.</p><pre>{traceback.format_exc()}</pre></body></html>"
             )
             return None
         neg_save_res = []
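Dropping the unused `as err` binding in bioinfo_analysis.py loses no information: traceback.format_exc() called inside an except block formats the exception currently being handled, whether or not it is bound to a name. A minimal sketch of the pattern (the failing update is simulated):

    import traceback

    def save_or_report(update):
        try:
            update()  # stand-in for self.application.bioinfo_db.update(to_save)
        except Exception:
            # No "as err" needed: format_exc() reads the active exception.
            return f"<pre>{traceback.format_exc()}</pre>"
        return "ok"

    print(save_or_report(lambda: 1 / 0))  # prints the ZeroDivisionError traceback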
6 changes: 3 additions & 3 deletions status/clone_project.py
@@ -1,11 +1,11 @@


 import requests
-from status.util import SafeHandler
-
 from genologics import lims
+from genologics.config import BASEURI, PASSWORD, USERNAME
 from genologics.entities import Project
-from genologics.config import BASEURI, USERNAME, PASSWORD
+
+from status.util import SafeHandler


 class CloneProjectHandler(SafeHandler):
5 changes: 3 additions & 2 deletions status/controls.py
@@ -1,9 +1,10 @@
 """
 Handler related to Controls page
 """
+from genologics.config import BASEURI
+
 from status.util import SafeHandler
-from genologics import lims
-from genologics.config import BASEURI, USERNAME, PASSWORD
+

 class ControlsHandler(SafeHandler):

3 changes: 2 additions & 1 deletion status/data_deliveries_plot.py
@@ -1,5 +1,6 @@
-import json
 import datetime
+import json

 from status.util import SafeHandler

+
23 changes: 11 additions & 12 deletions status/deliveries.py
@@ -1,11 +1,12 @@
 from collections import OrderedDict

-from status.util import SafeHandler
-from status.running_notes import LatestRunningNoteHandler
-
-from genologics.config import BASEURI, USERNAME, PASSWORD
 from genologics import lims
-from genologics.entities import Udfconfig, Project as LIMSProject
+from genologics.config import BASEURI, PASSWORD, USERNAME
+from genologics.entities import Project as LIMSProject
+from genologics.entities import Udfconfig
+
+from status.running_notes import LatestRunningNoteHandler
+from status.util import SafeHandler

 lims = lims.Lims(BASEURI, USERNAME, PASSWORD)

@@ -21,13 +22,13 @@ def post(self):
         lims_project = LIMSProject(lims, id=project_id)
         if not lims_project:
             self.set_status(400)
-            self.write("lims project not found: {}".format(project_id))
+            self.write(f"lims project not found: {project_id}")
             return
         project_name = lims_project.name
         stepname = ["Project Summary 1.3"]
         processes = lims.get_processes(type=stepname, projectname=project_name)
         if processes == []:
-            error = "{} for {} is not available in LIMS.".format(stepname, project_name)
+            error = f"{stepname} for {project_name} is not available in LIMS."
             self.set_status(400)
             self.write(error)
             return
@@ -51,7 +52,7 @@ def post(self):
         for row in view[project_id]:
             doc_id = row.value
             break
-        if doc_id == None:
+        if doc_id is None:
             self.set_status(400)
             self.write("Status DB has not been updated: project not found")
             return
@@ -290,17 +291,15 @@ def get(self):

             else:
                 project_data = {
-                    "error": "could not find project information for {}".format(
-                        project_id
-                    )
+                    "error": f"could not find project information for {project_id}"
                 }

             ongoing_deliveries[project_id].update(project_data)
         try:
             lims_responsibles = ["unassigned"] + sorted(
                 Udfconfig(lims, id="1128").presets
             )
-        except Exception as e:
+        except Exception:
             lims_responsibles = ["unassigned"] + sorted(responsible_list)
         template = self.application.loader.load("deliveries.html")
         self.write(
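The `doc_id == None` → `doc_id is None` fix in deliveries.py follows PEP 8: None is a singleton, and `==` can be redirected by a custom `__eq__`, so only the identity check is guaranteed to mean "this is literally None". A toy illustration (the class is invented for the example):

    class AlwaysEqual:
        """Hypothetical object whose __eq__ answers True to everything."""
        def __eq__(self, other):
            return True

    doc_id = AlwaysEqual()
    print(doc_id == None)  # True - misleading, doc_id is not None
    print(doc_id is None)  # False - identity cannot be fooled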
9 changes: 5 additions & 4 deletions status/flowcell.py
@@ -1,10 +1,11 @@
-from status.util import SafeHandler
+import os
+import re
 from datetime import datetime
 from typing import Optional

 import pandas as pd
-import re
-import os
+
+from status.util import SafeHandler

 thresholds = {
     "HiSeq X": 320,
@@ -453,7 +454,7 @@ def get(self, name):
         reports_dir = self.application.minknow_path
         report_path = os.path.join(reports_dir, f"report_{name}.html")

-        self.write(open(report_path, "r").read())
+        self.write(open(report_path).read())

 class ElementFlowcellHandler(SafeHandler):
     def get(self, name):
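Removing the explicit "r" in flowcell.py changes nothing at runtime: open() defaults to mode="r" (text mode). A short sketch using a temporary file in place of a real MinKNOW report:

    import os
    import tempfile

    # Write a throwaway "report" and read it back both ways.
    with tempfile.NamedTemporaryFile("w", suffix=".html", delete=False) as tmp:
        tmp.write("<html>hypothetical report</html>")
        report_path = tmp.name

    with open(report_path, "r") as explicit, open(report_path) as implicit:
        assert explicit.read() == implicit.read()  # mode "r" is the default

    os.remove(report_path)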
37 changes: 16 additions & 21 deletions status/flowcells.py
@@ -1,21 +1,20 @@
 """Set of handlers related with Flowcells
 """

-import json
 import datetime
-import re
+import json
 import logging
-
-from dateutil.relativedelta import relativedelta
+import re
 from collections import OrderedDict

-from genologics import lims
-from genologics.config import BASEURI, USERNAME, PASSWORD
 import pandas as pd
+from dateutil.relativedelta import relativedelta
+from genologics import lims
+from genologics.config import BASEURI, PASSWORD, USERNAME

-from status.util import SafeHandler
 from status.flowcell import fetch_ont_run_stats, thresholds
 from status.running_notes import LatestRunningNoteHandler
+from status.util import SafeHandler

 application_log = logging.getLogger("tornado.application")

@@ -142,7 +141,7 @@ def list_ont_flowcells(self):
                     view_mux_scans=view_mux_scans,
                     view_pore_count_history=view_pore_count_history,
                 )
-            except Exception as e:
+            except Exception:
                 unfetched_runs.append(row.key)
                 application_log.exception(f"Failed to fetch run {row.key}")

@@ -156,8 +155,8 @@ def list_ont_flowcells(self):

             # Convert back to dictionary and return
             ont_flowcells = df.to_dict(orient="index")
-        except Exception as e:
-            application_log.exception(f"Failed to compile ONT flowcell dataframe")
+        except Exception:
+            application_log.exception("Failed to compile ONT flowcell dataframe")

         return ont_flowcells, unfetched_runs

@@ -300,9 +299,7 @@ def search_flowcell_names(self, search_string=""):
             if search_string in row_key.lower():
                 splitted_fc = row_key.split("_")
                 fc = {
-                    "url": "/flowcells/{}_{}".format(
-                        splitted_fc[0], splitted_fc[-1]
-                    ),
+                    "url": f"/flowcells/{splitted_fc[0]}_{splitted_fc[-1]}",
                     "name": row_key,
                 }
                 flowcells.append(fc)
@@ -325,9 +322,7 @@ def search_flowcell_names(self, search_string=""):
             if search_string in row_key.lower():
                 splitted_fc = row_key.split("_")
                 fc = {
-                    "url": "/flowcells/{}_{}".format(
-                        splitted_fc[0], splitted_fc[-1]
-                    ),
+                    "url": f"/flowcells/{splitted_fc[0]}_{splitted_fc[-1]}",
                     "name": row_key,
                 }
                 flowcells.append(fc)
@@ -424,12 +419,12 @@ def get(self, flowcell):
         self.set_header("Content-type", "application/json")
         try:
             p = get_container_from_id(flowcell)
-        except (KeyError, IndexError) as e:
+        except (KeyError, IndexError):
             self.write("{}")
         else:
             try:
                 links = json.loads(p.udf["Links"]) if "Links" in p.udf else {}
-            except KeyError as e:
+            except KeyError:
                 links = {}

             # Sort by descending date, then hopefully have deviations on top
@@ -454,7 +449,7 @@ def post(self, flowcell):
         else:
             try:
                 p = get_container_from_id(flowcell)
-            except (KeyError, IndexError) as e:
+            except (KeyError, IndexError):
                 self.status(400)
                 self.write("Flowcell not found")
             else:
@@ -505,8 +500,8 @@ def get(self, query):
             "info/summary", descending=True
         )

-        for row in xfc_view[query : "{}Z".format(query)]:
-            if not row.key in data:
+        for row in xfc_view[query : f"{query}Z"]:
+            if row.key not in data:
                 data[row.key] = []
                 # To add correct threshold values
                 fc_long_name = row.value["fcp"].split(":")[0]
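Two of the flowcells.py fixes are classic lint cleanups: `not row.key in data` parses as `not (row.key in data)`, so `row.key not in data` is the equivalent and more readable membership test, and an f-string with no placeholders is just a plain string carrying a useless prefix (pyflakes reports this as F541). A standalone illustration:

    data = {"FC_A": []}
    key = "FC_B"

    # "not x in y" parses as "not (x in y)", so "x not in y" is equivalent.
    assert (not key in data) == (key not in data)

    # An f-string without any {placeholders} equals the plain literal.
    assert f"Failed to compile ONT flowcell dataframe" == "Failed to compile ONT flowcell dataframe"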
9 changes: 5 additions & 4 deletions status/instruments.py
@@ -1,9 +1,10 @@
-from status.util import SafeHandler
-
 import datetime
+import json
+
 from dateutil import tz
 from dateutil.relativedelta import relativedelta
-import json
+
+from status.util import SafeHandler


 def recover_logs(handler, search_string=None, inst_type="bravo"):
@@ -43,7 +44,7 @@ def recover_logs(handler, search_string=None, inst_type="bravo"):

     for row in handler.application.biomek_errs_db.view("dates/timestamp", startkey=date_earlier, endkey=date_later):
         date = datetime.datetime.strptime(row.key, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
-        inst = f"{instruments_list[row.value["inst_id"]]}({row.value["inst_id"]})"
+        inst = f"{instruments_list[row.value['inst_id']]}({row.value['inst_id']})"
         method = row.value.get("method", 'diff')
         errs = row.value["errors"]
         valid_rows.append({"timestamp": f"{date}", "instrument_name": inst, "method": method, "message": errs})
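The instruments.py change is more than style: before Python 3.12 an f-string cannot reuse its own quote character inside a replacement field, so f"...{row.value["inst_id"]}..." is a SyntaxError on any interpreter older than 3.12 (PEP 701 only relaxed this rule in 3.12). Switching the inner subscripts to single quotes makes the line parse everywhere. A standalone sketch with invented data:

    row_value = {"inst_id": "bravo-01"}
    instruments_list = {"bravo-01": "Bravo 1"}

    # Inner quotes differ from the outer ones, so this parses on all Python 3 versions.
    inst = f"{instruments_list[row_value['inst_id']]}({row_value['inst_id']})"
    assert inst == "Bravo 1(bravo-01)"

    # f"{row_value["inst_id"]}" would be a SyntaxError before Python 3.12.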