From 9a4fa4ac4086d7b63d6540f0e9f1513f59b4c933 Mon Sep 17 00:00:00 2001
From: kedhammar
Date: Thu, 27 Jun 2024 11:07:40 +0200
Subject: [PATCH 01/40] add CI stuff from scilifelab/scilifelab_epps

---
 .editorconfig                   |  12 +++
 .github/workflows/lint-code.yml | 130 ++++++++++++++++++++++++++++++++
 pyproject.toml                  |  27 +++++++
 3 files changed, 169 insertions(+)
 create mode 100644 .editorconfig
 create mode 100644 .github/workflows/lint-code.yml
 create mode 100644 pyproject.toml

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..70c7a9a
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,12 @@
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+indent_size = 4
+indent_style = space
+
+[*.{md,yml,yaml,cff}]
+indent_size = 2
diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml
new file mode 100644
index 0000000..84cf192
--- /dev/null
+++ b/.github/workflows/lint-code.yml
@@ -0,0 +1,130 @@
+name: Lint code
+on: [push, pull_request]
+
+jobs:
+  # Use ruff to check for code style violations
+  ruff-check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install ruff
+      - name: ruff --> Check for style violations
+        # Configured in pyproject.toml
+        run: ruff check .
+
+  # Use ruff to check code formatting
+  ruff-format:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install ruff
+      - name: ruff --> Check code formatting
+        run: ruff format --check .
+
+  # Use mypy for static type checking
+  mypy-check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install mypy
+      # Start by installing type stubs
+      - name: mypy --> Install stubs
+        run: echo -e "y" | mypy --install-types . || exit 0
+      - name: mypy --> Static type checking
+        # Configured in pyproject.toml
+        run: mypy .
+
+  # Use pipreqs to check for missing dependencies
+  pipreqs-check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Install pipreqs
+        run: pip install pipreqs
+
+      - name: Install requirements
+        run: pip install -r requirements.txt
+
+      - name: Run pipreqs
+        run: pipreqs --savepath pipreqs.txt
+
+      - name: Compare requirements
+        run: |
+          # Extract and sort package names
+          awk '{print $1}' pipreqs.txt | sort -u > pipreqs.compare
+          awk -F'==' '{print $1}' requirements.txt | sort -u > requirements.compare
+
+          # Compare package lists
+          if cmp -s pipreqs.compare requirements.compare
+          then
+            echo "Requirements are the same"
+            exit 0
+          else
+            echo "Requirements are different"
+            exit 1
+          fi
+
+  # Use Prettier to check various file formats
+  prettier:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Setup node
+        uses: actions/setup-node@v4
+        with:
+          node-version: "20"
+
+      - name: Install Prettier
+        run: npm install -g prettier
+
+      - name: Run Prettier --check
+        run: prettier --check .
+
+  # Use editorconfig to check all remaining file formats
+  editorconfig:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+
+      - name: Setup node
+        uses: actions/setup-node@v4
+        with:
+          node-version: "20"
+
+      - name: Install editorconfig-checker
+        run: npm install -g editorconfig-checker
+
+      - name: editorconfig --> Lint files
+        run: editorconfig-checker $(git ls-files | grep -v '.py\|.md\|.json\|.yml\|.yaml\|.html')
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..4bb3c6a
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,27 @@
+title = "scilifelab_epps"
+
+
+[tool.ruff.lint]
+select =[
+    # Ruff default rules
+    # ------------------------------
+    "E4", # pycodestyle Imports
+    "E7", # pycodestyle Statements
+    "E9", # pycodestyle Runtime
+    "F", # Pyflakes
+
+    # Additional Comment
+    # ------------------------------------------------------
+    "I", # isort Best-practice sorting of imports
+    "UP", # pyupgrade Make sure syntax is up-to-date
+]
+ignore = [
+    "E402", # Module level import not at top of file
+    "E722", # Do not use bare 'except'
+    "E741", # Ambiguous variable name
+]
+
+
+[tool.mypy]
+ignore_missing_imports = true
+follow_imports = 'skip'

From 55ce1d16464169f444f519551b7c16fd4f483c32 Mon Sep 17 00:00:00 2001
From: kedhammar
Date: Thu, 27 Jun 2024 11:17:43 +0200
Subject: [PATCH 02/40] add ci packages

---
 requirements.txt | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 0ec5581..33da504 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,7 @@
-requests
-pytest
 mock
+mypy
+pipreqs
+pytest
+requests
+ruff
 six

From 429872dc8123126282f73c0c34ab2221fac00c90 Mon Sep 17 00:00:00 2001
From: kedhammar
Date: Thu, 27 Jun 2024 11:18:15 +0200
Subject: [PATCH 03/40] ruff format

---
 docs/conf.py                       | 161 ++---
 examples/attach_delivery_report.py |   6 +-
 examples/epp_script.py             |  23 +-
 examples/get_application.py        |   4 +-
 examples/get_artifacts.py          |   8 +-
 examples/get_containers.py         |  30 +-
 examples/get_labs.py               |  13 +-
 examples/get_processes.py          |   8 +-
 examples/get_projects.py           |  18 +-
 examples/get_samples.py            |  22 +-
 examples/get_samples2.py           |   9 +-
 examples/set_project_queued.py     |   6 +-
 examples/set_sample_name.py        |  10 +-
 genologics/__init__.py             |   1 +
 genologics/config.py               |  52 +-
 genologics/constants.py            |  62 +-
 genologics/descriptors.py          | 231 +++--
genologics/entities.py | 934 +++++++++++++++++------------ genologics/epp.py | 287 +++++---- genologics/internal_classes.py | 51 +- genologics/lims.py | 512 ++++++++++------ genologics/lims_utils.py | 60 +- genologics/test_utils.py | 26 +- genologics/version.py | 2 +- setup.py | 65 +- tests/test_descriptors.py | 158 ++--- tests/test_entities.py | 367 ++++++++---- tests/test_example.py | 17 +- tests/test_lims.py | 139 +++-- tests/to_rewrite_test_logging.py | 46 +- 30 files changed, 1953 insertions(+), 1375 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c2b63dc..f9feb6b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,201 +16,209 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', - 'sphinx.ext.pngmath', 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', 'sphinxcontrib.programoutput'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.coverage", + "sphinx.ext.pngmath", + "sphinx.ext.ifconfig", + "sphinx.ext.viewcode", + "sphinxcontrib.programoutput", +] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Genologics' -copyright = '2013, Per Kraulis, Johannes Alneberg' +project = "Genologics" +copyright = "2013, Per Kraulis, Johannes Alneberg" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '1.0.0' +version = "1.0.0" # The full version, including alpha/beta/rc tags. -release = '1.0.0' +release = "1.0.0" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
-#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'Genologicsdoc' +htmlhelp_basename = "Genologicsdoc" # -- Options for LaTeX output -------------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'Genologics.tex', 'Genologics Documentation', - 'Per Kraulis, Johannes Alneberg', 'manual'), + ( + "index", + "Genologics.tex", + "Genologics Documentation", + "Per Kraulis, Johannes Alneberg", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output -------------------------------------------- @@ -218,12 +226,17 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'genologics', 'Genologics Documentation', - ['Per Kraulis, Johannes Alneberg'], 1) + ( + "index", + "genologics", + "Genologics Documentation", + ["Per Kraulis, Johannes Alneberg"], + 1, + ) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ @@ -232,19 +245,25 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'Genologics', 'Genologics Documentation', - 'Per Kraulis, Johannes Alneberg', 'Genologics', 'One line description of project.', - 'Miscellaneous'), + ( + "index", + "Genologics", + "Genologics Documentation", + "Per Kraulis, Johannes Alneberg", + "Genologics", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
-#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False diff --git a/examples/attach_delivery_report.py b/examples/attach_delivery_report.py index 58af9e7..afb5d35 100644 --- a/examples/attach_delivery_report.py +++ b/examples/attach_delivery_report.py @@ -20,12 +20,12 @@ project = Project(lims, id="P193") -print('UDFs:') +print("UDFs:") pprint(list(project.udf.items())) -print('files:') +print("files:") for file in project.files: print(file.content_location) -project.udf['Delivery Report'] = "http://example.com/delivery_note.pdf" +project.udf["Delivery Report"] = "http://example.com/delivery_note.pdf" project.put() diff --git a/examples/epp_script.py b/examples/epp_script.py index 7dcce16..a8e7678 100644 --- a/examples/epp_script.py +++ b/examples/epp_script.py @@ -12,43 +12,44 @@ Johannes Alneberg, Science for Life Laboratory, Stockholm, Sweden. """ + from argparse import ArgumentParser from genologics.lims import Lims from genologics.entities import Process -from genologics.config import BASEURI,USERNAME,PASSWORD +from genologics.config import BASEURI, USERNAME, PASSWORD from genologics.epp import EppLogger, attach_file import sys -def main(lims,pid,file): + +def main(lims, pid, file): """Uploads a given file to the first output artifact of the process lims: The LIMS instance pid: Process Lims id file: File to be attached """ - p=Process(lims,id=pid) + p = Process(lims, id=pid) # Fetch all input-output artifact pairs io = p.input_output_maps # Filter them so that only PerInput output artifacts remains - io_filtered = [x for x in io if x[1]['output-generation-type']=='PerInput'] + io_filtered = [x for x in io if x[1]["output-generation-type"] == "PerInput"] # Fetch the first input-output artifact pair - (input,output) = io_filtered[0] + (input, output) = io_filtered[0] # Instantiate the output artifact - output_artifact = Artifact(output['limsid']) + output_artifact = Artifact(output["limsid"]) # Attach the file - attach_file(args.file,output_artifact) + attach_file(args.file, output_artifact) if __name__ == "__main__": parser = ArgumentParser() # Arguments that are useful in all EPP scripts - parser.add_argument("--log",default=sys.stdout, - help="Log file") + parser.add_argument("--log", default=sys.stdout, help="Log file") # Arguments specific for this scripts task parser.add_argument("--pid", help="Process id") @@ -58,7 +59,7 @@ def main(lims,pid,file): # Log everything to log argument with EppLogger(args.log): - lims = Lims(BASEURI,USERNAME,PASSWORD) + lims = Lims(BASEURI, USERNAME, PASSWORD) lims.check_version() - main(lims,args.pid,args.file) + main(lims, args.pid, args.file) diff --git a/examples/get_application.py b/examples/get_application.py index 84462a6..2e8adda 100644 --- a/examples/get_application.py +++ b/examples/get_application.py @@ -20,7 +20,7 @@ project = Project(lims, id="P193") -print('UDFs:') +print("UDFs:") pprint(list(project.udf.items())) -print(project.udf['Application']) +print(project.udf["Application"]) diff --git a/examples/get_artifacts.py b/examples/get_artifacts.py index 0577a9c..331bfd3 100644 --- a/examples/get_artifacts.py +++ b/examples/get_artifacts.py @@ -33,17 +33,17 @@ ## artifacts = lims.get_artifacts(working_flag=True) ## print len(artifacts), 'Working-flag True artifacts' -name = 'jgr33' +name = "jgr33" artifacts = lims.get_artifacts(sample_name=name) -print(len(artifacts), 'artifacts for sample name', name) +print(len(artifacts), "artifacts for sample name", name) artifacts = lims.get_batch(artifacts) for artifact in artifacts: 
print(artifact, artifact.name, artifact.state) print() -artifacts = lims.get_artifacts(qc_flag='PASSED') -print(len(artifacts), 'QC PASSED artifacts') +artifacts = lims.get_artifacts(qc_flag="PASSED") +print(len(artifacts), "QC PASSED artifacts") artifacts = lims.get_batch(artifacts) for artifact in artifacts: print(artifact, artifact.name, artifact.state) diff --git a/examples/get_containers.py b/examples/get_containers.py index 6447bcd..dfe9a33 100644 --- a/examples/get_containers.py +++ b/examples/get_containers.py @@ -26,7 +26,7 @@ ## containers = lims.get_containers(state=state) ## print len(containers), state, 'containers' -containers = lims.get_containers(type='96 well plate') +containers = lims.get_containers(type="96 well plate") print(len(containers)) container = containers[2] @@ -37,18 +37,22 @@ print(location, artifact.name, id(artifact), repr(artifact), artifact.root) containertype = container.type -print(containertype, containertype.name, containertype.x_dimension, containertype.y_dimension) +print( + containertype, + containertype.name, + containertype.x_dimension, + containertype.y_dimension, +) - -containers = lims.get_containers(type='Illumina Flow Cell',state='Populated') +containers = lims.get_containers(type="Illumina Flow Cell", state="Populated") for container in containers: - print(container.name) - print(container.id) - print(list(container.placements.keys())) - arts=lims.get_artifacts(containername=container.name) - for art in arts: - print(art.name) - print(art.type) - print(list(art.udf.items())) - print(art.parent_process.type.name) + print(container.name) + print(container.id) + print(list(container.placements.keys())) + arts = lims.get_artifacts(containername=container.name) + for art in arts: + print(art.name) + print(art.type) + print(list(art.udf.items())) + print(art.parent_process.type.name) diff --git a/examples/get_labs.py b/examples/get_labs.py index 0b63e08..cf8b832 100644 --- a/examples/get_labs.py +++ b/examples/get_labs.py @@ -6,6 +6,7 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ + from __future__ import unicode_literals from genologics.lims import * @@ -17,18 +18,18 @@ lims.check_version() # Get the list of all projects. -labs = lims.get_labs(name='SciLifeLab') -print(len(labs), 'labs in total') +labs = lims.get_labs(name="SciLifeLab") +print(len(labs), "labs in total") for lab in labs: print(lab, id(lab), lab.name, lab.uri, lab.id) print(list(lab.shipping_address.items())) for key, value in lab.udf.items(): - print(' ', key, '=', value) + print(" ", key, "=", value) udt = lab.udt if udt: - print('UDT:', udt.udt) + print("UDT:", udt.udt) for key, value in udt.items(): - print(' ', key, '=', value) + print(" ", key, "=", value) -lab = Lab(lims, id='2') +lab = Lab(lims, id="2") print(lab, id(lab), lab.name, lab.uri, lab.id) diff --git a/examples/get_processes.py b/examples/get_processes.py index 0bd0cf0..0fb87d8 100644 --- a/examples/get_processes.py +++ b/examples/get_processes.py @@ -18,12 +18,12 @@ # Get the list of all processes. 
processes = lims.get_processes() -print(len(processes), 'processes in total') +print(len(processes), "processes in total") -process = Process(lims, id='QCF-PJK-120703-24-1140') +process = Process(lims, id="QCF-PJK-120703-24-1140") print(process, process.id, process.type, process.type.name) for input, output in process.input_output_maps: if input: - print('input:', list(input.items())) + print("input:", list(input.items())) if output: - print('output:', list(output.items())) + print("output:", list(output.items())) diff --git a/examples/get_projects.py b/examples/get_projects.py index ab78d91..9317230 100644 --- a/examples/get_projects.py +++ b/examples/get_projects.py @@ -20,27 +20,27 @@ # Get the list of all projects. projects = lims.get_projects() -print(len(projects), 'projects in total') +print(len(projects), "projects in total") # Get the list of all projects opened since May 30th 2012. -day = '2012-05-30' +day = "2012-05-30" projects = lims.get_projects(open_date=day) -print(len(projects), 'projects opened since', day) +print(len(projects), "projects opened since", day) # Get the project with the specified LIMS id, and print some info. -project = Project(lims, id='P193') +project = Project(lims, id="P193") print(project, project.name, project.open_date, project.close_date) -print(' UDFs:') +print(" UDFs:") for key, value in project.udf.items(): - print(' ', key, '=', value) + print(" ", key, "=", value) udt = project.udt -print(' UDT:', udt.udt) +print(" UDT:", udt.udt) for key, value in udt.items(): - print(' ', key, '=', value) + print(" ", key, "=", value) -print(' files:') +print(" files:") for file in project.files: print(file.id) print(file.content_location) diff --git a/examples/get_samples.py b/examples/get_samples.py index 7006bd6..6c1e1c4 100644 --- a/examples/get_samples.py +++ b/examples/get_samples.py @@ -19,35 +19,35 @@ # Get the list of all samples. samples = lims.get_samples() -print(len(samples), 'samples in total') +print(len(samples), "samples in total") # Get the list of samples in the project with the LIMS id KLL60. -project = Project(lims, id='KRA61') +project = Project(lims, id="KRA61") samples = lims.get_samples(projectlimsid=project.id) -print(len(samples), 'samples in', project) +print(len(samples), "samples in", project) print() # Get the sample with the LIMS id KRA61A1 -sample = Sample(lims, id='KRA61A1') -print(sample.id, sample.name, sample.date_received, sample.uri, end=' ') +sample = Sample(lims, id="KRA61A1") +print(sample.id, sample.name, sample.date_received, sample.uri, end=" ") for key, value in sample.udf.items(): - print(' ', key, '=', value) + print(" ", key, "=", value) for note in sample.notes: - print('Note', note.uri, note.content) + print("Note", note.uri, note.content) for file in sample.files: - print('File', file.content_location) + print("File", file.content_location) # Get the sample with the name 'spruce_a'. # Check that it is the sample as the previously obtained sample; # not just equal, but exactly the same instance, courtesy of the Lims cache. -samples = lims.get_samples(name='spruce_a') +samples = lims.get_samples(name="spruce_a") print(samples[0].name, samples[0] is sample) ## # Get the samples having a UDF Color with values Blue or Orange. 
-samples = lims.get_samples(udf={'Color': ['Blue', 'Orange']}) +samples = lims.get_samples(udf={"Color": ["Blue", "Orange"]}) print(len(samples)) for sample in samples: - print(sample, sample.name, sample.udf['Color']) + print(sample, sample.name, sample.udf["Color"]) sample = samples[0] diff --git a/examples/get_samples2.py b/examples/get_samples2.py index 6748778..23e68fa 100644 --- a/examples/get_samples2.py +++ b/examples/get_samples2.py @@ -10,18 +10,19 @@ from genologics.lims import * from genologics.config import BASEURI, USERNAME, PASSWORD + lims = Lims(BASEURI, USERNAME, PASSWORD) lims.check_version() -project = Project(lims, id='KRA61') +project = Project(lims, id="KRA61") samples = lims.get_samples(projectlimsid=project.id) -print(len(samples), 'samples in', project) +print(len(samples), "samples in", project) for sample in samples: print(sample, sample.name, sample.date_received, sample.artifact) -name = 'spruce_a' +name = "spruce_a" artifacts = lims.get_artifacts(sample_name=name) -print(len(artifacts), 'artifacts for sample', name) +print(len(artifacts), "artifacts for sample", name) for artifact in artifacts: print(artifact, artifact.name, artifact.qc_flag) diff --git a/examples/set_project_queued.py b/examples/set_project_queued.py index d9a74a6..1a139e8 100644 --- a/examples/set_project_queued.py +++ b/examples/set_project_queued.py @@ -19,12 +19,12 @@ lims.check_version() # Get the project with the LIMS id KLL60, and print some info. -project = Project(lims, id='KLL60') +project = Project(lims, id="KLL60") print(project, project.name, project.open_date) print(list(project.udf.items())) -d = datetime.date(2012,1,2) +d = datetime.date(2012, 1, 2) print(d) -project.udf['Queued'] = d +project.udf["Queued"] = d project.put() diff --git a/examples/set_sample_name.py b/examples/set_sample_name.py index 945c583..b09e5b9 100644 --- a/examples/set_sample_name.py +++ b/examples/set_sample_name.py @@ -17,15 +17,15 @@ lims.check_version() # Get the sample with the given LIMS identifier, and output its current name. -sample = Sample(lims, id='JGR58A21') +sample = Sample(lims, id="JGR58A21") print(sample, sample.name) -sample.name = 'Joels extra-proper sample-20' +sample.name = "Joels extra-proper sample-20" # Set the value of one of the UDFs -sample.udf['Emmas field 2'] = 5 +sample.udf["Emmas field 2"] = 5 for key, value in sample.udf.items(): - print(' ', key, '=', value) + print(" ", key, "=", value) sample.put() -print('Updated sample', sample) +print("Updated sample", sample) diff --git a/genologics/__init__.py b/genologics/__init__.py index 93b6e40..37fd7e4 100644 --- a/genologics/__init__.py +++ b/genologics/__init__.py @@ -1,2 +1,3 @@ from . 
import version + __version__ = version.__version__ diff --git a/genologics/config.py b/genologics/config.py index 547c490..ce34fa3 100644 --- a/genologics/config.py +++ b/genologics/config.py @@ -8,47 +8,53 @@ from configparser import SafeConfigParser - -''' +""" Usage: from genologics.config import BASEURI, USERNAME, PASSWORD Alternate Usage: from genologics import config BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG = config.load_config(specified_config = ) -''' +""" spec_config = None + def get_config_info(config_file): config = SafeConfigParser() config.readfp(open(config_file)) - - - BASEURI = config.get('genologics', 'BASEURI').rstrip() - USERNAME = config.get('genologics', 'USERNAME').rstrip() - PASSWORD = config.get('genologics', 'PASSWORD').rstrip() - - if config.has_section('genologics') and config.has_option('genologics','VERSION'): - VERSION = config.get('genologics', 'VERSION').rstrip() + + BASEURI = config.get("genologics", "BASEURI").rstrip() + USERNAME = config.get("genologics", "USERNAME").rstrip() + PASSWORD = config.get("genologics", "PASSWORD").rstrip() + + if config.has_section("genologics") and config.has_option("genologics", "VERSION"): + VERSION = config.get("genologics", "VERSION").rstrip() else: - VERSION = 'v2' - - if config.has_section('logging') and config.has_option('logging','MAIN_LOG'): - MAIN_LOG = config.get('logging', 'MAIN_LOG').rstrip() + VERSION = "v2" + + if config.has_section("logging") and config.has_option("logging", "MAIN_LOG"): + MAIN_LOG = config.get("logging", "MAIN_LOG").rstrip() else: MAIN_LOG = None return BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG - -def load_config(specified_config = None): + +def load_config(specified_config=None): if specified_config != None: config_file = specified_config else: config = SafeConfigParser() try: - conf_file = config.read([os.path.expanduser('~/.genologicsrc'), '.genologicsrc', - 'genologics.conf', 'genologics.cfg', '/etc/genologics.conf']) + conf_file = config.read( + [ + os.path.expanduser("~/.genologicsrc"), + ".genologicsrc", + "genologics.conf", + "genologics.cfg", + "/etc/genologics.conf", + ] + ) # First config file found wins config_file = conf_file[0] @@ -58,7 +64,9 @@ def load_config(specified_config = None): BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG = get_config_info(config_file) - return BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG - + return BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG + -BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG = load_config(specified_config = spec_config) +BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG = load_config( + specified_config=spec_config +) diff --git a/genologics/constants.py b/genologics/constants.py index 42f9bb4..29ce4d6 100644 --- a/genologics/constants.py +++ b/genologics/constants.py @@ -10,46 +10,46 @@ from xml.etree import ElementTree _NSMAP = dict( - art='http://genologics.com/ri/artifact', - artgr='http://genologics.com/ri/artifactgroup', - cnf='http://genologics.com/ri/configuration', - con='http://genologics.com/ri/container', - ctp='http://genologics.com/ri/containertype', - exc='http://genologics.com/ri/exception', - file='http://genologics.com/ri/file', - inst='http://genologics.com/ri/instrument', - lab='http://genologics.com/ri/lab', - prc='http://genologics.com/ri/process', - prj='http://genologics.com/ri/project', - prop='http://genologics.com/ri/property', - protcnf='http://genologics.com/ri/protocolconfiguration', - protstepcnf='http://genologics.com/ri/stepconfiguration', - prx='http://genologics.com/ri/processexecution', 
- ptm='http://genologics.com/ri/processtemplate', - ptp='http://genologics.com/ri/processtype', - res='http://genologics.com/ri/researcher', - ri='http://genologics.com/ri', - rt='http://genologics.com/ri/routing', - rtp='http://genologics.com/ri/reagenttype', - kit='http://genologics.com/ri/reagentkit', - lot='http://genologics.com/ri/reagentlot', - smp='http://genologics.com/ri/sample', - stg='http://genologics.com/ri/stage', - stp='http://genologics.com/ri/step', - udf='http://genologics.com/ri/userdefined', - ver='http://genologics.com/ri/version', - wkfcnf='http://genologics.com/ri/workflowconfiguration' + art="http://genologics.com/ri/artifact", + artgr="http://genologics.com/ri/artifactgroup", + cnf="http://genologics.com/ri/configuration", + con="http://genologics.com/ri/container", + ctp="http://genologics.com/ri/containertype", + exc="http://genologics.com/ri/exception", + file="http://genologics.com/ri/file", + inst="http://genologics.com/ri/instrument", + lab="http://genologics.com/ri/lab", + prc="http://genologics.com/ri/process", + prj="http://genologics.com/ri/project", + prop="http://genologics.com/ri/property", + protcnf="http://genologics.com/ri/protocolconfiguration", + protstepcnf="http://genologics.com/ri/stepconfiguration", + prx="http://genologics.com/ri/processexecution", + ptm="http://genologics.com/ri/processtemplate", + ptp="http://genologics.com/ri/processtype", + res="http://genologics.com/ri/researcher", + ri="http://genologics.com/ri", + rt="http://genologics.com/ri/routing", + rtp="http://genologics.com/ri/reagenttype", + kit="http://genologics.com/ri/reagentkit", + lot="http://genologics.com/ri/reagentlot", + smp="http://genologics.com/ri/sample", + stg="http://genologics.com/ri/stage", + stp="http://genologics.com/ri/step", + udf="http://genologics.com/ri/userdefined", + ver="http://genologics.com/ri/version", + wkfcnf="http://genologics.com/ri/workflowconfiguration", ) for prefix, uri in _NSMAP.items(): ElementTree._namespace_map[uri] = prefix -_NSPATTERN = re.compile(r'(\{)(.+?)(\})') +_NSPATTERN = re.compile(r"(\{)(.+?)(\})") def nsmap(tag): "Convert from normal XML-ish namespace tag to ElementTree variant." - parts = tag.split(':') + parts = tag.split(":") if len(parts) != 2: raise ValueError("no namespace specifier in tag") return "{%s}%s" % (_NSMAP[parts[0]], parts[1]) diff --git a/genologics/descriptors.py b/genologics/descriptors.py index 57b4b67..5aae925 100644 --- a/genologics/descriptors.py +++ b/genologics/descriptors.py @@ -5,6 +5,7 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. 
Copyright (C) 2012 Per Kraulis """ + import six from genologics.constants import nsmap @@ -139,7 +140,7 @@ class BooleanDescriptor(StringDescriptor): def __get__(self, instance, cls): text = super(BooleanDescriptor, self).__get__(instance, cls) if text is not None: - return text.lower() == 'true' + return text.lower() == "true" def __set__(self, instance, value): super(BooleanDescriptor, self).__set__(instance, str(value).lower()) @@ -153,7 +154,7 @@ def _is_string(self, value): def __init__(self, instance, *args, **kwargs): self.instance = instance - self._udt = kwargs.pop('udt', False) + self._udt = kwargs.pop("udt", False) self.rootkeys = args self._rootnode = None self._update_elems() @@ -177,23 +178,23 @@ def get_udt(self): def set_udt(self, name): assert isinstance(name, str) if not self._udt: - raise AttributeError('cannot set name for a UDF dictionary') + raise AttributeError("cannot set name for a UDF dictionary") self._udt = name - elem = self.rootnode.find(nsmap('udf:type')) + elem = self.rootnode.find(nsmap("udf:type")) assert elem is not None - elem.set('name', name) + elem.set("name", name) udt = property(get_udt, set_udt) def _update_elems(self): self._elems = [] if self._udt: - elem = self.rootnode.find(nsmap('udf:type')) + elem = self.rootnode.find(nsmap("udf:type")) if elem is not None: - self._udt = elem.attrib['name'] - self._elems = elem.findall(nsmap('udf:field')) + self._udt = elem.attrib["name"] + self._elems = elem.findall(nsmap("udf:field")) else: - tag = nsmap('udf:field') + tag = nsmap("udf:field") for elem in list(self.rootnode): if elem.tag == tag: self._elems.append(elem) @@ -201,20 +202,20 @@ def _update_elems(self): def _prepare_lookup(self): self._lookup = dict() for elem in self._elems: - type = elem.attrib['type'].lower() + type = elem.attrib["type"].lower() value = elem.text if not value: value = None - elif type == 'numeric': + elif type == "numeric": try: value = int(value) except ValueError: value = float(value) - elif type == 'boolean': - value = value == 'true' - elif type == 'date': + elif type == "boolean": + value = value == "true" + elif type == "date": value = datetime.date(*time.strptime(value, "%Y-%m-%d")[:3]) - self._lookup[elem.attrib['name']] = value + self._lookup[elem.attrib["name"]] = value def __contains__(self, key): try: @@ -229,81 +230,82 @@ def __getitem__(self, key): def __setitem__(self, key, value): self._lookup[key] = value for node in self._elems: - if node.attrib['name'] != key: continue - vtype = node.attrib['type'].lower() + if node.attrib["name"] != key: + continue + vtype = node.attrib["type"].lower() if value is None: - value='' - elif vtype == 'string': + value = "" + elif vtype == "string": if not self._is_string(value): - raise TypeError('String UDF requires str or unicode value') - elif vtype == 'str': + raise TypeError("String UDF requires str or unicode value") + elif vtype == "str": if not self._is_string(value): - raise TypeError('String UDF requires str or unicode value') - elif vtype == 'text': + raise TypeError("String UDF requires str or unicode value") + elif vtype == "text": if not self._is_string(value): - raise TypeError('Text UDF requires str or unicode value') - elif vtype == 'numeric': - if not isinstance(value, (int, float, Decimal)) and value != '': - raise TypeError('Numeric UDF requires int or float value') + raise TypeError("Text UDF requires str or unicode value") + elif vtype == "numeric": + if not isinstance(value, (int, float, Decimal)) and value != "": + raise TypeError("Numeric UDF requires 
int or float value") else: value = str(value) - elif vtype == 'boolean': + elif vtype == "boolean": if not isinstance(value, bool): - raise TypeError('Boolean UDF requires bool value') - value = value and 'true' or 'false' - elif vtype == 'date': + raise TypeError("Boolean UDF requires bool value") + value = value and "true" or "false" + elif vtype == "date": if not isinstance(value, datetime.date): # Too restrictive? - raise TypeError('Date UDF requires datetime.date value') + raise TypeError("Date UDF requires datetime.date value") value = str(value) - elif vtype == 'uri': + elif vtype == "uri": if not self._is_string(value): - raise TypeError('URI UDF requires str or punycode (unicode) value') + raise TypeError("URI UDF requires str or punycode (unicode) value") value = str(value) else: raise NotImplemented("UDF type '%s'" % vtype) if not isinstance(value, str): if not self._is_string(value): - value = str(value).encode('UTF-8') + value = str(value).encode("UTF-8") node.text = value break else: # Create new entry; heuristics for type if self._is_string(value): - vtype = '\n' in value and 'Text' or 'String' + vtype = "\n" in value and "Text" or "String" elif isinstance(value, bool): - vtype = 'Boolean' - value = value and 'true' or 'false' + vtype = "Boolean" + value = value and "true" or "false" elif isinstance(value, (int, float, Decimal)): - vtype = 'Numeric' + vtype = "Numeric" value = str(value) elif isinstance(value, datetime.date): - vtype = 'Date' + vtype = "Date" value = str(value) else: - raise NotImplementedError("Cannot handle value of type '%s'" - " for UDF" % type(value)) + raise NotImplementedError( + "Cannot handle value of type '%s'" " for UDF" % type(value) + ) if self._udt: - root = self.rootnode.find(nsmap('udf:type')) + root = self.rootnode.find(nsmap("udf:type")) else: root = self.rootnode - elem = ElementTree.SubElement(root, - nsmap('udf:field'), - type=vtype, - name=key) + elem = ElementTree.SubElement( + root, nsmap("udf:field"), type=vtype, name=key + ) if not isinstance(value, str): if not self._is_string(value): - value = str(value).encode('UTF-8') + value = str(value).encode("UTF-8") elem.text = value - #update the internal elements and lookup with new values + # update the internal elements and lookup with new values self._update_elems() self._prepare_lookup() def __delitem__(self, key): del self._lookup[key] for node in self._elems: - if node.attrib['name'] == key: + if node.attrib["name"] == key: self.rootnode.remove(node) break @@ -336,6 +338,7 @@ class UdfDictionaryDescriptor(BaseDescriptor): """An instance attribute containing a dictionary of UDF values represented by multiple XML elements. 
""" + _UDT = False def __init__(self, *args): @@ -370,11 +373,12 @@ class PlacementDictionaryDescriptor(TagDescriptor): def __get__(self, instance, cls): from genologics.entities import Artifact + instance.get() self.value = dict() for node in instance.root.findall(self.tag): - key = node.find('value').text - self.value[key] = Artifact(instance.lims, uri=node.attrib['uri']) + key = node.find("value").text + self.value[key] = Artifact(instance.lims, uri=node.attrib["uri"]) return self.value @@ -386,8 +390,8 @@ class ExternalidListDescriptor(BaseDescriptor): def __get__(self, instance, cls): instance.get() result = [] - for node in instance.root.findall(nsmap('ri:externalid')): - result.append((node.attrib.get('id'), node.attrib.get('uri'))) + for node in instance.root.findall(nsmap("ri:externalid")): + result.append((node.attrib.get("id"), node.attrib.get("uri"))) return result @@ -404,7 +408,7 @@ def __get__(self, instance, cls): if node is None: return None else: - return self.klass(instance.lims, uri=node.attrib['uri']) + return self.klass(instance.lims, uri=node.attrib["uri"]) def __set__(self, instance, value): instance.get() @@ -413,9 +417,9 @@ def __set__(self, instance, value): # create the new tag node = ElementTree.Element(self.tag) instance.root.append(node) - node.attrib['uri'] = value.uri - if value._TAG in ['project', 'sample', 'artifact', 'container']: - node.attrib['limsid'] = value.id + node.attrib["uri"] = value.uri + if value._TAG in ["project", "sample", "artifact", "container"]: + node.attrib["limsid"] = value.id class EntityListDescriptor(EntityDescriptor): @@ -427,10 +431,11 @@ def __get__(self, instance, cls): instance.get() result = [] for node in instance.root.findall(self.tag): - result.append(self.klass(instance.lims, uri=node.attrib['uri'])) + result.append(self.klass(instance.lims, uri=node.attrib["uri"])) return result + class NestedBooleanDescriptor(TagDescriptor): def __init__(self, tag, *args): super(NestedBooleanDescriptor, self).__init__(tag) @@ -442,7 +447,7 @@ def __get__(self, instance, cls): rootnode = instance.root for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) - result = rootnode.find(self.tag).text.lower() == 'true' + result = rootnode.find(self.tag).text.lower() == "true" return result def __set__(self, instance, value): @@ -451,6 +456,7 @@ def __set__(self, instance, value): rootnode = rootnode.find(rootkey) rootnode.find(self.tag).text = str(value).lower() + class NestedStringDescriptor(TagDescriptor): def __init__(self, tag, *args): super(NestedStringDescriptor, self).__init__(tag) @@ -471,9 +477,10 @@ def __set__(self, instance, value): rootnode = rootnode.find(rootkey) rootnode.find(self.tag).text = value + class NestedAttributeListDescriptor(StringAttributeDescriptor): """An instance yielding a list of dictionnaries of attributes - for a nested xml list of XML elements""" + for a nested xml list of XML elements""" def __init__(self, tag, *args): super(StringAttributeDescriptor, self).__init__(tag) @@ -493,7 +500,7 @@ def __get__(self, instance, cls): class NestedStringListDescriptor(StringListDescriptor): """An instance yielding a list of strings - for a nested list of xml elements""" + for a nested list of xml elements""" def __init__(self, tag, *args): super(StringListDescriptor, self).__init__(tag) @@ -527,10 +534,11 @@ def __get__(self, instance, cls): for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) for node in rootnode.findall(self.tag): - result.append(self.klass(instance.lims, uri=node.attrib['uri'])) 
+ result.append(self.klass(instance.lims, uri=node.attrib["uri"])) return result + class MultiPageNestedEntityListDescriptor(EntityListDescriptor): """same as NestedEntityListDescriptor, but works on multiple pages, for Queues""" @@ -547,15 +555,16 @@ def __get__(self, instance, cls): for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) for node in rootnode.findall(self.tag): - result.append(self.klass(instance.lims, uri=node.attrib['uri'])) + result.append(self.klass(instance.lims, uri=node.attrib["uri"])) - if instance.root.find('next-page') is not None: - next_queue_page = instance.__class__(instance.lims, uri=instance.root.find('next-page').attrib.get('uri')) + if instance.root.find("next-page") is not None: + next_queue_page = instance.__class__( + instance.lims, uri=instance.root.find("next-page").attrib.get("uri") + ) result.extend(next_queue_page.artifacts) return result - class DimensionDescriptor(TagDescriptor): """An instance attribute containing a dictionary specifying the properties of a dimension of a container type. @@ -564,10 +573,11 @@ class DimensionDescriptor(TagDescriptor): def __get__(self, instance, cls): instance.get() node = instance.root.find(self.tag) - return dict(is_alpha=node.find('is-alpha').text.lower() == 'true', - offset=int(node.find('offset').text), - size=int(node.find('size').text) - ) + return dict( + is_alpha=node.find("is-alpha").text.lower() == "true", + offset=int(node.find("offset").text), + size=int(node.find("size").text), + ) class LocationDescriptor(TagDescriptor): @@ -577,12 +587,13 @@ class LocationDescriptor(TagDescriptor): def __get__(self, instance, cls): from genologics.entities import Container + instance.get() node = instance.root.find(self.tag) if node is None: - return (None,None) - uri = node.find('container').attrib['uri'] - return Container(instance.lims, uri=uri), node.find('value').text + return (None, None) + uri = node.find("container").attrib["uri"] + return Container(instance.lims, uri=uri), node.find("value").text class ReagentLabelList(BaseDescriptor): @@ -591,9 +602,9 @@ class ReagentLabelList(BaseDescriptor): def __get__(self, instance, cls): instance.get() self.value = [] - for node in instance.root.findall('reagent-label'): + for node in instance.root.findall("reagent-label"): try: - self.value.append(node.attrib['name']) + self.value.append(node.attrib["name"]) except: pass return self.value @@ -608,30 +619,35 @@ class OutputReagentList(BaseDescriptor): output_artifact_2:[reagent_label_name_3, reagent_label_name_4,...] 
} """ + def __init__(self, artifact_class): self.klass = artifact_class def __get__(self, instance, cls): instance.get() self.value = {} - for node in instance.root.iter('output'): - self.value[self.klass(instance.lims, uri=node.attrib['uri'])] = [subnode.attrib['name'] for subnode in node.findall('reagent-label')] + for node in instance.root.iter("output"): + self.value[self.klass(instance.lims, uri=node.attrib["uri"])] = [ + subnode.attrib["name"] for subnode in node.findall("reagent-label") + ] return self.value def __set__(self, instance, value): - out_r = ElementTree.Element('output-reagents') + out_r = ElementTree.Element("output-reagents") for artifact in value: - out_a = ElementTree.SubElement(out_r, 'output', attrib={'uri':artifact.uri}) + out_a = ElementTree.SubElement( + out_r, "output", attrib={"uri": artifact.uri} + ) for reagent_label_name in value[artifact]: - rea_l = ElementTree.SubElement(out_a, 'reagent-label', attrib={'name':reagent_label_name}) + rea_l = ElementTree.SubElement( + out_a, "reagent-label", attrib={"name": reagent_label_name} + ) - instance.root.remove(instance.root.find('output-reagents')) + instance.root.remove(instance.root.find("output-reagents")) instance.root.append(out_r) - - class InputOutputMapList(BaseDescriptor): """An instance attribute yielding a list of tuples (input, output) where each item is a dictionary, representing the input/output @@ -648,33 +664,34 @@ def __get__(self, instance, cls): rootnode = instance.root for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) - for node in rootnode.findall('input-output-map'): - input = self.get_dict(instance.lims, node.find('input')) - output = self.get_dict(instance.lims, node.find('output')) + for node in rootnode.findall("input-output-map"): + input = self.get_dict(instance.lims, node.find("input")) + output = self.get_dict(instance.lims, node.find("output")) self.value.append((input, output)) return self.value def get_dict(self, lims, node): from genologics.entities import Artifact, Process - if node is None: return None + + if node is None: + return None result = dict() - for key in ['limsid', 'output-type', 'output-generation-type']: + for key in ["limsid", "output-type", "output-generation-type"]: try: result[key] = node.attrib[key] except KeyError: pass - for uri in ['uri', 'post-process-uri']: + for uri in ["uri", "post-process-uri"]: try: result[uri] = Artifact(lims, uri=node.attrib[uri]) except KeyError: pass - node = node.find('parent-process') + node = node.find("parent-process") if node is not None: - result['parent-process'] = Process(lims, node.attrib['uri']) + result["parent-process"] = Process(lims, node.attrib["uri"]) return result - class ProcessTypeParametersDescriptor(object): def __getitem__(self, index): return self.params[index] @@ -683,12 +700,13 @@ def __setitem__(self, index, value): self.params[index] = value def __delitem__(self, index): - del(self.params[index]) + del self.params[index] def __init__(self, pt_instance): from genologics.internal_classes import ProcessTypeParameter + pt_instance.get() - self.tag = 'parameter' + self.tag = "parameter" self.params = [] for node in pt_instance.root.findall(self.tag): self.params.append(ProcessTypeParameter(pt_instance, node)) @@ -698,7 +716,6 @@ def __repr__(self): class ProcessTypeProcessInputDescriptor(TagDescriptor): - def __getitem__(self, index): return self._inputs[index] @@ -706,16 +723,17 @@ def __setitem__(self, index, value): self._inputs[index] = value def __delitem__(self, index): - 
del(self._inputs[index]) + del self._inputs[index] def __init__(self): - self._inputs=[] - self.tag = 'process-input' + self._inputs = [] + self.tag = "process-input" super(ProcessTypeProcessInputDescriptor, self).__init__(tag=self.tag) def __get__(self, instance, owner): from genologics.internal_classes import ProcessTypeProcessInput - for node in instance.root.findall(self.tag): + + for node in instance.root.findall(self.tag): self._inputs.append(ProcessTypeProcessInput(instance, node)) return self @@ -724,7 +742,6 @@ def __repr__(self): class ProcessTypeProcessOutputDescriptor(TagDescriptor): - def __getitem__(self, index): return self._inputs[index] @@ -732,15 +749,16 @@ def __setitem__(self, index, value): self._inputs[index] = value def __delitem__(self, index): - del(self._inputs[index]) + del self._inputs[index] def __init__(self): - self._inputs=[] - self.tag = 'process-output' + self._inputs = [] + self.tag = "process-output" super(ProcessTypeProcessOutputDescriptor, self).__init__(tag=self.tag) def __get__(self, instance, owner): from genologics.internal_classes import ProcessTypeProcessOutput + for node in instance.root.findall(self.tag): self._inputs.append(ProcessTypeProcessOutput(instance, node)) return self @@ -750,9 +768,8 @@ def __repr__(self): class NamedStringDescriptor(TagDescriptor): - def __get__(self, instance, owner): - self._internals={} + self._internals = {} for node in instance.root.findall(self.tag): - self._internals[node.attrib['name']] = node.text + self._internals[node.attrib["name"]] = node.text return self._internals diff --git a/genologics/entities.py b/genologics/entities.py index 96c7fbb..c295f1d 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -7,13 +7,36 @@ """ from genologics.constants import nsmap -from genologics.descriptors import StringDescriptor, StringDictionaryDescriptor, UdfDictionaryDescriptor, \ - UdtDictionaryDescriptor, ExternalidListDescriptor, EntityDescriptor, BooleanDescriptor, EntityListDescriptor, \ - StringAttributeDescriptor, StringListDescriptor, DimensionDescriptor, IntegerDescriptor, \ - PlacementDictionaryDescriptor, InputOutputMapList, LocationDescriptor, ReagentLabelList, NestedEntityListDescriptor, \ - NestedStringListDescriptor, NestedAttributeListDescriptor, IntegerAttributeDescriptor, NestedStringDescriptor, \ - NestedBooleanDescriptor, MultiPageNestedEntityListDescriptor, ProcessTypeParametersDescriptor, \ - ProcessTypeProcessInputDescriptor, ProcessTypeProcessOutputDescriptor, NamedStringDescriptor, OutputReagentList +from genologics.descriptors import ( + StringDescriptor, + StringDictionaryDescriptor, + UdfDictionaryDescriptor, + UdtDictionaryDescriptor, + ExternalidListDescriptor, + EntityDescriptor, + BooleanDescriptor, + EntityListDescriptor, + StringAttributeDescriptor, + StringListDescriptor, + DimensionDescriptor, + IntegerDescriptor, + PlacementDictionaryDescriptor, + InputOutputMapList, + LocationDescriptor, + ReagentLabelList, + NestedEntityListDescriptor, + NestedStringListDescriptor, + NestedAttributeListDescriptor, + IntegerAttributeDescriptor, + NestedStringDescriptor, + NestedBooleanDescriptor, + MultiPageNestedEntityListDescriptor, + ProcessTypeParametersDescriptor, + ProcessTypeProcessInputDescriptor, + ProcessTypeProcessOutputDescriptor, + NamedStringDescriptor, + OutputReagentList, +) try: from urllib.parse import urlsplit, urlparse, parse_qs, urlunparse @@ -31,8 +54,15 @@ class SampleHistory: """Class handling the history generation for a given sample/artifact AFAIK the only 
fields of the history that are read are proc.type and outart""" - def __init__(self, sample_name=None, output_artifact=None, input_artifact=None, lims=None, pro_per_art=None, - test=False): + def __init__( + self, + sample_name=None, + output_artifact=None, + input_artifact=None, + lims=None, + pro_per_art=None, + test=False, + ): self.processes_per_artifact = pro_per_art if lims: self.lims = lims @@ -48,11 +78,12 @@ def __init__(self, sample_name=None, output_artifact=None, input_artifact=None, self.get_analyte_hist_sorted(output_artifact, input_artifact) else: logger.error("Tried to build History without lims") - raise AttributeError("History cannot be computed without a valid lims object") + raise AttributeError( + "History cannot be computed without a valid lims object" + ) def control(self): - """this can be used to check the content of the object. - """ + """this can be used to check the content of the object.""" logger.info("SAMPLE NAME: {}".format(self.sample_name)) logger.info("outart : {}".format(self.history_list[0])) # logger.info ("\nmap :") @@ -65,7 +96,11 @@ def control(self): for key2, dict2 in dict.items(): logger.info("\t{}".format(key2)) for key, value in dict2.items(): - logger.info("\t\t{0}->{1}".format(key, (value if value is not None else "None"))) + logger.info( + "\t\t{0}->{1}".format( + key, (value if value is not None else "None") + ) + ) logger.info("\nHistory List") for art in self.history_list: logger.info(art) @@ -78,13 +113,18 @@ def make_sample_artifact_map(self): and creates an entry like this : output -> (process, input)""" samp_art_map = {} if self.sample_name: - artifacts = self.lims.get_artifacts(sample_name=self.sample_name, type='Analyte', resolve=False) + artifacts = self.lims.get_artifacts( + sample_name=self.sample_name, type="Analyte", resolve=False + ) for one_art in artifacts: input_arts = one_art.input_artifact_list() for input_art in input_arts: for samp in input_art.samples: if samp.name == self.sample_name: - samp_art_map[one_art.id] = (one_art.parent_process, input_art.id) + samp_art_map[one_art.id] = ( + one_art.parent_process, + input_art.id, + ) self.art_map = samp_art_map @@ -97,7 +137,9 @@ def alternate_history(self, out_art, in_art=None): history = {} hist_list = [] # getting the list of all expected analytes. 
- artifacts = self.lims.get_artifacts(sample_name=self.sample_name, type='Analyte', resolve=False) + artifacts = self.lims.get_artifacts( + sample_name=self.sample_name, type="Analyte", resolve=False + ) processes = [] inputs = [] if in_art: @@ -112,12 +154,18 @@ def alternate_history(self, out_art, in_art=None): valid_pcs = self.lims.get_processes(inputartifactlimsid=in_art) for tempProcess in valid_pcs: - history[in_art][tempProcess.id] = {'date': tempProcess.date_run, - 'id': tempProcess.id, - 'outart': (out_art if out_art in [out.id for out in tempProcess.all_outputs()] else None), - 'inart': in_art, - 'type': tempProcess.type.id, - 'name': tempProcess.type.name} + history[in_art][tempProcess.id] = { + "date": tempProcess.date_run, + "id": tempProcess.id, + "outart": ( + out_art + if out_art in [out.id for out in tempProcess.all_outputs()] + else None + ), + "inart": in_art, + "type": tempProcess.type.id, + "name": tempProcess.type.name, + } else: starting_art = out_art # main iteration @@ -142,18 +190,28 @@ def alternate_history(self, out_art, in_art=None): logger.info(i.id) if i in artifacts: history[i.id] = {} - for tempProcess in (self.processes_per_artifact[i.id] if self.processes_per_artifact else self.lims.get_processes(inputartifactlimsid=i.id)): # If there is a loacl map, use it. else, query the lims. - history[i.id][tempProcess.id] = {'date': tempProcess.date_run, - 'id': tempProcess.id, - 'outart': ( - o.id if tempProcess.id == o.parent_process.id else None), - 'inart': i.id, - 'type': tempProcess.type.id, - 'name': tempProcess.type.name} + for tempProcess in ( + self.processes_per_artifact[i.id] + if self.processes_per_artifact + else self.lims.get_processes(inputartifactlimsid=i.id) + ): # If there is a loacl map, use it. else, query the lims. + history[i.id][tempProcess.id] = { + "date": tempProcess.date_run, + "id": tempProcess.id, + "outart": ( + o.id + if tempProcess.id == o.parent_process.id + else None + ), + "inart": i.id, + "type": tempProcess.type.id, + "name": tempProcess.type.name, + } logger.info("found input " + i.id) inputs.append( - i.id) # this will be the sorted list of artifacts used to rebuild the history in order + i.id + ) # this will be the sorted list of artifacts used to rebuild the history in order # while increment starting_art = i.id @@ -196,12 +254,16 @@ def get_analyte_hist_sorted(self, out_artifact, input_art=None): # pro = In_art.parent_process.id # except: # pro = None - history, out_artifact = self._add_out_art_process_conection_list(input_art, out_artifact, history) + history, out_artifact = self._add_out_art_process_conection_list( + input_art, out_artifact, history + ) hist_list.append(input_art) while out_artifact in self.art_map: pro, input_art = self.art_map[out_artifact] hist_list.append(input_art) - history, out_artifact = self._add_out_art_process_conection_list(input_art, out_artifact, history) + history, out_artifact = self._add_out_art_process_conection_list( + input_art, out_artifact, history + ) self.history = history self.history_list = hist_list @@ -213,17 +275,22 @@ def _add_out_art_process_conection_list(self, input_art, out_artifact, history={ processes that the input artifact has been involved in, but that are not part of the historychain get the outart set to None. 
This is very important.""" # Use the local process map if we have one, else, query the lims - for process in self.processes_per_artifact[input_art] if self.processes_per_artifact else lims.get_processes( - inputartifactlimsid=input_art): + for process in ( + self.processes_per_artifact[input_art] + if self.processes_per_artifact + else lims.get_processes(inputartifactlimsid=input_art) + ): # outputs = map(lambda a: (a.id), process.all_outputs()) outputs = [a.id for a in process.all_outputs()] outart = out_artifact if out_artifact in outputs else None - step_info = {'date': process.date_run, - 'id': process.id, - 'outart': outart, - 'inart': input_art, - 'type': process.type.id, - 'name': process.type.name} + step_info = { + "date": process.date_run, + "id": process.id, + "outart": outart, + "inart": input_art, + "type": process.type.id, + "name": process.type.name, + } if input_art in history: history[input_art][process.id] = step_info else: @@ -255,7 +322,8 @@ def __new__(cls, lims, uri=None, id=None, _create_new=False): def __init__(self, lims, uri=None, id=None, _create_new=False): assert uri or id or _create_new if not _create_new: - if hasattr(self, 'lims'): return + if hasattr(self, "lims"): + return if not uri: uri = lims.get_uri(self._URI, id) lims.cache[uri] = self @@ -281,11 +349,12 @@ def uri(self): def id(self): "Return the LIMS id; obtained from the URI." parts = urlsplit(self.uri) - return parts.path.split('/')[-1] + return parts.path.split("/")[-1] def get(self, force=False): "Get the XML data for this instance." - if not force and self.root is not None: return + if not force and self.root is not None: + return self.root = self.lims.get(self.uri) def put(self): @@ -305,21 +374,26 @@ def xml(self): def _create(cls, lims, creation_tag=None, udfs=None, **kwargs): """Create an instance from attributes and return it""" if not udfs: - udfs={} + udfs = {} instance = cls(lims, _create_new=True) if creation_tag: - instance.root = ElementTree.Element(nsmap(cls._PREFIX + ':' + creation_tag)) + instance.root = ElementTree.Element(nsmap(cls._PREFIX + ":" + creation_tag)) elif cls._TAG: - instance.root = ElementTree.Element(nsmap(cls._PREFIX + ':' + cls._TAG)) + instance.root = ElementTree.Element(nsmap(cls._PREFIX + ":" + cls._TAG)) else: - instance.root = ElementTree.Element(nsmap(cls._PREFIX + ':' + cls.__name__.lower())) + instance.root = ElementTree.Element( + nsmap(cls._PREFIX + ":" + cls.__name__.lower()) + ) for key in udfs: - instance.udf[key]=udfs[key] + instance.udf[key] = udfs[key] for attribute in kwargs: if hasattr(instance, attribute): setattr(instance, attribute, kwargs.get(attribute)) else: - raise TypeError("%s create: got an unexpected keyword argument '%s'" % (cls.__name__, attribute)) + raise TypeError( + "%s create: got an unexpected keyword argument '%s'" + % (cls.__name__, attribute) + ) return instance @@ -329,79 +403,85 @@ def create(cls, lims, creation_tag=None, **kwargs): instance = cls._create(lims, creation_tag=creation_tag, **kwargs) data = lims.tostring(ElementTree.ElementTree(instance.root)) instance.root = lims.post(uri=lims.get_uri(cls._URI), data=data) - instance._uri = instance.root.attrib['uri'] + instance._uri = instance.root.attrib["uri"] return instance class Instrument(Entity): - """Lab Instrument - """ + """Lab Instrument""" + _URI = "instruments" _TAG = "instrument" _PREFIX = "inst" - name = StringDescriptor('name') - type = StringDescriptor('type') - serial_number = StringDescriptor('serial-number') - expiry_date = 
StringDescriptor('expiry-date') - archived = BooleanDescriptor('archived') + name = StringDescriptor("name") + type = StringDescriptor("type") + serial_number = StringDescriptor("serial-number") + expiry_date = StringDescriptor("expiry-date") + archived = BooleanDescriptor("archived") + class Lab(Entity): "Lab; container of researchers." - _URI = 'labs' - _PREFIX = 'lab' + _URI = "labs" + _PREFIX = "lab" + + name = StringDescriptor("name") + billing_address = StringDictionaryDescriptor("billing-address") + shipping_address = StringDictionaryDescriptor("shipping-address") + udf = UdfDictionaryDescriptor() + udt = UdtDictionaryDescriptor() + externalids = ExternalidListDescriptor() + website = StringDescriptor("website") - name = StringDescriptor('name') - billing_address = StringDictionaryDescriptor('billing-address') - shipping_address = StringDictionaryDescriptor('shipping-address') - udf = UdfDictionaryDescriptor() - udt = UdtDictionaryDescriptor() - externalids = ExternalidListDescriptor() - website = StringDescriptor('website') class Researcher(Entity): "Person; client scientist or lab personnel. Associated with a lab." - _URI = 'researchers' - _PREFIX = 'res' - - first_name = StringDescriptor('first-name') - last_name = StringDescriptor('last-name') - phone = StringDescriptor('phone') - fax = StringDescriptor('fax') - email = StringDescriptor('email') - initials = StringDescriptor('initials') - lab = EntityDescriptor('lab', Lab) - udf = UdfDictionaryDescriptor() - udt = UdtDictionaryDescriptor() + _URI = "researchers" + _PREFIX = "res" + + first_name = StringDescriptor("first-name") + last_name = StringDescriptor("last-name") + phone = StringDescriptor("phone") + fax = StringDescriptor("fax") + email = StringDescriptor("email") + initials = StringDescriptor("initials") + lab = EntityDescriptor("lab", Lab) + udf = UdfDictionaryDescriptor() + udt = UdtDictionaryDescriptor() externalids = ExternalidListDescriptor() # credentials XXX - username = NestedStringDescriptor('username', 'credentials') - account_locked = NestedBooleanDescriptor('account-locked', 'credentials') + username = NestedStringDescriptor("username", "credentials") + account_locked = NestedBooleanDescriptor("account-locked", "credentials") @property def name(self): return "%s %s" % (self.first_name, self.last_name) + class Permission(Entity): """A Clarity permission. Only supports GET""" - name = StringDescriptor('name') - action = StringDescriptor('action') - description = StringDescriptor('description') + + name = StringDescriptor("name") + action = StringDescriptor("action") + description = StringDescriptor("description") class Role(Entity): """Clarity Role, hosting permissions""" - name = StringDescriptor('name') - researchers = NestedEntityListDescriptor('researcher', Researcher, 'researchers') - permissions = NestedEntityListDescriptor('permission', Permission, 'permissions') + + name = StringDescriptor("name") + researchers = NestedEntityListDescriptor("researcher", Researcher, "researchers") + permissions = NestedEntityListDescriptor("permission", Permission, "permissions") class Reagent_label(Entity): """Reagent label element""" - reagent_label = StringDescriptor('reagent-label') + + reagent_label = StringDescriptor("reagent-label") class Note(Entity): @@ -413,99 +493,100 @@ class Note(Entity): class File(Entity): "File attached to a project or a sample." 
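# --- Illustrative aside, not part of the patch ---------------------------------
# A minimal sketch of how the descriptor-based entities above are used: fields
# are read and written as plain attributes and persisted with put(). The
# researcher id and phone number are made-up placeholders.
from genologics.config import BASEURI, PASSWORD, USERNAME
from genologics.entities import Researcher
from genologics.lims import Lims

lims = Lims(BASEURI, USERNAME, PASSWORD)
res = Researcher(lims, id="105")      # hypothetical researcher id
print(res.first_name, res.last_name, res.email)
print(res.lab.name)                   # EntityDescriptor resolves the linked Lab
res.phone = "+46 123 456 78"          # StringDescriptor setter updates the XML
res.put()                             # PUT the modified XML back to the LIMS
# --------------------------------------------------------------------------------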
- attached_to = StringDescriptor('attached-to') - content_location = StringDescriptor('content-location') - original_location = StringDescriptor('original-location') - is_published = BooleanDescriptor('is-published') + attached_to = StringDescriptor("attached-to") + content_location = StringDescriptor("content-location") + original_location = StringDescriptor("original-location") + is_published = BooleanDescriptor("is-published") class Project(Entity): "Project concerning a number of samples; associated with a researcher." - _URI = 'projects' - _TAG = 'project' - _PREFIX = 'prj' - - name = StringDescriptor('name') - open_date = StringDescriptor('open-date') - close_date = StringDescriptor('close-date') - invoice_date = StringDescriptor('invoice-date') - researcher = EntityDescriptor('researcher', Researcher) - udf = UdfDictionaryDescriptor() - udt = UdtDictionaryDescriptor() - files = EntityListDescriptor(nsmap('file:file'), File) - externalids = ExternalidListDescriptor() + _URI = "projects" + _TAG = "project" + _PREFIX = "prj" + + name = StringDescriptor("name") + open_date = StringDescriptor("open-date") + close_date = StringDescriptor("close-date") + invoice_date = StringDescriptor("invoice-date") + researcher = EntityDescriptor("researcher", Researcher) + udf = UdfDictionaryDescriptor() + udt = UdtDictionaryDescriptor() + files = EntityListDescriptor(nsmap("file:file"), File) + externalids = ExternalidListDescriptor() # permissions XXX class Sample(Entity): "Customer's sample to be analyzed; associated with a project." - _URI = 'samples' - _TAG = 'sample' - _PREFIX = 'smp' + _URI = "samples" + _TAG = "sample" + _PREFIX = "smp" - name = StringDescriptor('name') - date_received = StringDescriptor('date-received') - date_completed = StringDescriptor('date-completed') - project = EntityDescriptor('project', Project) - submitter = EntityDescriptor('submitter', Researcher) + name = StringDescriptor("name") + date_received = StringDescriptor("date-received") + date_completed = StringDescriptor("date-completed") + project = EntityDescriptor("project", Project) + submitter = EntityDescriptor("submitter", Researcher) # artifact: defined below - udf = UdfDictionaryDescriptor() - udt = UdtDictionaryDescriptor() - notes = EntityListDescriptor('note', Note) - files = EntityListDescriptor(nsmap('file:file'), File) - externalids = ExternalidListDescriptor() + udf = UdfDictionaryDescriptor() + udt = UdtDictionaryDescriptor() + notes = EntityListDescriptor("note", Note) + files = EntityListDescriptor(nsmap("file:file"), File) + externalids = ExternalidListDescriptor() # biosource XXX - @classmethod def create(cls, lims, container, position, udfs=None, **kwargs): """Create an instance of Sample from attributes then post it to the LIMS""" if udfs is None: udfs = {} if not isinstance(container, Container): - raise TypeError('%s is not of type Container'%container) - instance = super(Sample, cls)._create(lims, creation_tag='samplecreation',udfs=udfs, **kwargs) + raise TypeError("%s is not of type Container" % container) + instance = super(Sample, cls)._create( + lims, creation_tag="samplecreation", udfs=udfs, **kwargs + ) - location = ElementTree.SubElement(instance.root, 'location') - ElementTree.SubElement(location, 'container', dict(uri=container.uri)) - position_element = ElementTree.SubElement(location, 'value') + location = ElementTree.SubElement(instance.root, "location") + ElementTree.SubElement(location, "container", dict(uri=container.uri)) + position_element = ElementTree.SubElement(location, 
"value") position_element.text = position data = lims.tostring(ElementTree.ElementTree(instance.root)) instance.root = lims.post(uri=lims.get_uri(cls._URI), data=data) - instance._uri = instance.root.attrib['uri'] + instance._uri = instance.root.attrib["uri"] return instance class Containertype(Entity): "Type of container for analyte artifacts." - _TAG = 'container-type' - _URI = 'containertypes' - _PREFIX = 'ctp' + _TAG = "container-type" + _URI = "containertypes" + _PREFIX = "ctp" - name = StringAttributeDescriptor('name') - calibrant_wells = StringListDescriptor('calibrant-well') - unavailable_wells = StringListDescriptor('unavailable-well') - x_dimension = DimensionDescriptor('x-dimension') - y_dimension = DimensionDescriptor('y-dimension') + name = StringAttributeDescriptor("name") + calibrant_wells = StringListDescriptor("calibrant-well") + unavailable_wells = StringListDescriptor("unavailable-well") + x_dimension = DimensionDescriptor("x-dimension") + y_dimension = DimensionDescriptor("y-dimension") class Container(Entity): "Container for analyte artifacts." - _URI = 'containers' - _TAG = 'container' - _PREFIX = 'con' + _URI = "containers" + _TAG = "container" + _PREFIX = "con" - name = StringDescriptor('name') - type = EntityDescriptor('type', Containertype) - occupied_wells = IntegerDescriptor('occupied-wells') - placements = PlacementDictionaryDescriptor('placement') - udf = UdfDictionaryDescriptor() - udt = UdtDictionaryDescriptor() - state = StringDescriptor('state') + name = StringDescriptor("name") + type = EntityDescriptor("type", Containertype) + occupied_wells = IntegerDescriptor("occupied-wells") + placements = PlacementDictionaryDescriptor("placement") + udf = UdfDictionaryDescriptor() + udt = UdtDictionaryDescriptor() + state = StringDescriptor("state") def get_placements(self): """Get the dictionary of locations and artifacts @@ -518,87 +599,88 @@ def delete(self): self.lims.delete(self.uri) - - class Udfconfig(Entity): "Instance of field type (cnf namespace)." 
- _URI = 'configuration/udfs' - - name = StringDescriptor('name') - attach_to_name = StringDescriptor('attach-to-name') - attach_to_category = StringDescriptor('attach-to-category') - show_in_lablink = BooleanDescriptor('show-in-lablink') - allow_non_preset_values = BooleanDescriptor('allow-non-preset-values') - first_preset_is_default_value = BooleanDescriptor('first-preset-is-default-value') - show_in_tables = BooleanDescriptor('show-in-tables') - is_editable = BooleanDescriptor('is-editable') - is_required = BooleanDescriptor('is-required') - is_deviation = BooleanDescriptor('is-deviation') - is_controlled_vocabulary = BooleanDescriptor('is-controlled-vocabulary') - presets = StringListDescriptor('preset') + + _URI = "configuration/udfs" + + name = StringDescriptor("name") + attach_to_name = StringDescriptor("attach-to-name") + attach_to_category = StringDescriptor("attach-to-category") + show_in_lablink = BooleanDescriptor("show-in-lablink") + allow_non_preset_values = BooleanDescriptor("allow-non-preset-values") + first_preset_is_default_value = BooleanDescriptor("first-preset-is-default-value") + show_in_tables = BooleanDescriptor("show-in-tables") + is_editable = BooleanDescriptor("is-editable") + is_required = BooleanDescriptor("is-required") + is_deviation = BooleanDescriptor("is-deviation") + is_controlled_vocabulary = BooleanDescriptor("is-controlled-vocabulary") + presets = StringListDescriptor("preset") class Processtype(Entity): - _TAG = 'process-type' - _URI = 'processtypes' - _PREFIX = 'ptp' + _TAG = "process-type" + _URI = "processtypes" + _PREFIX = "ptp" def __init__(self, lims, uri=None, id=None, _create_new=False): super(Processtype, self).__init__(lims, uri, id, _create_new) self.parameters = ProcessTypeParametersDescriptor(self) - name = StringAttributeDescriptor('name') - field_definition = EntityListDescriptor('field-definition', Udfconfig) + name = StringAttributeDescriptor("name") + field_definition = EntityListDescriptor("field-definition", Udfconfig) process_inputs = ProcessTypeProcessInputDescriptor() process_outputs = ProcessTypeProcessOutputDescriptor() - process_type_attribute = NamedStringDescriptor('process-type-attribute') - + process_type_attribute = NamedStringDescriptor("process-type-attribute") @property def process_input(self): return self.process_inputs[0] + class ControlType(Entity): _URI = "controltypes" _TAG = "control-type" - _PREFIX = 'ctrltp' + _PREFIX = "ctrltp" - name = StringAttributeDescriptor('name') - supplier = StringDescriptor('supplier') - archived = BooleanDescriptor('archived') - single_step = BooleanDescriptor('single_step') + name = StringAttributeDescriptor("name") + supplier = StringDescriptor("supplier") + archived = BooleanDescriptor("archived") + single_step = BooleanDescriptor("single_step") class Process(Entity): "Process (instance of Processtype) executed producing ouputs from inputs." 
- _URI = 'processes' - _PREFIX = 'prc' + _URI = "processes" + _PREFIX = "prc" - type = EntityDescriptor('type', Processtype) - date_run = StringDescriptor('date-run') - technician = EntityDescriptor('technician', Researcher) - protocol_name = StringDescriptor('protocol-name') + type = EntityDescriptor("type", Processtype) + date_run = StringDescriptor("date-run") + technician = EntityDescriptor("technician", Researcher) + protocol_name = StringDescriptor("protocol-name") input_output_maps = InputOutputMapList() - udf = UdfDictionaryDescriptor() - udt = UdtDictionaryDescriptor() - files = EntityListDescriptor(nsmap('file:file'), File) - process_parameter = StringDescriptor('process-parameter') - instrument = EntityDescriptor('instrument', Instrument) + udf = UdfDictionaryDescriptor() + udt = UdtDictionaryDescriptor() + files = EntityListDescriptor(nsmap("file:file"), File) + process_parameter = StringDescriptor("process-parameter") + instrument = EntityDescriptor("instrument", Instrument) # process_parameters XXX - def outputs_per_input(self, inart, ResultFile=False, SharedResultFile=False, Analyte=False): + def outputs_per_input( + self, inart, ResultFile=False, SharedResultFile=False, Analyte=False + ): """Getting all the output artifacts related to a particual input artifact""" - inouts = [io for io in self.input_output_maps if io[0]['limsid'] == inart] + inouts = [io for io in self.input_output_maps if io[0]["limsid"] == inart] if ResultFile: - inouts = [io for io in inouts if io[1]['output-type'] == 'ResultFile'] + inouts = [io for io in inouts if io[1]["output-type"] == "ResultFile"] elif SharedResultFile: - inouts = [io for io in inouts if io[1]['output-type'] == 'SharedResultFile'] + inouts = [io for io in inouts if io[1]["output-type"] == "SharedResultFile"] elif Analyte: - inouts = [io for io in inouts if io[1]['output-type'] == 'Analyte'] - outs = [io[1]['uri'] for io in inouts] + inouts = [io for io in inouts if io[1]["output-type"] == "Analyte"] + outs = [io[1]["uri"] for io in inouts] return outs def input_per_sample(self, sample): @@ -617,14 +699,16 @@ def all_inputs(self, unique=True, resolve=False): """ # if the process has no input, that is not standard and we want to know about it try: - ids = [io[0]['limsid'] for io in self.input_output_maps] + ids = [io[0]["limsid"] for io in self.input_output_maps] except TypeError: logger.error("Process ", self, " has no input artifacts") raise TypeError if unique: ids = list(frozenset(ids)) if resolve: - return self.lims.get_batch([Artifact(self.lims, id=id) for id in ids if id is not None]) + return self.lims.get_batch( + [Artifact(self.lims, id=id) for id in ids if id is not None] + ) else: return [Artifact(self.lims, id=id) for id in ids if id is not None] @@ -633,36 +717,38 @@ def all_outputs(self, unique=True, resolve=False): if unique is true, no duplicates are returned. """ # Given how ids is structured, io[1] might be None : some process don't have an output. 
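# --- Illustrative aside, not part of the patch ---------------------------------
# A minimal sketch of navigating a Process with the helpers defined above; the
# process id is a made-up placeholder.
from genologics.config import BASEURI, PASSWORD, USERNAME
from genologics.entities import Process
from genologics.lims import Lims

lims = Lims(BASEURI, USERNAME, PASSWORD)
process = Process(lims, id="24-12345")                   # hypothetical step id

inputs = process.all_inputs(unique=True, resolve=True)   # batch-resolved Artifacts
for art in inputs:
    # Analyte outputs derived from this particular input
    outs = process.outputs_per_input(art.id, Analyte=True)
    print(art.name, "->", [o.name for o in outs])
# --------------------------------------------------------------------------------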
- ids = [io[1]['limsid'] for io in self.input_output_maps if io[1] is not None] + ids = [io[1]["limsid"] for io in self.input_output_maps if io[1] is not None] if unique: ids = list(frozenset(ids)) if resolve: - return self.lims.get_batch([Artifact(self.lims, id=id) for id in ids if id is not None]) + return self.lims.get_batch( + [Artifact(self.lims, id=id) for id in ids if id is not None] + ) else: return [Artifact(self.lims, id=id) for id in ids if id is not None] def shared_result_files(self): """Retreve all resultfiles of output-generation-type PerAllInputs.""" artifacts = self.all_outputs(unique=True) - return [a for a in artifacts if a.output_type == 'SharedResultFile'] + return [a for a in artifacts if a.output_type == "SharedResultFile"] def result_files(self): """Retreve all resultfiles of output-generation-type perInput.""" artifacts = self.all_outputs(unique=True) - return [a for a in artifacts if a.output_type == 'ResultFile'] + return [a for a in artifacts if a.output_type == "ResultFile"] def analytes(self): """Retreving the output Analytes of the process, if existing. If the process is not producing any output analytes, the input analytes are returned. Input/Output is returned as a information string. Makes aggregate processes and normal processes look the same.""" - info = 'Output' + info = "Output" artifacts = self.all_outputs(unique=True) - analytes = [a for a in artifacts if a.type == 'Analyte'] + analytes = [a for a in artifacts if a.type == "Analyte"] if len(analytes) == 0: artifacts = self.all_inputs(unique=True) - analytes = [a for a in artifacts if a.type == 'Analyte'] - info = 'Input' + analytes = [a for a in artifacts if a.type == "Analyte"] + info = "Input" return analytes, info def parent_processes(self): @@ -686,22 +772,22 @@ def step(self): class Artifact(Entity): "Any process input or output; analyte or file." 
- _URI = 'artifacts' - _TAG = 'artifact' - _PREFIX = 'art' - - name = StringDescriptor('name') - type = StringDescriptor('type') - output_type = StringDescriptor('output-type') - parent_process = EntityDescriptor('parent-process', Process) - volume = StringDescriptor('volume') - concentration = StringDescriptor('concentration') - qc_flag = StringDescriptor('qc-flag') - location = LocationDescriptor('location') - working_flag = BooleanDescriptor('working-flag') - samples = EntityListDescriptor('sample', Sample) - udf = UdfDictionaryDescriptor() - files = EntityListDescriptor(nsmap('file:file'), File) + _URI = "artifacts" + _TAG = "artifact" + _PREFIX = "art" + + name = StringDescriptor("name") + type = StringDescriptor("type") + output_type = StringDescriptor("output-type") + parent_process = EntityDescriptor("parent-process", Process) + volume = StringDescriptor("volume") + concentration = StringDescriptor("concentration") + qc_flag = StringDescriptor("qc-flag") + location = LocationDescriptor("location") + working_flag = BooleanDescriptor("working-flag") + samples = EntityListDescriptor("sample", Sample) + udf = UdfDictionaryDescriptor() + files = EntityListDescriptor(nsmap("file:file"), File) reagent_labels = ReagentLabelList() # artifact_flags XXX @@ -712,8 +798,8 @@ def input_artifact_list(self): input_artifact_list = [] try: for tuple in self.parent_process.input_output_maps: - if tuple[1]['limsid'] == self.id: - input_artifact_list.append(tuple[0]['uri']) # ['limsid']) + if tuple[1]["limsid"] == self.id: + input_artifact_list.append(tuple[0]["uri"]) # ['limsid']) except: pass return input_artifact_list @@ -723,7 +809,7 @@ def get_state(self): parts = urlparse(self.uri) params = parse_qs(parts.query) try: - return params['state'][0] + return params["state"][0] except (KeyError, IndexError): return None @@ -738,8 +824,8 @@ def container(self): def stateless(self): "returns the artefact independently of it's state" parts = urlparse(self.uri) - if 'state' in parts[4]: - stateless_uri = urlunparse([parts[0], parts[1], parts[2], parts[3], '', '']) + if "state" in parts[4]: + stateless_uri = urlunparse([parts[0], parts[1], parts[2], parts[3], "", ""]) return Artifact(self.lims, uri=stateless_uri) else: return self @@ -751,9 +837,15 @@ def stateless(self): def _get_workflow_stages_and_statuses(self): self.get() result = [] - rootnode = self.root.find('workflow-stages') - for node in rootnode.findall('workflow-stage'): - result.append((Stage(self.lims, uri=node.attrib['uri']), node.attrib['status'], node.attrib['name'])) + rootnode = self.root.find("workflow-stages") + for node in rootnode.findall("workflow-stage"): + result.append( + ( + Stage(self.lims, uri=node.attrib["uri"]), + node.attrib["status"], + node.attrib["name"], + ) + ) return result workflow_stages_and_statuses = property(_get_workflow_stages_and_statuses) @@ -764,22 +856,25 @@ class StepPools(Entity): pools : [ {'output' : output_art, 'name' : 'AAAA', 'inputs':[input_art_1, input_art_2, ...]}, ...] available_inputs : {input1:{'replicates':N}} When POSTing, only pools need to be updated, available_inputs can be left as is. - In pools, output can be left blank, Clarity will generate an output artifact. 
""" + In pools, output can be left blank, Clarity will generate an output artifact.""" _pools = None _available_inputs = None def _remove_available_inputs(self, input_art): - """ removes an input from the available inputs, one replicate at a time - """ + """removes an input from the available inputs, one replicate at a time""" self.get_available_inputs() - rep = self._available_inputs.get(input_art, {'replicates': 0}).get('replicates', 1) + rep = self._available_inputs.get(input_art, {"replicates": 0}).get( + "replicates", 1 + ) if rep > 1: - self._available_inputs[input_art]['replicates'] = rep - 1 + self._available_inputs[input_art]["replicates"] = rep - 1 elif rep == 1: - del(self._available_inputs[input_art]) + del self._available_inputs[input_art] else: - logger.info("using more inputs than replicates for input {0}".format(input_art.uri)) + logger.info( + "using more inputs than replicates for input {0}".format(input_art.uri) + ) self.available_inputs = self._available_inputs def set_available_inputs(self, available_inputs): @@ -787,8 +882,10 @@ def set_available_inputs(self, available_inputs): available_inputs_root.clear() for input_art in available_inputs: current_elem = ElementTree.SubElement(available_inputs_root, "input") - current_elem.attrib['uri'] = input_art.uri - current_elem.attrib['replicates'] = str(available_inputs[input_art]['replicates']) + current_elem.attrib["uri"] = input_art.uri + current_elem.attrib["replicates"] = str( + available_inputs[input_art]["replicates"] + ) self._available_inputs = available_inputs def get_available_inputs(self): @@ -796,10 +893,12 @@ def get_available_inputs(self): self.get() self._available_inputs = {} for ai_node in self.root.find("available-inputs").findall("input"): - input = Artifact(self.lims, uri=ai_node.attrib['uri']) + input = Artifact(self.lims, uri=ai_node.attrib["uri"]) self._available_inputs[input] = {} - if 'replicates' in ai_node.attrib: - self._available_inputs[input]['replicates'] = int(ai_node.attrib['replicates']) + if "replicates" in ai_node.attrib: + self._available_inputs[input]["replicates"] = int( + ai_node.attrib["replicates"] + ) return self._available_inputs @@ -808,14 +907,18 @@ def get_pools(self): self.get() self._pools = [] - for idx, pool_node in enumerate(self.root.find("pooled-inputs").findall("pool")): - pool_name = pool_node.attrib.get('name', "Pool #{0}".format(idx+1)) - pool_object = {'name': pool_name, 'inputs': [], 'output': None} - if pool_node.attrib.get('output-uri', False): - pool_object['output'] = Artifact(self.lims, uri=pool_node.attrib['output-uri']) + for idx, pool_node in enumerate( + self.root.find("pooled-inputs").findall("pool") + ): + pool_name = pool_node.attrib.get("name", "Pool #{0}".format(idx + 1)) + pool_object = {"name": pool_name, "inputs": [], "output": None} + if pool_node.attrib.get("output-uri", False): + pool_object["output"] = Artifact( + self.lims, uri=pool_node.attrib["output-uri"] + ) for input_node in pool_node.findall("input"): - input = Artifact(self.lims, uri=input_node.attrib['uri']) - pool_object['inputs'].append(input) + input = Artifact(self.lims, uri=input_node.attrib["uri"]) + pool_object["inputs"].append(input) self._pools.append(pool_object) @@ -825,13 +928,15 @@ def set_pools(self, pools): pool_root = self.root.find("pooled-inputs") pool_root.clear() for idx, pool_obj in enumerate(pools): - current_pool = ElementTree.SubElement(pool_root, 'pool') - if pool_obj.get('output', False): - current_pool.attrib['output-uri'] = pool_obj['output'].uri - 
current_pool.attrib['name'] = pool_obj.get('name', 'Pool #{0}'.format(idx+1)) - for input_art in pool_obj.get('inputs', []): - current_input = ElementTree.SubElement(current_pool, 'input') - current_input.attrib['uri'] = input_art.uri + current_pool = ElementTree.SubElement(pool_root, "pool") + if pool_obj.get("output", False): + current_pool.attrib["output-uri"] = pool_obj["output"].uri + current_pool.attrib["name"] = pool_obj.get( + "name", "Pool #{0}".format(idx + 1) + ) + for input_art in pool_obj.get("inputs", []): + current_input = ElementTree.SubElement(current_pool, "input") + current_input.attrib["uri"] = input_art.uri self._remove_available_inputs(input_art) self._pools = pools @@ -842,6 +947,7 @@ def set_pools(self, pools): class StepPlacements(Entity): """Placements from within a step. Supports POST""" + _placementslist = None # [[A,(C,'A:1')][A,(C,'A:2')]] where A is an Artifact and C a Container @@ -850,13 +956,16 @@ def get_placement_list(self): # Only fetch the data once. self.get() self._placementslist = [] - for node in self.root.find('output-placements').findall('output-placement'): - input = Artifact(self.lims, uri=node.attrib['uri']) + for node in self.root.find("output-placements").findall("output-placement"): + input = Artifact(self.lims, uri=node.attrib["uri"]) location = (None, None) - if node.find('location') is not None: + if node.find("location") is not None: location = ( - Container(self.lims, uri=node.find('location').find('container').attrib['uri']), - node.find('location').find('value').text + Container( + self.lims, + uri=node.find("location").find("container").attrib["uri"], + ), + node.find("location").find("value").text, ) self._placementslist.append([input, location]) return self._placementslist @@ -864,30 +973,33 @@ def get_placement_list(self): def set_placement_list(self, value): containers = set() self.get_placement_list() - placement_dict = {x[0].stateless.uri:x for x in value } - for node in self.root.find('output-placements').findall('output-placement'): - location = placement_dict[node.attrib['uri']][1] + placement_dict = {x[0].stateless.uri: x for x in value} + for node in self.root.find("output-placements").findall("output-placement"): + location = placement_dict[node.attrib["uri"]][1] container = location[0] well = location[1] if container and location: containers.add(container) - if node.find('location') is not None: - cont_el = node.find('location').find('container') - cont_el.attrib['uri'] = container.uri - cont_el.attrib['limsid'] = container.id - value_el = node.find('location').find('value') + if node.find("location") is not None: + cont_el = node.find("location").find("container") + cont_el.attrib["uri"] = container.uri + cont_el.attrib["limsid"] = container.id + value_el = node.find("location").find("value") value_el.text = well else: - loc_el = ElementTree.SubElement(node, 'location') - cont_el = ElementTree.SubElement(loc_el, 'container', - {'uri': container.uri, 'limsid': container.id}) - well_el = ElementTree.SubElement(loc_el, 'value') + loc_el = ElementTree.SubElement(node, "location") + cont_el = ElementTree.SubElement( + loc_el, + "container", + {"uri": container.uri, "limsid": container.id}, + ) + well_el = ElementTree.SubElement(loc_el, "value") well_el.text = well # not supported in the constructor # Handle selected containers sc = self.root.find("selected-containers") sc.clear() for cont in containers: - ElementTree.SubElement(sc, 'container', uri=cont.uri) + ElementTree.SubElement(sc, "container", uri=cont.uri) 
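# --- Illustrative aside, not part of the patch ---------------------------------
# A minimal sketch of the pooling and placement sub-entities used above, for a
# step that is currently running in a pooling/placement stage. The step id is a
# made-up placeholder, and the final POST of the modified XML is left out.
from genologics.config import BASEURI, PASSWORD, USERNAME
from genologics.entities import Step
from genologics.lims import Lims

lims = Lims(BASEURI, USERNAME, PASSWORD)
step = Step(lims, id="24-12345")                   # hypothetical step id

# Pooling: each pool is a dict with 'name', 'inputs' and (optionally) 'output'.
available = step.step_pools.get_available_inputs()
step.step_pools.set_pools(
    [{"name": "Pool #1", "inputs": list(available.keys()), "output": None}]
)

# Placement: a list of [artifact, (container, well)] pairs.
container = step.placements.get_selected_containers()[0]
placements = step.placements.get_placement_list()
for i, (art, _) in enumerate(placements):
    placements[i] = [art, (container, "A:{0}".format(i + 1))]
step.placements.set_placement_list(placements)
# --------------------------------------------------------------------------------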
self._placementslist = value placement_list = property(get_placement_list, set_placement_list) @@ -898,8 +1010,10 @@ def get_selected_containers(self): _selected_containers = [] if not _selected_containers: self.get() - for node in self.root.find('selected-containers').findall('container'): - _selected_containers.append(Container(self.lims, uri=node.attrib['uri'])) + for node in self.root.find("selected-containers").findall("container"): + _selected_containers.append( + Container(self.lims, uri=node.attrib["uri"]) + ) return _selected_containers @@ -908,6 +1022,7 @@ def get_selected_containers(self): class StepActions(Entity): """Actions associated with a step""" + _escalation = None @property @@ -915,59 +1030,70 @@ def escalation(self): if not self._escalation: self.get() self._escalation = {} - for node in self.root.findall('escalation'): - self._escalation['artifacts'] = [] - self._escalation['author'] = Researcher(self.lims, - uri=node.find('request').find('author').attrib.get('uri')) + for node in self.root.findall("escalation"): + self._escalation["artifacts"] = [] + self._escalation["author"] = Researcher( + self.lims, uri=node.find("request").find("author").attrib.get("uri") + ) try: - self._escalation['request'] = node.find('request').find('comment').text + self._escalation["request"] = ( + node.find("request").find("comment").text + ) except: - self._escalation['request'] = "" + self._escalation["request"] = "" - if node.find('review') is not None: # recommended by the Etree doc - self._escalation['status'] = 'Reviewed' - self._escalation['reviewer'] = Researcher(self.lims, - uri=node.find('review').find('author').attrib.get('uri')) + if node.find("review") is not None: # recommended by the Etree doc + self._escalation["status"] = "Reviewed" + self._escalation["reviewer"] = Researcher( + self.lims, + uri=node.find("review").find("author").attrib.get("uri"), + ) try: - self._escalation['answer'] = node.find('review').find('comment').text + self._escalation["answer"] = ( + node.find("review").find("comment").text + ) except: - self._escalation['answer'] = "" + self._escalation["answer"] = "" else: - self._escalation['status'] = 'Pending' + self._escalation["status"] = "Pending" - for node2 in node.findall('escalated-artifacts'): - art = self.lims.get_batch([Artifact(self.lims, uri=ch.attrib.get('uri')) for ch in node2]) - self._escalation['artifacts'].extend(art) + for node2 in node.findall("escalated-artifacts"): + art = self.lims.get_batch( + [Artifact(self.lims, uri=ch.attrib.get("uri")) for ch in node2] + ) + self._escalation["artifacts"].extend(art) return self._escalation def get_next_actions(self): actions = [] self.get() - if self.root.find('next-actions') is not None: - for node in self.root.find('next-actions').findall('next-action'): + if self.root.find("next-actions") is not None: + for node in self.root.find("next-actions").findall("next-action"): action = { - 'artifact': Artifact(self.lims, node.attrib.get('artifact-uri')), - 'action': node.attrib.get('action'), + "artifact": Artifact(self.lims, node.attrib.get("artifact-uri")), + "action": node.attrib.get("action"), } - if node.attrib.get('step-uri'): - action['step'] = Step(self.lims, uri=node.attrib.get('step-uri')) - if node.attrib.get('rework-step-uri'): - action['rework-step'] = Step(self.lims, uri=node.attrib.get('rework-step-uri')) + if node.attrib.get("step-uri"): + action["step"] = Step(self.lims, uri=node.attrib.get("step-uri")) + if node.attrib.get("rework-step-uri"): + action["rework-step"] = Step( + 
self.lims, uri=node.attrib.get("rework-step-uri") + ) actions.append(action) return actions def set_next_actions(self, actions): - action_dict = {a['artifact'].uri:a for a in actions} - for node in self.root.find('next-actions').findall('next-action'): - art_uri = node.attrib.get('artifact-uri') + action_dict = {a["artifact"].uri: a for a in actions} + for node in self.root.find("next-actions").findall("next-action"): + art_uri = node.attrib.get("artifact-uri") action = action_dict[art_uri] - if 'action' in action: - node.attrib['action'] = action.get('action') - if 'step-uri' in action: - node.attrib['step-uri'] = action.get('step-uri') - if 'rework-step-uri' in action: - node.attrib['rework-step-uri'] = action.get('rework-step-uri') + if "action" in action: + node.attrib["action"] = action.get("action") + if "step-uri" in action: + node.attrib["step-uri"] = action.get("step-uri") + if "rework-step-uri" in action: + node.attrib["rework-step-uri"] = action.get("rework-step-uri") next_actions = property(get_next_actions, set_next_actions) @@ -976,78 +1102,82 @@ class StepProgramStatus(Entity): """Allows custom handling of program status. message supports HTML. Cross handling of EPPs is possible. Supports PUT""" - status = StringDescriptor('status') - message = StringDescriptor('message') + + status = StringDescriptor("status") + message = StringDescriptor("message") class ReagentKit(Entity): """Type of Reagent with information about the provider""" + _URI = "reagentkits" _TAG = "reagent-kit" - _PREFIX = 'kit' + _PREFIX = "kit" - name = StringDescriptor('name') - supplier = StringDescriptor('supplier') - website = StringDescriptor('website') - archived = BooleanDescriptor('archived') + name = StringDescriptor("name") + supplier = StringDescriptor("supplier") + website = StringDescriptor("website") + archived = BooleanDescriptor("archived") class ReagentLot(Entity): """Reagent Lots contain information about a particualr lot of reagent used in a step""" + _URI = "reagentlots" _TAG = "reagent-lot" - _PREFIX = 'lot' + _PREFIX = "lot" - reagent_kit = EntityDescriptor('reagent-kit', ReagentKit) - name = StringDescriptor('name') - lot_number = StringDescriptor('lot-number') - created_date = StringDescriptor('created-date') - last_modified_date = StringDescriptor('last-modified-date') - expiry_date = StringDescriptor('expiry-date') - created_by = EntityDescriptor('created-by', Researcher) - last_modified_by = EntityDescriptor('last-modified-by', Researcher) - status = StringDescriptor('status') - usage_count = IntegerDescriptor('usage-count') + reagent_kit = EntityDescriptor("reagent-kit", ReagentKit) + name = StringDescriptor("name") + lot_number = StringDescriptor("lot-number") + created_date = StringDescriptor("created-date") + last_modified_date = StringDescriptor("last-modified-date") + expiry_date = StringDescriptor("expiry-date") + created_by = EntityDescriptor("created-by", Researcher) + last_modified_by = EntityDescriptor("last-modified-by", Researcher) + status = StringDescriptor("status") + usage_count = IntegerDescriptor("usage-count") class StepReagentLots(Entity): - reagent_lots = NestedEntityListDescriptor('reagent-lot', ReagentLot, 'reagent-lots') + reagent_lots = NestedEntityListDescriptor("reagent-lot", ReagentLot, "reagent-lots") + class StepDetails(Entity): """Detail associated with a step""" - input_output_maps = InputOutputMapList('input-output-maps') - udf = UdfDictionaryDescriptor('fields') - udt = UdtDictionaryDescriptor('fields') + input_output_maps = 
InputOutputMapList("input-output-maps") + udf = UdfDictionaryDescriptor("fields") + udt = UdtDictionaryDescriptor("fields") -class StepReagents(Entity): - reagent_category = StringDescriptor('reagent-category') +class StepReagents(Entity): + reagent_category = StringDescriptor("reagent-category") output_reagents = OutputReagentList(Artifact) class Step(Entity): "Step, as defined by the genologics API." - _URI = 'steps' - _PREFIX = 'stp' + _URI = "steps" + _PREFIX = "stp" - current_state = StringAttributeDescriptor('current-state') - _reagent_lots = EntityDescriptor('reagent-lots', StepReagentLots) - actions = EntityDescriptor('actions', StepActions) - date_started = StringDescriptor('date-started') - date_completed = StringDescriptor('date-completed') - placements = EntityDescriptor('placements', StepPlacements) - details = EntityDescriptor('details', StepDetails) - step_pools = EntityDescriptor('pools', StepPools) - program_status = EntityDescriptor('program-status', StepProgramStatus) - reagents = EntityDescriptor('reagents', StepReagents) + current_state = StringAttributeDescriptor("current-state") + _reagent_lots = EntityDescriptor("reagent-lots", StepReagentLots) + actions = EntityDescriptor("actions", StepActions) + date_started = StringDescriptor("date-started") + date_completed = StringDescriptor("date-completed") + placements = EntityDescriptor("placements", StepPlacements) + details = EntityDescriptor("details", StepDetails) + step_pools = EntityDescriptor("pools", StepPools) + program_status = EntityDescriptor("program-status", StepProgramStatus) + reagents = EntityDescriptor("reagents", StepReagents) def advance(self): self.get() self.root = self.lims.post( uri="{0}/advance".format(self.uri), - data=self.lims.tostring(ElementTree.ElementTree(self.root)) + data=self.lims.tostring(ElementTree.ElementTree(self.root)), ) @property @@ -1064,33 +1194,35 @@ def create(cls, lims, protocol_step, container_type, inputs, **kwargs): These need to be queued for that step for the query to be successful. 
""" if not isinstance(protocol_step, ProtocolStep): - raise TypeError('%s is not of type ProtocolStep'%protocol_step) + raise TypeError("%s is not of type ProtocolStep" % protocol_step) elif not isinstance(container_type, Containertype): - raise TypeError('%s is not of type Containertype'%container_type) + raise TypeError("%s is not of type Containertype" % container_type) elif not all([isinstance(input, Artifact) for input in inputs]): - raise TypeError('%s does not contain only items of type Artifact'%inputs) + raise TypeError("%s does not contain only items of type Artifact" % inputs) - instance = super(Step, cls)._create(lims, creation_tag='step-creation', **kwargs) + instance = super(Step, cls)._create( + lims, creation_tag="step-creation", **kwargs + ) # Setup configuration element - configuration_element = ElementTree.SubElement(instance.root, 'configuration') - configuration_element.attrib['uri'] = protocol_step.uri + configuration_element = ElementTree.SubElement(instance.root, "configuration") + configuration_element.attrib["uri"] = protocol_step.uri configuration_element.text = protocol_step.name # Setup container type element - container_type_element = ElementTree.SubElement(instance.root, 'container-type') + container_type_element = ElementTree.SubElement(instance.root, "container-type") container_type_element.text = container_type.name # Setup inputs element - inputs_element = ElementTree.SubElement(instance.root, 'inputs') + inputs_element = ElementTree.SubElement(instance.root, "inputs") for input in inputs: - input_element = ElementTree.SubElement(inputs_element, 'input') - input_element.attrib['uri'] = input.uri + input_element = ElementTree.SubElement(inputs_element, "input") + input_element.attrib["uri"] = input.uri data = lims.tostring(ElementTree.ElementTree(instance.root)) instance.root = lims.post(uri=lims.get_uri(cls._URI), data=data) - instance._uri = instance.root.attrib['uri'] + instance._uri = instance.root.attrib["uri"] return instance @@ -1098,95 +1230,111 @@ def create(cls, lims, protocol_step, container_type, inputs, **kwargs): class ProtocolStep(Entity): """Steps key in the Protocol object""" - _TAG = 'step' - - name = StringAttributeDescriptor("name") - type = EntityDescriptor('process-type', Processtype) - permittedcontainers = NestedStringListDescriptor('container-type', 'permitted-containers') - permitted_control_types = NestedEntityListDescriptor('control-type', ControlType, 'permitted-control-types') - required_reagent_kits = NestedEntityListDescriptor('reagent-kit', ReagentKit, 'required-reagent-kits') - queue_fields = NestedAttributeListDescriptor('queue-field', 'queue-fields') - step_fields = NestedAttributeListDescriptor('step-field', 'step-fields') - sample_fields = NestedAttributeListDescriptor('sample-field', 'sample-fields') - step_properties = NestedAttributeListDescriptor('step-property', 'step-properties') - epp_triggers = NestedAttributeListDescriptor('epp-trigger', 'epp-triggers') + _TAG = "step" + + name = StringAttributeDescriptor("name") + type = EntityDescriptor("process-type", Processtype) + permittedcontainers = NestedStringListDescriptor( + "container-type", "permitted-containers" + ) + permitted_control_types = NestedEntityListDescriptor( + "control-type", ControlType, "permitted-control-types" + ) + required_reagent_kits = NestedEntityListDescriptor( + "reagent-kit", ReagentKit, "required-reagent-kits" + ) + queue_fields = NestedAttributeListDescriptor("queue-field", "queue-fields") + step_fields = 
NestedAttributeListDescriptor("step-field", "step-fields") + sample_fields = NestedAttributeListDescriptor("sample-field", "sample-fields") + step_properties = NestedAttributeListDescriptor("step-property", "step-properties") + epp_triggers = NestedAttributeListDescriptor("epp-trigger", "epp-triggers") class Protocol(Entity): """Protocol, holding ProtocolSteps and protocol-properties""" - _URI = 'configuration/protocols' - _TAG = 'protocol' - steps = NestedEntityListDescriptor('step', ProtocolStep, 'steps') - properties = NestedAttributeListDescriptor('protocol-property', 'protocol-properties') + _URI = "configuration/protocols" + _TAG = "protocol" + steps = NestedEntityListDescriptor("step", ProtocolStep, "steps") + properties = NestedAttributeListDescriptor( + "protocol-property", "protocol-properties" + ) class Automation(Entity): """Automation, holding Automation configurations""" - _URI = 'configuration/automations' - _TAG = 'automation' - process_types = NestedEntityListDescriptor('process-type', Processtype, 'process-types') - string = NestedStringDescriptor('string') - name = StringAttributeDescriptor('name') - context = NestedStringDescriptor('context') + _URI = "configuration/automations" + _TAG = "automation" + process_types = NestedEntityListDescriptor( + "process-type", Processtype, "process-types" + ) + string = NestedStringDescriptor("string") + name = StringAttributeDescriptor("name") + context = NestedStringDescriptor("context") class Stage(Entity): """Holds Protocol/Workflow""" - name = StringAttributeDescriptor('name') - index = IntegerAttributeDescriptor('index') - protocol = EntityDescriptor('protocol', Protocol) - step = EntityDescriptor('step', ProtocolStep) + + name = StringAttributeDescriptor("name") + index = IntegerAttributeDescriptor("index") + protocol = EntityDescriptor("protocol", Protocol) + step = EntityDescriptor("step", ProtocolStep) class Workflow(Entity): - """ Workflow, introduced in 3.5""" + """Workflow, introduced in 3.5""" + _URI = "configuration/workflows" _TAG = "workflow" - name = StringAttributeDescriptor("name") - status = StringAttributeDescriptor("status") - protocols = NestedEntityListDescriptor('protocol', Protocol, 'protocols') - stages = NestedEntityListDescriptor('stage', Stage, 'stages') + name = StringAttributeDescriptor("name") + status = StringAttributeDescriptor("status") + protocols = NestedEntityListDescriptor("protocol", Protocol, "protocols") + stages = NestedEntityListDescriptor("stage", Stage, "stages") class ReagentType(Entity): """Reagent Type, usually, indexes for sequencing""" + _URI = "reagenttypes" _TAG = "reagent-type" - _PREFIX = 'rtp' + _PREFIX = "rtp" - category = StringDescriptor('reagent-category') - name = StringAttributeDescriptor("name") + category = StringDescriptor("reagent-category") + name = StringAttributeDescriptor("name") def __init__(self, lims, uri=None, id=None): super(ReagentType, self).__init__(lims, uri, id) assert self.uri is not None self.root = lims.get(self.uri) self.sequence = None - for t in self.root.findall('special-type'): + for t in self.root.findall("special-type"): if t.attrib.get("name") == "Index": for child in t.findall("attribute"): if child.attrib.get("name") == "Sequence": self.sequence = child.attrib.get("value") + class Queue(Entity): """Queue of a given step. 
Will recursively get all the pages of artifacts, and therefore, can be quite slow to load""" + _URI = "queues" - _TAG= "queue" + _TAG = "queue" _PREFIX = "que" - artifacts = MultiPageNestedEntityListDescriptor("artifact", Artifact, "artifacts") -Sample.artifact = EntityDescriptor('artifact', Artifact) -StepActions.step = EntityDescriptor('step', Step) -Stage.workflow = EntityDescriptor('workflow', Workflow) -Artifact.workflow_stages = NestedEntityListDescriptor('workflow-stage', Stage, 'workflow-stages') -Step.configuration = EntityDescriptor('configuration', ProtocolStep) -StepProgramStatus.configuration = EntityDescriptor('configuration', ProtocolStep) -Researcher.roles = NestedEntityListDescriptor('role', Role, 'credentials') +Sample.artifact = EntityDescriptor("artifact", Artifact) +StepActions.step = EntityDescriptor("step", Step) +Stage.workflow = EntityDescriptor("workflow", Workflow) +Artifact.workflow_stages = NestedEntityListDescriptor( + "workflow-stage", Stage, "workflow-stages" +) +Step.configuration = EntityDescriptor("configuration", ProtocolStep) +StepProgramStatus.configuration = EntityDescriptor("configuration", ProtocolStep) +Researcher.roles = NestedEntityListDescriptor("role", Role, "credentials") diff --git a/genologics/epp.py b/genologics/epp.py index 08e18a1..58d529e 100644 --- a/genologics/epp.py +++ b/genologics/epp.py @@ -1,4 +1,5 @@ from __future__ import print_function + """Contains useful and reusable code for EPP scripts. Classes, methods and exceptions. @@ -20,70 +21,81 @@ from time import strftime, localtime import csv -def attach_file(src,resource): + +def attach_file(src, resource): """Attach file at src to given resource Copies the file to the current directory, EPP node will upload this file automatically if the process output is properly set up""" original_name = os.path.basename(src) - new_name = resource.id + '_' + original_name + new_name = resource.id + "_" + original_name dir = os.getcwd() - location = os.path.join(dir,new_name) - copy(src,location) + location = os.path.join(dir, new_name) + copy(src, location) return location + class EmptyError(ValueError): "Raised if an iterator is unexpectedly empty." + pass + class NotUniqueError(ValueError): "Raised if there are unexpectedly more than 1 item in an iterator" + pass -def unique_check(l,msg): + +def unique_check(l, msg): "Check that l is of length 1, otherwise raise error, with msg appended" - if len(l)==0: + if len(l) == 0: raise EmptyError("No item found for {0}".format(msg)) - elif len(l)!=1: + elif len(l) != 1: raise NotUniqueError("Multiple items found for {0}".format(msg)) -def set_field(element): + +def set_field(element): try: element.put() except (TypeError, HTTPError) as e: logging.warning("Error while updating element: {0}".format(e)) - -class EppLogger(object): + +class EppLogger(object): """Context manager for logging module useful for EPP script execution. This context manager (CM) automatically logs what script that is executed, with what parameters it was executed and what version (including) commit hash of the genologics package used. Since EPP scripts are often ran - automatically by the genologics LIMS client, the stdout and stderr is + automatically by the genologics LIMS client, the stdout and stderr is captured and logged within this CM. Stderr is duplicated so that the last line can be shown in the GUI. 
In order to track multiple runs - of the same process from the genologics LIMS GUI, the previous log + of the same process from the genologics LIMS GUI, the previous log files can be prepended. Also a main log file can be used that is supposed to be common for all scripts executed on the server. - + """ - PACKAGE = 'genologics' + PACKAGE = "genologics" + def __enter__(self): - logging.info('Executing file: {0}'.format(sys.argv[0])) - logging.info('with parameters: {0}'.format(sys.argv[1:])) + logging.info("Executing file: {0}".format(sys.argv[0])) + logging.info("with parameters: {0}".format(sys.argv[1:])) try: - logging.info('Version of {0}: '.format(self.PACKAGE) + - pkg_resources.require(self.PACKAGE)[0].version) + logging.info( + "Version of {0}: ".format(self.PACKAGE) + + pkg_resources.require(self.PACKAGE)[0].version + ) except DistributionNotFound as e: logging.error(e) - logging.error(('Make sure you have the {0} ' - 'package installed').format(self.PACKAGE)) + logging.error( + ("Make sure you have the {0} " "package installed").format(self.PACKAGE) + ) sys.exit(-1) return self - def __exit__(self,exc_type,exc_val,exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # If no exception has occured in block, turn off logging. if not exc_type: logging.shutdown() @@ -92,12 +104,12 @@ def __exit__(self,exc_type,exc_val,exc_tb): # Do not repress possible exception return False - def __init__(self,log_file=None,level=logging.INFO,lims=None,prepend=False): - """ Initialize the logger with custom settings. + def __init__(self, log_file=None, level=logging.INFO, lims=None, prepend=False): + """Initialize the logger with custom settings. Arguments: log_file -- file to write individual log to - + Keyword Arguments: level -- Logging level, default logging.INFO lims -- Lims instance, needed for prepend to work @@ -112,45 +124,44 @@ def __init__(self,log_file=None,level=logging.INFO,lims=None,prepend=False): self.prepend_old_log() # Loggers that will capture stdout and stderr respectively - stdout_logger = logging.getLogger('STDOUT') + stdout_logger = logging.getLogger("STDOUT") self.slo = self.StreamToLogger(stdout_logger, logging.INFO) self.saved_stdout = sys.stdout sys.stdout = self.slo - stderr_logger = logging.getLogger('STDERR') + stderr_logger = logging.getLogger("STDERR") self.saved_stderr = sys.stderr # Duplicate stderr stream to log - self.sle = self.StreamToLogger(stderr_logger, logging.INFO, - self.saved_stderr) + self.sle = self.StreamToLogger(stderr_logger, logging.INFO, self.saved_stderr) sys.stderr = self.sle # Root logger with filehandler(s) self.logger = logging.getLogger() self.logger.setLevel(self.level) - formatter = logging.Formatter( - '%(asctime)s:%(levelname)s:%(name)s:%(message)s') + formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") if self.log_file: - individual_fh = logging.FileHandler(self.log_file,mode='a') + individual_fh = logging.FileHandler(self.log_file, mode="a") individual_fh.setFormatter(formatter) self.logger.addHandler(individual_fh) if MAIN_LOG: # Rotating file handler, that will create up to 10 backup logs, # each no bigger than 100MB. 
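# --- Illustrative aside, not part of the patch ---------------------------------
# A minimal sketch of how EppLogger is typically used from an EPP script, as
# described in the docstring above. The --pid and --log values are placeholders
# normally supplied by the LIMS automation string.
from argparse import ArgumentParser

from genologics.config import BASEURI, PASSWORD, USERNAME
from genologics.entities import Process
from genologics.epp import EppLogger
from genologics.lims import Lims

parser = ArgumentParser()
parser.add_argument("--pid", help="LIMS id of the current process")
parser.add_argument("--log", help="LIMS id of the log file artifact")
args = parser.parse_args()

lims = Lims(BASEURI, USERNAME, PASSWORD)
# prepend=True pulls down the previous log artifact so repeated runs of the
# same step produce one continuous log, as explained above.
with EppLogger(log_file=args.log, lims=lims, prepend=True):
    process = Process(lims, id=args.pid)
    print("Working on", process.type.name)  # stdout is captured into the log
# --------------------------------------------------------------------------------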
- main_fh = RotatingFileHandler(MAIN_LOG,mode='a', - maxBytes=1e8,backupCount=10) + main_fh = RotatingFileHandler( + MAIN_LOG, mode="a", maxBytes=1e8, backupCount=10 + ) main_fh.setFormatter(formatter) self.logger.addHandler(main_fh) else: - self.logger.warning('No main log file found.') + self.logger.warning("No main log file found.") - def prepend_old_log(self, external_log_file = None): - """Prepend the old log to the new log. + def prepend_old_log(self, external_log_file=None): + """Prepend the old log to the new log. - The location of the old log file is retrieved through the REST api. + The location of the old log file is retrieved through the REST api. In order to work, the script should be executed on the LIMS server since the location on the disk is parsed out from the sftp string - and then used for local copy of file. + and then used for local copy of file. This method does not use logging since that could mess up the logging settings, instead warnings are printed to stderr.""" @@ -162,36 +173,44 @@ def prepend_old_log(self, external_log_file = None): local_log_path = os.path.join(os.getcwd(), log_file_name) if not os.path.isfile(local_log_path): try: - log_artifact = Artifact(self.lims,id = log_file_name) + log_artifact = Artifact(self.lims, id=log_file_name) log_artifact.get() if log_artifact.files: log_path = log_artifact.files[0].content_location.split( - self.lims.baseuri.split(':')[1])[1] + self.lims.baseuri.split(":")[1] + )[1] copy(log_path, local_log_path) - with open(local_log_path,'a') as f: - f.write('='*80+'\n') - except HTTPError: # Probably no artifact found, skip prepending - print(('No log file artifact found ' - 'for id: {0}').format(log_file_name), file=sys.stderr) - except IOError as e: # Probably some path was wrong in copy - print(('Log could not be prepended, ' - 'make sure {0} and {1} are ' - 'proper paths.').format(log_path, - log_file_name), file=sys.stderr) + with open(local_log_path, "a") as f: + f.write("=" * 80 + "\n") + except HTTPError: # Probably no artifact found, skip prepending + print( + ("No log file artifact found " "for id: {0}").format(log_file_name), + file=sys.stderr, + ) + except IOError as e: # Probably some path was wrong in copy + print( + ( + "Log could not be prepended, " + "make sure {0} and {1} are " + "proper paths." + ).format(log_path, log_file_name), + file=sys.stderr, + ) raise e class StreamToLogger(object): - """Fake file-like stream object that redirects writes to a logger + """Fake file-like stream object that redirects writes to a logger instance. - - source: + + source: http://www.electricmonk.nl/log/2011/08/14/ redirect-stdout-and-stderr-to-a-logger-in-python/ """ + def __init__(self, logger, log_level=logging.INFO, stream=None): self.logger = logger self.log_level = log_level - self.linebuf = '' + self.linebuf = "" self.stream = stream def write(self, buf): @@ -200,26 +219,27 @@ def write(self, buf): for line in buf.rstrip().splitlines(): self.logger.log(self.log_level, line.rstrip()) -class ReadResultFiles(): + +class ReadResultFiles: """Class to read pars different kinds of result files from a process. - The class stores the parsed content of all shared result files in a - dictionary 'shared_files'. The data is parsed as lists of lists. """ + The class stores the parsed content of all shared result files in a + dictionary 'shared_files'. 
The data is parsed as lists of lists.""" def __init__(self, process): self.process = process - self.shared_files = self._pars_file('SharedResultFile') - self.perinput_files = self._pars_file('ResultFile') + self.shared_files = self._pars_file("SharedResultFile") + self.perinput_files = self._pars_file("ResultFile") def get_file_path(self, artifact): if len(artifact.files) > 0: file = artifact.files[0] - file_path = file.content_location.split('scilifelab.se')[1] - if len(file_path.split('.')) > 1: + file_path = file.content_location.split("scilifelab.se")[1] + if len(file_path.split(".")) > 1: return file_path return None def _pars_file(self, output_type): - """Reads a csv or txt into a list of lists, where sub lists are lines + """Reads a csv or txt into a list of lists, where sub lists are lines of the csv.""" outs = self.process.all_outputs() outarts = [a for a in outs if a.output_type == output_type] @@ -227,90 +247,101 @@ def _pars_file(self, output_type): for outart in outarts: file_path = self.get_file_path(outart) if file_path: - of = open(file_path ,'r') - file_ext = file_path.split('.')[-1] - if file_ext == 'csv': + of = open(file_path, "r") + file_ext = file_path.split(".")[-1] + if file_ext == "csv": pf = [row for row in csv.reader(of.read().splitlines())] parsed_files[outart.name] = pf - elif file_ext == 'txt': - pf = [row.strip().strip('\\').split('\t') for row in of.readlines()] + elif file_ext == "txt": + pf = [row.strip().strip("\\").split("\t") for row in of.readlines()] parsed_files[outart.name] = pf of.close() return parsed_files - def format_file(self, parsed_file, name = '', first_header = None, - header_row = None, root_key_col = 0, find_keys = []): + def format_file( + self, + parsed_file, + name="", + first_header=None, + header_row=None, + root_key_col=0, + find_keys=[], + ): """Function to format a parsed csv or txt file. Arguments and Output: parsed_file A list of lists where sublists are rows of the csv. - name Name of parsed file. - first_header First column of the heather section in the file. + name Name of parsed file. + first_header First column of the heather section in the file. default value is 'None' - root_key_col If you want the root keys to be given by some other - column than the first one, set root_key_col to the + root_key_col If you want the root keys to be given by some other + column than the first one, set root_key_col to the column number. - header_row Instead of specifying first_header you can choose - from what line to reed by setting header_row to the + header_row Instead of specifying first_header you can choose + from what line to reed by setting header_row to the row number where you want to start reading. - find_keys List of row names to look for. Will exclude all + find_keys List of row names to look for. Will exclude all others. - file_info Dict of dicts. Keys of root dict are the first - column in the csv starting from the line after the - heather line. Keys of sub dicts are the columns of + file_info Dict of dicts. Keys of root dict are the first + column in the csv starting from the line after the + heather line. 
Keys of sub dicts are the columns of the heather line.""" file_info = {} keys = [] - error_message = '' + error_message = "" duplicated_lines = [] - exeptions = ['Sample','Fail', ''] + exeptions = ["Sample", "Fail", ""] if type(first_header) is not list: if first_header: - first_header=[first_header] + first_header = [first_header] else: - first_header=[] + first_header = [] for row, line in enumerate(parsed_file): - if keys and len(line)==len(keys): + if keys and len(line) == len(keys): root_key = line[root_key_col] cond1 = find_keys == [] and root_key not in exeptions cond2 = root_key in find_keys if root_key in file_info: duplicated_lines.append(root_key) - elif (cond1 or cond2): + elif cond1 or cond2: file_info[root_key] = {} if not duplicated_lines: for col in range(len(keys)): - if keys[col] != '': + if keys[col] != "": file_info[root_key][keys[col]] = line[col] - elif keys[col-1] != '': - tupl = (file_info[root_key][keys[col-1]], line[col]) - file_info[root_key][keys[col-1]] = tupl - + elif keys[col - 1] != "": + tupl = (file_info[root_key][keys[col - 1]], line[col]) + file_info[root_key][keys[col - 1]] = tupl + head = line[root_key_col] if len(line) > root_key_col else None if first_header and head in first_header: keys = line elif header_row and row == header_row: keys = line if duplicated_lines: - error_message = ("Row names {0} occurs more than once in file {1}. " - "Fix the file to continue. ").format(','.join(duplicated_lines), name) + error_message = ( + "Row names {0} occurs more than once in file {1}. " + "Fix the file to continue. " + ).format(",".join(duplicated_lines), name) if not file_info: - error_message = error_message + "Could not format parsed file {0}.".format(name) + error_message = error_message + "Could not format parsed file {0}.".format( + name + ) if error_message: print(error_message, file=sys.stderr) - sys.exit(-1) + sys.exit(-1) return file_info class CopyField(object): - """Class to copy any filed (or udf) from any lims element to any + """Class to copy any filed (or udf) from any lims element to any udf on any other lims element arguments: s_elt source element - instance of a type d_elt destination element - instance of a type - s_field_name name of source field (or udf) to be copied + s_field_name name of source field (or udf) to be copied d_udf_name name of destination udf name. If not specified s_field_name will be used. 
@@ -319,7 +350,8 @@ class CopyField(object): Written by Maya Brandi and Johannes Alnberg """ - def __init__(self, s_elt, d_elt, s_field_name, d_udf_name = None): + + def __init__(self, s_elt, d_elt, s_field_name, d_udf_name=None): if not d_udf_name: d_udf_name = s_field_name self.s_elt = s_elt @@ -354,31 +386,43 @@ def _set_udf(self, elt, udf_name, val): def _log_before_change(self, changelog_f=None): if changelog_f: - d = {'ct' : self._current_time(), - 's_udf' : self.s_field_name, - 'sn' : self.d_elt.name, - 'si' : self.d_elt.id, - 'su' : self.old_dest_udf, - 'nv' : self.s_field, - 'd_elt_type': self.d_type} - - changelog_f.write(("{ct}: udf: '{s_udf}' on {d_elt_type}: '{sn}' (" - "id: {si}) is changed from '{su}' to '{nv}'.\n").format(**d)) - - logging.info(("Copying from element with id: {0} to element with " - " id: {1}").format(self.s_elt.id, self.d_elt.id)) + d = { + "ct": self._current_time(), + "s_udf": self.s_field_name, + "sn": self.d_elt.name, + "si": self.d_elt.id, + "su": self.old_dest_udf, + "nv": self.s_field, + "d_elt_type": self.d_type, + } + + changelog_f.write( + ( + "{ct}: udf: '{s_udf}' on {d_elt_type}: '{sn}' (" + "id: {si}) is changed from '{su}' to '{nv}'.\n" + ).format(**d) + ) + + logging.info( + ("Copying from element with id: {0} to element with " " id: {1}").format( + self.s_elt.id, self.d_elt.id + ) + ) def _log_after_change(self): - d = {'s_udf': self.s_field_name, - 'd_udf': self.d_udf_name, - 'su': self.old_dest_udf, - 'nv': self.s_field, - 'd_elt_type': self.d_type} - - logging.info("Updated {d_elt_type} udf: {d_udf}, from {su} to " - "{nv}.".format(**d)) - - def copy_udf(self, changelog_f = None): + d = { + "s_udf": self.s_field_name, + "d_udf": self.d_udf_name, + "su": self.old_dest_udf, + "nv": self.s_field, + "d_elt_type": self.d_type, + } + + logging.info( + "Updated {d_elt_type} udf: {d_udf}, from {su} to " "{nv}.".format(**d) + ) + + def copy_udf(self, changelog_f=None): if self.s_field != self.old_dest_udf: self._log_before_change(changelog_f) log = self._set_udf(self.d_elt, self.d_udf_name, self.s_field) @@ -386,6 +430,3 @@ def copy_udf(self, changelog_f = None): return log else: return False - - - diff --git a/genologics/internal_classes.py b/genologics/internal_classes.py index 184de5e..6387890 100644 --- a/genologics/internal_classes.py +++ b/genologics/internal_classes.py @@ -1,25 +1,29 @@ -from genologics.descriptors import StringDescriptor, EntityListDescriptor, BooleanDescriptor, IntegerDescriptor +from genologics.descriptors import ( + StringDescriptor, + EntityListDescriptor, + BooleanDescriptor, + IntegerDescriptor, +) from genologics.entities import File, Udfconfig from genologics.constants import nsmap class ProcessTypeParameter(object): - instance = None name = None root = None - tag = 'parameter' + tag = "parameter" - string = StringDescriptor('string') - run_program_per_event = StringDescriptor('run-program-per-event') - channel = StringDescriptor('channel') - invocation_type = StringDescriptor('invocation-type') - file = EntityListDescriptor(nsmap('file:file'), File) + string = StringDescriptor("string") + run_program_per_event = StringDescriptor("run-program-per-event") + channel = StringDescriptor("channel") + invocation_type = StringDescriptor("invocation-type") + file = EntityListDescriptor(nsmap("file:file"), File) def __init__(self, pt_instance, node): self.instance = pt_instance self.root = node - self.name = self.root.attrib['name'] + self.name = self.root.attrib["name"] def __repr__(self): return 
"{0}({1})".format(self.__class__.__name__, self.name) @@ -27,16 +31,16 @@ def __repr__(self): def get(self): pass -class ProcessTypeProcessInput(object): +class ProcessTypeProcessInput(object): instance = None name = None root = None - tag = '' + tag = "" - artifact_type = StringDescriptor('artifact-type') - display_name = StringDescriptor('display-name') - remove_working_flag = BooleanDescriptor('remove-working-flag') + artifact_type = StringDescriptor("artifact-type") + display_name = StringDescriptor("display-name") + remove_working_flag = BooleanDescriptor("remove-working-flag") def __init__(self, pt_instance, node): self.instance = pt_instance @@ -51,19 +55,18 @@ def get(self): class ProcessTypeProcessOutput(object): - instance = None name = None root = None - tag = '' - - artifact_type = StringDescriptor('artifact-type') - display_name = StringDescriptor('display-name') - output_generation_type = StringDescriptor('output-generation-type') - variability_type = StringDescriptor('variability-type') - number_of_outputs = IntegerDescriptor('number-of-outputs') - output_name = StringDescriptor('output-name') - field_definitions = EntityListDescriptor('field-definition', Udfconfig) + tag = "" + + artifact_type = StringDescriptor("artifact-type") + display_name = StringDescriptor("display-name") + output_generation_type = StringDescriptor("output-generation-type") + variability_type = StringDescriptor("variability-type") + number_of_outputs = IntegerDescriptor("number-of-outputs") + output_name = StringDescriptor("output-name") + field_definitions = EntityListDescriptor("field-definition", Udfconfig) def __init__(self, pt_instance, node): self.instance = pt_instance diff --git a/genologics/lims.py b/genologics/lims.py index 221f662..83a9cea 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -6,9 +6,18 @@ Copyright (C) 2012 Per Kraulis """ -__all__ = ['Lab', 'Researcher', 'Project', 'Sample', - 'Containertype', 'Container', 'Processtype', 'Process', - 'Artifact', 'Lims'] +__all__ = [ + "Lab", + "Researcher", + "Project", + "Sample", + "Containertype", + "Container", + "Processtype", + "Process", + "Artifact", + "Lims", +] import os import re @@ -31,14 +40,17 @@ # Python 2.6 support work-arounds # - Exception ElementTree.ParseError does not exist # - ElementTree.ElementTree.write does not take arg. xml_declaration -if version_info[:2] < (2,7): +if version_info[:2] < (2, 7): from xml.parsers import expat + ElementTree.ParseError = expat.ExpatError p26_write = ElementTree.ElementTree.write + def write_with_xml_declaration(self, file, encoding, xml_declaration): - assert xml_declaration is True # Support our use case only + assert xml_declaration is True # Support our use case only file.write("\n") p26_write(self, file, encoding=encoding) + ElementTree.ElementTree.write = write_with_xml_declaration TIMEOUT = 16 @@ -47,7 +59,7 @@ def write_with_xml_declaration(self, file, encoding, xml_declaration): class Lims(object): "LIMS interface through which all entity instances are retrieved." - VERSION = 'v2' + VERSION = "v2" def __init__(self, baseuri, username, password, version=VERSION): """baseuri: Base URI for the GenoLogics server, excluding @@ -57,7 +69,7 @@ def __init__(self, baseuri, username, password, version=VERSION): password: The password for the user account to login as. 
version: The optional LIMS API version, by default 'v2' """ - self.baseuri = baseuri.rstrip('/') + '/' + self.baseuri = baseuri.rstrip("/") + "/" self.username = username self.password = password self.VERSION = version @@ -65,24 +77,29 @@ def __init__(self, baseuri, username, password, version=VERSION): # For optimization purposes, enables requests to persist connections self.request_session = requests.Session() # The connection pool has a default size of 10 - self.adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=100) - self.request_session.mount('http://', self.adapter) + self.adapter = requests.adapters.HTTPAdapter( + pool_connections=100, pool_maxsize=100 + ) + self.request_session.mount("http://", self.adapter) def get_uri(self, *segments, **query): "Return the full URI given the path segments and optional query." - segments = ['api', self.VERSION] + list(segments) - url = urljoin(self.baseuri, '/'.join(segments)) + segments = ["api", self.VERSION] + list(segments) + url = urljoin(self.baseuri, "/".join(segments)) if query: - url += '?' + urlencode(query) + url += "?" + urlencode(query) return url def get(self, uri, params=dict()): "GET data from the URI. Return the response XML as an ElementTree." try: - r = self.request_session.get(uri, params=params, - auth=(self.username, self.password), - headers=dict(accept='application/xml'), - timeout=TIMEOUT) + r = self.request_session.get( + uri, + params=params, + auth=(self.username, self.password), + headers=dict(accept="application/xml"), + timeout=TIMEOUT, + ) except requests.exceptions.Timeout as e: raise type(e)("{0}, Error trying to reach {1}".format(str(e), uri)) @@ -92,15 +109,17 @@ def get(self, uri, params=dict()): def get_file_contents(self, id=None, uri=None): """Returns the contents of the file of or """ if id: - segments = ['api', self.VERSION, 'files', id, 'download'] + segments = ["api", self.VERSION, "files", id, "download"] elif uri: - segments = [uri, 'download'] + segments = [uri, "download"] else: raise ValueError("id or uri required") - url = urljoin(self.baseuri, '/'.join(segments)) - r = self.request_session.get(url, auth=(self.username, self.password), timeout=TIMEOUT, stream=True) + url = urljoin(self.baseuri, "/".join(segments)) + r = self.request_session.get( + url, auth=(self.username, self.password), timeout=TIMEOUT, stream=True + ) self.validate_response(r) - if 'text' in r.headers['Content-Type']: + if "text" in r.headers["Content-Type"]: return r.text else: return r.raw @@ -113,27 +132,29 @@ def upload_new_file(self, entity, file_to_upload): # Request the storage space on glsstorage # Create the xml to describe the file - root = ElementTree.Element(nsmap('file:file')) - s = ElementTree.SubElement(root, 'attached-to') + root = ElementTree.Element(nsmap("file:file")) + s = ElementTree.SubElement(root, "attached-to") s.text = entity.uri - s = ElementTree.SubElement(root, 'original-location') + s = ElementTree.SubElement(root, "original-location") s.text = file_to_upload root = self.post( - uri=self.get_uri('glsstorage'), - data=self.tostring(ElementTree.ElementTree(root)) + uri=self.get_uri("glsstorage"), + data=self.tostring(ElementTree.ElementTree(root)), ) # Create the file object root = self.post( - uri=self.get_uri('files'), - data=self.tostring(ElementTree.ElementTree(root)) + uri=self.get_uri("files"), data=self.tostring(ElementTree.ElementTree(root)) ) - file = File(self, uri=root.attrib['uri']) + file = File(self, uri=root.attrib["uri"]) # Actually upload the file - uri = 
self.get_uri('files', file.id, 'upload') - r = requests.post(uri, files={'file': (file_to_upload, open(file_to_upload, 'rb'))}, - auth=(self.username, self.password)) + uri = self.get_uri("files", file.id, "upload") + r = requests.post( + uri, + files={"file": (file_to_upload, open(file_to_upload, "rb"))}, + auth=(self.username, self.password), + ) self.validate_response(r) return file @@ -141,44 +162,53 @@ def put(self, uri, data, params=dict()): """PUT the serialized XML to the given URI. Return the response XML as an ElementTree. """ - r = requests.put(uri, data=data, params=params, - auth=(self.username, self.password), - headers={'content-type': 'application/xml', - 'accept': 'application/xml'}) + r = requests.put( + uri, + data=data, + params=params, + auth=(self.username, self.password), + headers={"content-type": "application/xml", "accept": "application/xml"}, + ) return self.parse_response(r) def post(self, uri, data, params=dict()): """POST the serialized XML to the given URI. Return the response XML as an ElementTree. """ - r = requests.post(uri, data=data, params=params, - auth=(self.username, self.password), - headers={'content-type': 'application/xml', - 'accept': 'application/xml'}) + r = requests.post( + uri, + data=data, + params=params, + auth=(self.username, self.password), + headers={"content-type": "application/xml", "accept": "application/xml"}, + ) return self.parse_response(r, accept_status_codes=[200, 201, 202]) def delete(self, uri, params=dict()): """sends a DELETE to the given URI. Return the response XML as an ElementTree. """ - r = requests.delete(uri, params=params, - auth=(self.username, self.password), - headers={'content-type': 'application/xml', - 'accept': 'application/xml'}) + r = requests.delete( + uri, + params=params, + auth=(self.username, self.password), + headers={"content-type": "application/xml", "accept": "application/xml"}, + ) return self.validate_response(r, accept_status_codes=[204]) def check_version(self): """Raise ValueError if the version for this interface does not match any of the versions given for the API. """ - uri = urljoin(self.baseuri, 'api') + uri = urljoin(self.baseuri, "api") r = requests.get(uri, auth=(self.username, self.password)) root = self.parse_response(r) - tag = nsmap('ver:versions') + tag = nsmap("ver:versions") assert tag == root.tag - for node in root.findall('version'): - if node.attrib['major'] == self.VERSION: return - raise ValueError('version mismatch') + for node in root.findall("version"): + if node.attrib["major"] == self.VERSION: + return + raise ValueError("version mismatch") def validate_response(self, response, accept_status_codes=[200]): """Parse the XML returned in the response. 
@@ -188,16 +218,18 @@ def validate_response(self, response, accept_status_codes=[200]): if response.status_code not in accept_status_codes: try: root = ElementTree.fromstring(response.content) - node = root.find('message') + node = root.find("message") if node is None: response.raise_for_status() message = "%s" % (response.status_code) else: message = "%s: %s" % (response.status_code, node.text) - node = root.find('suggested-actions') + node = root.find("suggested-actions") if node is not None: - message += ' ' + node.text - except ElementTree.ParseError: # some error messages might not follow the xml standard + message += " " + node.text + except ( + ElementTree.ParseError + ): # some error messages might not follow the xml standard message = response.content raise requests.exceptions.HTTPError(message, response=response) return True @@ -210,7 +242,14 @@ def parse_response(self, response, accept_status_codes=[200]): root = ElementTree.fromstring(response.content) return root - def get_udfs(self, name=None, attach_to_name=None, attach_to_category=None, start_index=None, add_info=False): + def get_udfs( + self, + name=None, + attach_to_name=None, + attach_to_category=None, + start_index=None, + add_info=False, + ): """Get a list of udfs, filtered by keyword arguments. name: name of udf attach_to_name: item in the system, to wich the udf is attached, such as @@ -219,10 +258,12 @@ def get_udfs(self, name=None, attach_to_name=None, attach_to_category=None, star then you need to set attach_to_category='ProcessType'. Must not be provided otherwise. start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, - attach_to_name=attach_to_name, - attach_to_category=attach_to_category, - start_index=start_index) + params = self._get_params( + name=name, + attach_to_name=attach_to_name, + attach_to_category=attach_to_category, + start_index=start_index, + ) return self._get_instances(Udfconfig, add_info=add_info, params=params) def get_reagent_types(self, name=None, start_index=None): @@ -230,12 +271,19 @@ def get_reagent_types(self, name=None, start_index=None): name: reagent type name, or list of names. start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, - start_index=start_index) + params = self._get_params(name=name, start_index=start_index) return self._get_instances(ReagentType, params=params) - def get_labs(self, name=None, last_modified=None, - udf=dict(), udtname=None, udt=dict(), start_index=None, add_info=False): + def get_labs( + self, + name=None, + last_modified=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + add_info=False, + ): """Get a list of labs, filtered by keyword arguments. name: Lab name, or list of names. last_modified: Since the given ISO format datetime. @@ -245,16 +293,24 @@ def get_labs(self, name=None, last_modified=None, and a string or list of strings as value. start_index: Page to retrieve; all if None. 
""" - params = self._get_params(name=name, - last_modified=last_modified, - start_index=start_index) + params = self._get_params( + name=name, last_modified=last_modified, start_index=start_index + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) return self._get_instances(Lab, add_info=add_info, params=params) - def get_researchers(self, firstname=None, lastname=None, username=None, - last_modified=None, - udf=dict(), udtname=None, udt=dict(), start_index=None, - add_info=False): + def get_researchers( + self, + firstname=None, + lastname=None, + username=None, + last_modified=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + add_info=False, + ): """Get a list of researchers, filtered by keyword arguments. firstname: Researcher first name, or list of names. lastname: Researcher last name, or list of names. @@ -266,17 +322,27 @@ def get_researchers(self, firstname=None, lastname=None, username=None, and a string or list of strings as value. start_index: Page to retrieve; all if None. """ - params = self._get_params(firstname=firstname, - lastname=lastname, - username=username, - last_modified=last_modified, - start_index=start_index) + params = self._get_params( + firstname=firstname, + lastname=lastname, + username=username, + last_modified=last_modified, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) return self._get_instances(Researcher, add_info=add_info, params=params) - def get_projects(self, name=None, open_date=None, last_modified=None, - udf=dict(), udtname=None, udt=dict(), start_index=None, - add_info=False): + def get_projects( + self, + name=None, + open_date=None, + last_modified=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + add_info=False, + ): """Get a list of projects, filtered by keyword arguments. name: Project name, or list of names. open_date: Since the given ISO format date. @@ -287,33 +353,54 @@ def get_projects(self, name=None, open_date=None, last_modified=None, and a string or list of strings as value. start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, - open_date=open_date, - last_modified=last_modified, - start_index=start_index) + params = self._get_params( + name=name, + open_date=open_date, + last_modified=last_modified, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) return self._get_instances(Project, add_info=add_info, params=params) - def get_sample_number(self, name=None, projectname=None, projectlimsid=None, - udf=dict(), udtname=None, udt=dict(), start_index=None): + def get_sample_number( + self, + name=None, + projectname=None, + projectlimsid=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + ): """Gets the number of samples matching the query without fetching every sample, so it should be faster than len(get_samples()""" - params = self._get_params(name=name, - projectname=projectname, - projectlimsid=projectlimsid, - start_index=start_index) + params = self._get_params( + name=name, + projectname=projectname, + projectlimsid=projectlimsid, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) root = self.get(self.get_uri(Sample._URI), params=params) total = 0 - while params.get('start-index') is None: # Loop over all pages. + while params.get("start-index") is None: # Loop over all pages. 
total += len(root.findall("sample")) - node = root.find('next-page') - if node is None: break - root = self.get(node.attrib['uri'], params=params) + node = root.find("next-page") + if node is None: + break + root = self.get(node.attrib["uri"], params=params) return total - def get_samples(self, name=None, projectname=None, projectlimsid=None, - udf=dict(), udtname=None, udt=dict(), start_index=None): + def get_samples( + self, + name=None, + projectname=None, + projectlimsid=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + ): """Get a list of samples, filtered by keyword arguments. name: Sample name, or list of names. projectlimsid: Samples for the project of the given LIMS id. @@ -324,19 +411,35 @@ def get_samples(self, name=None, projectname=None, projectlimsid=None, and a string or list of strings as value. start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, - projectname=projectname, - projectlimsid=projectlimsid, - start_index=start_index) + params = self._get_params( + name=name, + projectname=projectname, + projectlimsid=projectlimsid, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) return self._get_instances(Sample, params=params) - def get_artifacts(self, name=None, type=None, process_type=None, - artifact_flag_name=None, working_flag=None, qc_flag=None, - sample_name=None, samplelimsid=None, artifactgroup=None, containername=None, - containerlimsid=None, reagent_label=None, - udf=dict(), udtname=None, udt=dict(), start_index=None, - resolve=False): + def get_artifacts( + self, + name=None, + type=None, + process_type=None, + artifact_flag_name=None, + working_flag=None, + qc_flag=None, + sample_name=None, + samplelimsid=None, + artifactgroup=None, + containername=None, + containerlimsid=None, + reagent_label=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + resolve=False, + ): """Get a list of artifacts, filtered by keyword arguments. name: Artifact name, or list of names. type: Artifact type, or list of types. @@ -356,19 +459,21 @@ def get_artifacts(self, name=None, type=None, process_type=None, and a string or list of strings as value. start_index: Page to retrieve; all if None. 
""" - params = self._get_params(name=name, - type=type, - process_type=process_type, - artifact_flag_name=artifact_flag_name, - working_flag=working_flag, - qc_flag=qc_flag, - sample_name=sample_name, - samplelimsid=samplelimsid, - artifactgroup=artifactgroup, - containername=containername, - containerlimsid=containerlimsid, - reagent_label=reagent_label, - start_index=start_index) + params = self._get_params( + name=name, + type=type, + process_type=process_type, + artifact_flag_name=artifact_flag_name, + working_flag=working_flag, + qc_flag=qc_flag, + sample_name=sample_name, + samplelimsid=samplelimsid, + artifactgroup=artifactgroup, + containername=containername, + containerlimsid=containerlimsid, + reagent_label=reagent_label, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) if resolve: return self.get_batch(self._get_instances(Artifact, params=params)) @@ -382,10 +487,18 @@ def get_container_types(self, name=None, start_index=None): params = self._get_params(name=name, start_index=start_index) return self._get_instances(Containertype, params=params) - def get_containers(self, name=None, type=None, - state=None, last_modified=None, - udf=dict(), udtname=None, udt=dict(), start_index=None, - add_info=False): + def get_containers( + self, + name=None, + type=None, + state=None, + last_modified=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + add_info=False, + ): """Get a list of containers, filtered by keyword arguments. name: Containers name, or list of names. type: Container type, or list of types. @@ -397,18 +510,29 @@ def get_containers(self, name=None, type=None, and a string or list of strings as value. start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, - type=type, - state=state, - last_modified=last_modified, - start_index=start_index) + params = self._get_params( + name=name, + type=type, + state=state, + last_modified=last_modified, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) return self._get_instances(Container, add_info=add_info, params=params) - def get_processes(self, last_modified=None, type=None, - inputartifactlimsid=None, - techfirstname=None, techlastname=None, projectname=None, - udf=dict(), udtname=None, udt=dict(), start_index=None): + def get_processes( + self, + last_modified=None, + type=None, + inputartifactlimsid=None, + techfirstname=None, + techlastname=None, + projectname=None, + udf=dict(), + udtname=None, + udt=dict(), + start_index=None, + ): """Get a list of processes, filtered by keyword arguments. last_modified: Since the given ISO format datetime. type: Process type, or list of types. @@ -422,23 +546,25 @@ def get_processes(self, last_modified=None, type=None, projectname: Name of project, or list of. start_index: Page to retrieve; all if None. 
""" - params = self._get_params(last_modified=last_modified, - type=type, - inputartifactlimsid=inputartifactlimsid, - techfirstname=techfirstname, - techlastname=techlastname, - projectname=projectname, - start_index=start_index) + params = self._get_params( + last_modified=last_modified, + type=type, + inputartifactlimsid=inputartifactlimsid, + techfirstname=techfirstname, + techlastname=techlastname, + projectname=projectname, + start_index=start_index, + ) params.update(self._get_params_udf(udf=udf, udtname=udtname, udt=udt)) return self._get_instances(Process, params=params) def get_automations(self, name=None, add_info=False): - """Get the list of configured automations on the system """ + """Get the list of configured automations on the system""" params = self._get_params(name=name) return self._get_instances(Automation, add_info=add_info, params=params) def get_workflows(self, name=None, add_info=False): - """Get the list of existing workflows on the system """ + """Get the list of existing workflows on the system""" params = self._get_params(name=name) return self._get_instances(Workflow, add_info=add_info, params=params) @@ -452,7 +578,7 @@ def get_reagent_types(self, name=None, add_info=False): return self._get_instances(ReagentType, add_info=add_info, params=params) def get_protocols(self, name=None, add_info=False): - """Get the list of existing protocols on the system """ + """Get the list of existing protocols on the system""" params = self._get_params(name=name) return self._get_instances(Protocol, add_info=add_info, params=params) @@ -461,20 +587,19 @@ def get_reagent_kits(self, name=None, start_index=None, add_info=False): name: reagent kit name, or list of names. start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, - start_index=start_index) + params = self._get_params(name=name, start_index=start_index) return self._get_instances(ReagentKit, add_info=add_info, params=params) - def get_reagent_lots(self, name=None, kitname=None, number=None, - start_index=None): + def get_reagent_lots(self, name=None, kitname=None, number=None, start_index=None): """Get a list of reagent lots, filtered by keyword arguments. name: reagent kit name, or list of names. kitname: name of the kit this lots belong to number: lot number or list of lot number start_index: Page to retrieve; all if None. """ - params = self._get_params(name=name, kitname=kitname, number=number, - start_index=start_index) + params = self._get_params( + name=name, kitname=kitname, number=number, start_index=start_index + ) return self._get_instances(ReagentLot, params=params) def get_instruments(self, name=None): @@ -486,8 +611,9 @@ def _get_params(self, **kwargs): "Convert keyword arguments to a kwargs dictionary." 
result = dict() for key, value in kwargs.items(): - if value is None: continue - result[key.replace('_', '-')] = value + if value is None: + continue + result[key.replace("_", "-")] = value return result def _get_params_udf(self, udf=dict(), udtname=None, udt=dict()): @@ -496,7 +622,7 @@ def _get_params_udf(self, udf=dict(), udtname=None, udt=dict()): for key, value in udf.items(): result["udf.%s" % key] = value if udtname is not None: - result['udt.name'] = udtname + result["udt.name"] = udtname for key, value in udt.items(): result["udt.%s" % key] = value return result @@ -508,18 +634,19 @@ def _get_instances(self, klass, add_info=None, params=dict()): if tag is None: tag = klass.__name__.lower() root = self.get(self.get_uri(klass._URI), params=params) - while params.get('start-index') is None: # Loop over all pages. + while params.get("start-index") is None: # Loop over all pages. for node in root.findall(tag): - results.append(klass(self, uri=node.attrib['uri'])) + results.append(klass(self, uri=node.attrib["uri"])) info_dict = {} for attrib_key in node.attrib: - info_dict[attrib_key] = node.attrib['uri'] + info_dict[attrib_key] = node.attrib["uri"] for subnode in node: info_dict[subnode.tag] = subnode.text additionnal_info_dicts.append(info_dict) - node = root.find('next-page') - if node is None: break - root = self.get(node.attrib['uri'], params=params) + node = root.find("next-page") + if node is None: + break + root = self.get(node.attrib["uri"], params=params) if add_info: return results, additionnal_info_dicts else: @@ -544,26 +671,31 @@ def get_batch(self, instances, force=False): if not instances: return [] - ALLOWED_TAGS = ('artifact', 'container', 'file', 'sample') + ALLOWED_TAGS = ("artifact", "container", "file", "sample") if instances[0]._TAG not in ALLOWED_TAGS: - raise TypeError("Cannot retrieve batch for instances of type '{}'".format(instances[0]._TAG)) + raise TypeError( + "Cannot retrieve batch for instances of type '{}'".format( + instances[0]._TAG + ) + ) - root = ElementTree.Element(nsmap('ri:links')) + root = ElementTree.Element(nsmap("ri:links")) needs_request = False instance_map = {} for instance in instances: instance_map[instance.id] = instance if force or instance.root is None: - ElementTree.SubElement(root, 'link', dict(uri=instance.uri, - rel=instance.__class__._URI)) + ElementTree.SubElement( + root, "link", dict(uri=instance.uri, rel=instance.__class__._URI) + ) needs_request = True if needs_request: - uri = self.get_uri(instance.__class__._URI, 'batch/retrieve') + uri = self.get_uri(instance.__class__._URI, "batch/retrieve") data = self.tostring(ElementTree.ElementTree(root)) root = self.post(uri, data) for node in list(root): - instance = instance_map[node.attrib['limsid']] + instance = instance_map[node.attrib["limsid"]] instance.root = node return list(instance_map.values()) @@ -573,9 +705,13 @@ def put_batch(self, instances): if not instances: return - ALLOWED_TAGS = ('artifact', 'container', 'file', 'sample') + ALLOWED_TAGS = ("artifact", "container", "file", "sample") if instances[0]._TAG not in ALLOWED_TAGS: - raise TypeError("Cannot update batch for instances of type '{}'".format(instances[0]._TAG)) + raise TypeError( + "Cannot update batch for instances of type '{}'".format( + instances[0]._TAG + ) + ) root = None # XML root element for batch request @@ -589,29 +725,33 @@ def put_batch(self, instances): root.append(instance.root) - uri = self.get_uri(klass._URI, 'batch/update') + uri = self.get_uri(klass._URI, "batch/update") data = 
self.tostring(ElementTree.ElementTree(root)) root = self.post(uri, data) - def route_artifacts(self, artifact_list, workflow_uri=None, stage_uri=None, unassign=False): - root = ElementTree.Element(nsmap('rt:routing')) + def route_artifacts( + self, artifact_list, workflow_uri=None, stage_uri=None, unassign=False + ): + root = ElementTree.Element(nsmap("rt:routing")) if unassign: - s = ElementTree.SubElement(root, 'unassign') + s = ElementTree.SubElement(root, "unassign") else: - s = ElementTree.SubElement(root, 'assign') + s = ElementTree.SubElement(root, "assign") if workflow_uri: - s.set('workflow-uri', workflow_uri) + s.set("workflow-uri", workflow_uri) if stage_uri: - s.set('stage-uri', stage_uri) + s.set("stage-uri", stage_uri) for artifact in artifact_list: - a = ElementTree.SubElement(s, 'artifact') - a.set('uri', artifact.uri) - - uri = self.get_uri('route', 'artifacts') - r = requests.post(uri, data=self.tostring(ElementTree.ElementTree(root)), - auth=(self.username, self.password), - headers={'content-type': 'application/xml', - 'accept': 'application/xml'}) + a = ElementTree.SubElement(s, "artifact") + a.set("uri", artifact.uri) + + uri = self.get_uri("route", "artifacts") + r = requests.post( + uri, + data=self.tostring(ElementTree.ElementTree(root)), + auth=(self.username, self.password), + headers={"content-type": "application/xml", "accept": "application/xml"}, + ) self.validate_response(r) def tostring(self, etree): @@ -622,19 +762,23 @@ def tostring(self, etree): def write(self, outfile, etree): "Write the ElementTree contents as UTF-8 encoded XML to the open file." - etree.write(outfile, encoding='utf-8', xml_declaration=True) + etree.write(outfile, encoding="utf-8", xml_declaration=True) def create_container(self, container_type, name=None): """Create a new container of type container_type and returns it Akin to Container.create(lims type=container_type, name=name)""" - el = ElementTree.Element(nsmap('con:container')) + el = ElementTree.Element(nsmap("con:container")) if name is not None: - nm = ElementTree.SubElement(el, 'name') + nm = ElementTree.SubElement(el, "name") nm.text = name - ty = ElementTree.SubElement(el, 'type', attrib={'uri':container_type.uri, 'name':container_type.name}) - ret_el = self.post(uri=self.get_uri('containers'), data=ElementTree.tostring(el)) - ret_con = Container(self, uri=ret_el.attrib['uri']) + ty = ElementTree.SubElement( + el, "type", attrib={"uri": container_type.uri, "name": container_type.name} + ) + ret_el = self.post( + uri=self.get_uri("containers"), data=ElementTree.tostring(el) + ) + ret_con = Container(self, uri=ret_el.attrib["uri"]) ret_con.root = ret_el return ret_con diff --git a/genologics/lims_utils.py b/genologics/lims_utils.py index 4c8eff2..c2d7b40 100644 --- a/genologics/lims_utils.py +++ b/genologics/lims_utils.py @@ -11,59 +11,63 @@ lims = Lims(BASEURI, USERNAME, PASSWORD) + def get_run_info(fc): - fc_summary={} - for iom in fc.input_output_maps: - art = iom[0]['uri'] - lane = art.location[1].split(':')[0] - if lane not in fc_summary: - fc_summary[lane]= dict(list(art.udf.items())) #"%.2f" % val ----round?? - return fc_summary + fc_summary = {} + for iom in fc.input_output_maps: + art = iom[0]["uri"] + lane = art.location[1].split(":")[0] + if lane not in fc_summary: + fc_summary[lane] = dict(list(art.udf.items())) # "%.2f" % val ----round?? 
+ return fc_summary + def procHistory(proc, samplename): - """Quick wat to get the ids of parent processes from the given process, + """Quick wat to get the ids of parent processes from the given process, while staying in a sample scope""" - hist=[] - artifacts = lims.get_artifacts(sample_name = samplename, type = 'Analyte') - not_done=True - starting_art=proc.input_per_sample(samplename)[0].id + hist = [] + artifacts = lims.get_artifacts(sample_name=samplename, type="Analyte") + not_done = True + starting_art = proc.input_per_sample(samplename)[0].id while not_done: - not_done=False + not_done = False for o in artifacts: if o.id == starting_art: if o.parent_process is None: - #flow control : if there is no parent process, we can stop iterating, we're done. - not_done=False - break #breaks the for artifacts, we are done anyway. + # flow control : if there is no parent process, we can stop iterating, we're done. + not_done = False + break # breaks the for artifacts, we are done anyway. else: - not_done=True #keep the loop running + not_done = True # keep the loop running hist.append(o.parent_process.id) for i in o.parent_process.all_inputs(): if i in artifacts: # while increment - starting_art=i.id + starting_art = i.id - break #break the for allinputs, if we found the right one - break # breaks the for artifacts if we matched the current one + break # break the for allinputs, if we found the right one + break # breaks the for artifacts if we matched the current one return hist + def get_sequencing_info(fc): """Input: a process object 'fc', of type 'Illumina Sequencing (Illumina SBS) 4.0', Output: A dictionary where keys are lanes 1,2,...,8, and values are lane artifact udfs""" - fc_summary={} + fc_summary = {} for iom in fc.input_output_maps: - art = Artifact(lims,id = iom[0]['limsid']) - lane = art.location[1].split(':')[0] + art = Artifact(lims, id=iom[0]["limsid"]) + lane = art.location[1].split(":")[0] if lane not in fc_summary: - fc_summary[lane]= dict(list(art.udf.items())) #"%.2f" % val ----round?? - fc_summary[lane]['qc'] = art.qc_flag + fc_summary[lane] = dict(list(art.udf.items())) # "%.2f" % val ----round?? + fc_summary[lane]["qc"] = art.qc_flag return fc_summary + def make_sample_artifact_maps(sample_name): - """outin: connects each out_art for a specific sample to its + """outin: connects each out_art for a specific sample to its corresponding in_art and process. 
one-one relation""" outin = {} - artifacts = lims.get_artifacts(sample_name = sample_name, type = 'Analyte') + artifacts = lims.get_artifacts(sample_name=sample_name, type="Analyte") for outart in artifacts: try: pro = outart.parent_process @@ -75,5 +79,3 @@ def make_sample_artifact_maps(sample_name): except: pass return outin - - diff --git a/genologics/test_utils.py b/genologics/test_utils.py index 1c4f6d3..0188d19 100644 --- a/genologics/test_utils.py +++ b/genologics/test_utils.py @@ -17,42 +17,44 @@ """ - XML_DICT = {} def patched_get(*args, **kwargs): - params=None - if 'uri' in kwargs: - uri=kwargs['uri'] + params = None + if "uri" in kwargs: + uri = kwargs["uri"] else: for arg in args: if isinstance(arg, str): uri = arg - if 'params' in kwargs: - params=kwargs['params'] + if "params" in kwargs: + params = kwargs["params"] else: for arg in args: if isinstance(arg, dict): params = arg - r = requests.Request(method='GET', url=uri, params=params) + r = requests.Request(method="GET", url=uri, params=params) r = r.prepare() if not XML_DICT: - raise Exception("You need to update genologics.test_utils.XML_DICT before using this function") + raise Exception( + "You need to update genologics.test_utils.XML_DICT before using this function" + ) try: return ElementTree.fromstring(XML_DICT[r.url]) except KeyError: raise Exception("Cannot find mocked xml for uri {0}".format(r.url)) + def dump_source_xml(lims): """After using a LIMS object, using this method on it will dump all the cached XML in a serialized dictionnary form, to be used with patched_get""" final_string = [] - final_string.append('{') + final_string.append("{") for k, v in lims.cache.items(): final_string.append("'{0}':".format(k)) v.get() - final_string.append('"""{0}""",'.format(v.xml().replace('\n', "\n"))) - final_string.append('}') + final_string.append('"""{0}""",'.format(v.xml().replace("\n", "\n"))) + final_string.append("}") - return '\n'.join(final_string) + return "\n".join(final_string) diff --git a/genologics/version.py b/genologics/version.py index 97123c1..5becc17 100644 --- a/genologics/version.py +++ b/genologics/version.py @@ -1 +1 @@ -__version__="1.0.0" +__version__ = "1.0.0" diff --git a/setup.py b/setup.py index 7654e2a..b5e663b 100644 --- a/setup.py +++ b/setup.py @@ -10,41 +10,40 @@ try: with open("requirements.txt") as rq: - requires=rq.readlines() + requires = rq.readlines() except: - requires=["requests"] + requires = ["requests"] -setup(name='genologics', - version=__version__, - description="Python interface to the Illumina Basespace Clarity LIMS (Laboratory Information Management System) server via its REST API.", - long_description="""A basic module for interacting with the Illumina Basespace Clarity LIMS server via its REST API. +setup( + name="genologics", + version=__version__, + description="Python interface to the Illumina Basespace Clarity LIMS (Laboratory Information Management System) server via its REST API.", + long_description="""A basic module for interacting with the Illumina Basespace Clarity LIMS server via its REST API. 
The goal is to provide simple access to the most common entities and their attributes in a reasonably Pythonic fashion.""", - classifiers=[ - "Development Status :: 4 - Beta", - "Environment :: Console", - "Intended Audience :: Developers", - "Intended Audience :: Healthcare Industry", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: MIT License", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python", - "Topic :: Scientific/Engineering :: Medical Science Apps." - ], - keywords='genologics api rest clarity lims illumina', - author='Per Kraulis', - author_email='per.kraulis@scilifelab.se', - maintainer='Chuan Wang', - maintainer_email='chuan.wang@scilifelab.se', - url='https://github.com/scilifelab/genologics', - license='MIT', - packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), - scripts=glob.glob("scripts/*.py"), - include_package_data=True, - zip_safe=False, - install_requires=[ - "requests" - ], - entry_points=""" + classifiers=[ + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Healthcare Industry", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Topic :: Scientific/Engineering :: Medical Science Apps.", + ], + keywords="genologics api rest clarity lims illumina", + author="Per Kraulis", + author_email="per.kraulis@scilifelab.se", + maintainer="Chuan Wang", + maintainer_email="chuan.wang@scilifelab.se", + url="https://github.com/scilifelab/genologics", + license="MIT", + packages=find_packages(exclude=["ez_setup", "examples", "tests"]), + scripts=glob.glob("scripts/*.py"), + include_package_data=True, + zip_safe=False, + install_requires=["requests"], + entry_points=""" # -*- Entry points: -*- """, - ) +) diff --git a/tests/test_descriptors.py b/tests/test_descriptors.py index cf0bf3f..07fe6c3 100644 --- a/tests/test_descriptors.py +++ b/tests/test_descriptors.py @@ -3,8 +3,16 @@ from unittest import TestCase from xml.etree import ElementTree -from genologics.descriptors import StringDescriptor, StringAttributeDescriptor, StringListDescriptor, \ - StringDictionaryDescriptor, IntegerDescriptor, BooleanDescriptor, UdfDictionary, EntityDescriptor +from genologics.descriptors import ( + StringDescriptor, + StringAttributeDescriptor, + StringListDescriptor, + StringDictionaryDescriptor, + IntegerDescriptor, + BooleanDescriptor, + UdfDictionary, + EntityDescriptor, +) from genologics.entities import Artifact from genologics.lims import Lims @@ -20,7 +28,9 @@ def _make_desc(self, klass, *args, **kwargs): def _tostring(self, e): outfile = BytesIO() - ElementTree.ElementTree(e).write(outfile, encoding='utf-8', xml_declaration=True) + ElementTree.ElementTree(e).write( + outfile, encoding="utf-8", xml_declaration=True + ) return outfile.getvalue() @@ -34,19 +44,19 @@ def setUp(self): self.instance = Mock(root=self.et) def test__get__(self): - sd = self._make_desc(StringDescriptor, 'name') + sd = self._make_desc(StringDescriptor, "name") assert sd.__get__(self.instance, None) == "test name" def test__set__(self): - sd = self._make_desc(StringDescriptor, 'name') + sd = self._make_desc(StringDescriptor, "name") sd.__set__(self.instance, "new test name") - assert self.et.find('name').text == "new test name" + assert self.et.find("name").text == "new test name" def test_create(self): - instance_new = Mock(root=ElementTree.Element('test-entry')) - sd = 
self._make_desc(StringDescriptor, 'name') + instance_new = Mock(root=ElementTree.Element("test-entry")) + sd = self._make_desc(StringDescriptor, "name") sd.__set__(instance_new, "test name") - assert instance_new.root.find('name').text == 'test name' + assert instance_new.root.find("name").text == "test name" class TestIntegerDescriptor(TestDescriptor): @@ -59,21 +69,21 @@ def setUp(self): self.instance = Mock(root=self.et) def test__get__(self): - sd = self._make_desc(IntegerDescriptor, 'count') + sd = self._make_desc(IntegerDescriptor, "count") assert sd.__get__(self.instance, None) == 32 def test__set__(self): - sd = self._make_desc(IntegerDescriptor, 'count') + sd = self._make_desc(IntegerDescriptor, "count") sd.__set__(self.instance, 23) - assert self.et.find('count').text == '23' - sd.__set__(self.instance, '23') - assert self.et.find('count').text == '23' + assert self.et.find("count").text == "23" + sd.__set__(self.instance, "23") + assert self.et.find("count").text == "23" def test_create(self): - instance_new = Mock(root=ElementTree.Element('test-entry')) - sd = self._make_desc(IntegerDescriptor, 'count') + instance_new = Mock(root=ElementTree.Element("test-entry")) + sd = self._make_desc(IntegerDescriptor, "count") sd.__set__(instance_new, 23) - assert instance_new.root.find('count').text == '23' + assert instance_new.root.find("count").text == "23" class TestBooleanDescriptor(TestDescriptor): @@ -86,21 +96,21 @@ def setUp(self): self.instance = Mock(root=self.et) def test__get__(self): - bd = self._make_desc(BooleanDescriptor, 'istest') + bd = self._make_desc(BooleanDescriptor, "istest") assert bd.__get__(self.instance, None) == True def test__set__(self): - bd = self._make_desc(BooleanDescriptor, 'istest') + bd = self._make_desc(BooleanDescriptor, "istest") bd.__set__(self.instance, False) - assert self.et.find('istest').text == 'false' - bd.__set__(self.instance, 'true') - assert self.et.find('istest').text == 'true' + assert self.et.find("istest").text == "false" + bd.__set__(self.instance, "true") + assert self.et.find("istest").text == "true" def test_create(self): - instance_new = Mock(root=ElementTree.Element('test-entry')) - bd = self._make_desc(BooleanDescriptor, 'istest') + instance_new = Mock(root=ElementTree.Element("test-entry")) + bd = self._make_desc(BooleanDescriptor, "istest") bd.__set__(instance_new, True) - assert instance_new.root.find('istest').text == 'true' + assert instance_new.root.find("istest").text == "true" class TestEntityDescriptor(TestDescriptor): @@ -110,25 +120,33 @@ def setUp(self): """) - self.lims = Lims('http://testgenologics.com:4040', username='test', password='password') - self.a1 = Artifact(self.lims, id='a1') - self.a2 = Artifact(self.lims, id='a2') + self.lims = Lims( + "http://testgenologics.com:4040", username="test", password="password" + ) + self.a1 = Artifact(self.lims, id="a1") + self.a2 = Artifact(self.lims, id="a2") self.instance = Mock(root=self.et, lims=self.lims) def test__get__(self): - ed = self._make_desc(EntityDescriptor, 'artifact', Artifact) + ed = self._make_desc(EntityDescriptor, "artifact", Artifact) assert ed.__get__(self.instance, None) == self.a1 def test__set__(self): - ed = self._make_desc(EntityDescriptor, 'artifact', Artifact) + ed = self._make_desc(EntityDescriptor, "artifact", Artifact) ed.__set__(self.instance, self.a2) - assert self.et.find('artifact').attrib['uri'] == 'http://testgenologics.com:4040/api/v2/artifacts/a2' + assert ( + self.et.find("artifact").attrib["uri"] + == 
"http://testgenologics.com:4040/api/v2/artifacts/a2" + ) def test_create(self): - instance_new = Mock(root=ElementTree.Element('test-entry')) - ed = self._make_desc(EntityDescriptor, 'artifact', Artifact) + instance_new = Mock(root=ElementTree.Element("test-entry")) + ed = self._make_desc(EntityDescriptor, "artifact", Artifact) ed.__set__(instance_new, self.a1) - assert instance_new.root.find('artifact').attrib['uri'] == 'http://testgenologics.com:4040/api/v2/artifacts/a1' + assert ( + instance_new.root.find("artifact").attrib["uri"] + == "http://testgenologics.com:4040/api/v2/artifacts/a1" + ) class TestStringAttributeDescriptor(TestDescriptor): @@ -139,19 +157,19 @@ def setUp(self): self.instance = Mock(root=self.et) def test__get__(self): - sd = self._make_desc(StringAttributeDescriptor, 'name') + sd = self._make_desc(StringAttributeDescriptor, "name") assert sd.__get__(self.instance, None) == "test name" def test__set__(self): - sd = self._make_desc(StringAttributeDescriptor, 'name') + sd = self._make_desc(StringAttributeDescriptor, "name") sd.__set__(self.instance, "test name2") - assert self.et.attrib['name'] == "test name2" + assert self.et.attrib["name"] == "test name2" def test_create(self): - instance_new = Mock(root=ElementTree.Element('test-entry')) - bd = self._make_desc(StringAttributeDescriptor, 'name') + instance_new = Mock(root=ElementTree.Element("test-entry")) + bd = self._make_desc(StringAttributeDescriptor, "name") bd.__set__(instance_new, "test name2") - assert instance_new.root.attrib['name'] == "test name2" + assert instance_new.root.attrib["name"] == "test name2" class TestStringListDescriptor(TestDescriptor): @@ -164,8 +182,8 @@ def setUp(self): self.instance = Mock(root=self.et) def test__get__(self): - sd = self._make_desc(StringListDescriptor, 'test-subentry') - assert sd.__get__(self.instance, None) == ['A01', 'B01'] + sd = self._make_desc(StringListDescriptor, "test-subentry") + assert sd.__get__(self.instance, None) == ["A01", "B01"] class TestStringDictionaryDescriptor(TestDescriptor): @@ -180,11 +198,11 @@ def setUp(self): self.instance = Mock(root=self.et) def test__get__(self): - sd = self._make_desc(StringDictionaryDescriptor, 'test-subentry') + sd = self._make_desc(StringDictionaryDescriptor, "test-subentry") res = sd.__get__(self.instance, None) assert type(res) == dict - assert res['test-firstkey'] is None - assert res['test-secondkey'] == 'second value' + assert res["test-firstkey"] is None + assert res["test-secondkey"] == "second value" class TestUdfDictionary(TestCase): @@ -201,7 +219,7 @@ def setUp(self): def _get_udf_value(self, udf_dict, key): for e in udf_dict._elems: - if e.attrib['name'] != key: + if e.attrib["name"] != key: continue else: return e.text @@ -225,43 +243,41 @@ def test___getitem__(self): pass def test___setitem__(self): - assert self._get_udf_value(self.dict1, 'test') == 'stuff' - self.dict1.__setitem__('test', 'other') - assert self._get_udf_value(self.dict1, 'test') == 'other' + assert self._get_udf_value(self.dict1, "test") == "stuff" + self.dict1.__setitem__("test", "other") + assert self._get_udf_value(self.dict1, "test") == "other" - assert self._get_udf_value(self.dict1, 'how much') == '42' - self.dict1.__setitem__('how much', 21) - assert self._get_udf_value(self.dict1, 'how much') == '21' + assert self._get_udf_value(self.dict1, "how much") == "42" + self.dict1.__setitem__("how much", 21) + assert self._get_udf_value(self.dict1, "how much") == "21" - assert self._get_udf_value(self.dict1, 'really?') == 'true' - 
self.dict1.__setitem__('really?', False) - assert self._get_udf_value(self.dict1, 'really?') == 'false' + assert self._get_udf_value(self.dict1, "really?") == "true" + self.dict1.__setitem__("really?", False) + assert self._get_udf_value(self.dict1, "really?") == "false" - self.assertRaises(TypeError, self.dict1.__setitem__, 'how much', '433') + self.assertRaises(TypeError, self.dict1.__setitem__, "how much", "433") # FIXME: I'm not sure if this is the expected behaviour - self.dict1.__setitem__('how much', None) - assert self._get_udf_value(self.dict1, 'how much') == '' - + self.dict1.__setitem__("how much", None) + assert self._get_udf_value(self.dict1, "how much") == "" def test___setitem__new(self): - self.dict1.__setitem__('new string', 'new stuff') - assert self._get_udf_value(self.dict1, 'new string') == 'new stuff' - - self.dict1.__setitem__('new numeric', 21) - assert self._get_udf_value(self.dict1, 'new numeric') == '21' + self.dict1.__setitem__("new string", "new stuff") + assert self._get_udf_value(self.dict1, "new string") == "new stuff" - self.dict1.__setitem__('new bool', False) - assert self._get_udf_value(self.dict1, 'new bool') == 'false' + self.dict1.__setitem__("new numeric", 21) + assert self._get_udf_value(self.dict1, "new numeric") == "21" + self.dict1.__setitem__("new bool", False) + assert self._get_udf_value(self.dict1, "new bool") == "false" def test___setitem__unicode(self): - assert self._get_udf_value(self.dict1, 'test') == 'stuff' - self.dict1.__setitem__('test', 'unicode') - assert self._get_udf_value(self.dict1, 'test') == 'unicode' + assert self._get_udf_value(self.dict1, "test") == "stuff" + self.dict1.__setitem__("test", "unicode") + assert self._get_udf_value(self.dict1, "test") == "unicode" - self.dict1.__setitem__('test', 'unicode2') - assert self._get_udf_value(self.dict1, 'test') == 'unicode2' + self.dict1.__setitem__("test", "unicode2") + assert self._get_udf_value(self.dict1, "test") == "unicode2" def test___delitem__(self): pass diff --git a/tests/test_entities.py b/tests/test_entities.py index 103abb4..cda30f5 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -3,8 +3,20 @@ from unittest import TestCase from xml.etree import ElementTree -from genologics.entities import StepActions, Researcher, Artifact, \ - Step, StepPlacements, StepPools, Container, Stage, ReagentKit, ReagentLot, Sample, Project +from genologics.entities import ( + StepActions, + Researcher, + Artifact, + Step, + StepPlacements, + StepPools, + Container, + Stage, + ReagentKit, + ReagentLot, + Sample, + Project, +) from genologics.lims import Lims if version_info[0] == 2: @@ -12,7 +24,7 @@ else: from unittest.mock import patch, Mock -url = 'http://testgenologics.com:4040' +url = "http://testgenologics.com:4040" ######## # Entities in XML @@ -169,22 +181,26 @@ def test_pass(self): def elements_equal(e1, e2): if e1.tag != e2.tag: - print('Tag: %s != %s' % (e1.tag, e2.tag)) + print("Tag: %s != %s" % (e1.tag, e2.tag)) return False if e1.text and e2.text and e1.text.strip() != e2.text.strip(): - print('Text: %s != %s' % (e1.text.strip(), e2.text.strip())) + print("Text: %s != %s" % (e1.text.strip(), e2.text.strip())) return False if e1.tail and e2.tail and e1.tail.strip() != e2.tail.strip(): - print('Tail: %s != %s' % (e1.tail.strip(), e2.tail.strip())) + print("Tail: %s != %s" % (e1.tail.strip(), e2.tail.strip())) return False if e1.attrib != e2.attrib: - print('Attrib: %s != %s' % (e1.attrib, e2.attrib)) + print("Attrib: %s != %s" % (e1.attrib, e2.attrib)) return 
False if len(e1) != len(e2): - print('length %s (%s) != length (%s) ' % (e1.tag, len(e1), e2.tag, len(e2))) + print("length %s (%s) != length (%s) " % (e1.tag, len(e1), e2.tag, len(e2))) return False return all( - elements_equal(c1, c2) for c1, c2 in zip(sorted(e1, key=lambda x: x.tag), sorted(e2, key=lambda x: x.tag))) + elements_equal(c1, c2) + for c1, c2 in zip( + sorted(e1, key=lambda x: x.tag), sorted(e2, key=lambda x: x.tag) + ) + ) class TestEntities(TestCase): @@ -192,7 +208,7 @@ class TestEntities(TestCase): """ def setUp(self): - self.lims = Lims(url, username='test', password='password') + self.lims = Lims(url, username="test", password="password") def _tostring(self, entity): return self.lims.tostring(ElementTree.ElementTree(entity.root)).decode("utf-8") @@ -200,32 +216,62 @@ def _tostring(self, entity): class TestStepActions(TestEntities): step_actions_xml = generic_step_actions_xml.format(url=url) - step_actions_no_escalation_xml = generic_step_actions_no_escalation_xml.format(url=url) + step_actions_no_escalation_xml = generic_step_actions_no_escalation_xml.format( + url=url + ) def test_escalation(self): - s = StepActions(uri=self.lims.get_uri('steps', 'step_id', 'actions'), lims=self.lims) - with patch('requests.Session.get', return_value=Mock(content=self.step_actions_xml, status_code=200)): - with patch('requests.post', return_value=Mock(content=self.dummy_xml, status_code=200)): - r = Researcher(uri='http://testgenologics.com:4040/researchers/r1', lims=self.lims) - a = Artifact(uri='http://testgenologics.com:4040/artifacts/r1', lims=self.lims) + s = StepActions( + uri=self.lims.get_uri("steps", "step_id", "actions"), lims=self.lims + ) + with patch( + "requests.Session.get", + return_value=Mock(content=self.step_actions_xml, status_code=200), + ): + with patch( + "requests.post", + return_value=Mock(content=self.dummy_xml, status_code=200), + ): + r = Researcher( + uri="http://testgenologics.com:4040/researchers/r1", lims=self.lims + ) + a = Artifact( + uri="http://testgenologics.com:4040/artifacts/r1", lims=self.lims + ) expected_escalation = { - 'status': 'Reviewed', - 'author': r, - 'artifacts': [a], 'request': 'no comments', - 'answer': 'no comments', - 'reviewer': r} + "status": "Reviewed", + "author": r, + "artifacts": [a], + "request": "no comments", + "answer": "no comments", + "reviewer": r, + } assert s.escalation == expected_escalation def test_next_actions(self): - s = StepActions(uri=self.lims.get_uri('steps', 'step_id', 'actions'), lims=self.lims) - with patch('requests.Session.get', - return_value=Mock(content=self.step_actions_no_escalation_xml, status_code=200)): - step1 = Step(self.lims, uri='http://testgenologics.com:4040/steps/s1') - step2 = Step(self.lims, uri='http://testgenologics.com:4040/steps/s2') - artifact = Artifact(self.lims, uri='http://testgenologics.com:4040/artifacts/a1') - expected_next_actions = [{'artifact': artifact, 'action': 'requeue', - 'step': step1, 'rework-step': step2}] + s = StepActions( + uri=self.lims.get_uri("steps", "step_id", "actions"), lims=self.lims + ) + with patch( + "requests.Session.get", + return_value=Mock( + content=self.step_actions_no_escalation_xml, status_code=200 + ), + ): + step1 = Step(self.lims, uri="http://testgenologics.com:4040/steps/s1") + step2 = Step(self.lims, uri="http://testgenologics.com:4040/steps/s2") + artifact = Artifact( + self.lims, uri="http://testgenologics.com:4040/artifacts/a1" + ) + expected_next_actions = [ + { + "artifact": artifact, + "action": "requeue", + "step": step1, + 
"rework-step": step2, + } + ] assert s.next_actions == expected_next_actions @@ -233,101 +279,184 @@ class TestStepPools(TestEntities): initial_step_pools = generic_step_pools_xml.format(url=url) def test_get_pool_list(self): - s = StepPools(uri=self.lims.get_uri('steps', 's1', 'pools'), lims=self.lims) - with patch('requests.Session.get', - return_value=Mock(content=self.initial_step_pools, status_code=200)): - output = Artifact(lims=self.lims, uri='http://testgenologics.com:4040/api/v2/artifacts/o1') - i1 = Artifact(lims=self.lims, uri='http://testgenologics.com:4040/api/v2/artifacts/a1') - i2 = Artifact(lims=self.lims, uri='http://testgenologics.com:4040/api/v2/artifacts/a2') - i3 = Artifact(lims=self.lims, uri='http://testgenologics.com:4040/api/v2/artifacts/a3') - i4 = Artifact(lims=self.lims, uri='http://testgenologics.com:4040/api/v2/artifacts/a4') - i5 = Artifact(lims=self.lims, uri='http://testgenologics.com:4040/api/v2/artifacts/a5') - assert s.pools[0]['output'] == output - assert s.pools[0]['name'] == "Pool #1" - assert len(s.pools[0]['inputs']) == 2 - assert s.pools[0]['inputs'][0] == i1 - assert s.pools[0]['inputs'][1] == i2 + s = StepPools(uri=self.lims.get_uri("steps", "s1", "pools"), lims=self.lims) + with patch( + "requests.Session.get", + return_value=Mock(content=self.initial_step_pools, status_code=200), + ): + output = Artifact( + lims=self.lims, uri="http://testgenologics.com:4040/api/v2/artifacts/o1" + ) + i1 = Artifact( + lims=self.lims, uri="http://testgenologics.com:4040/api/v2/artifacts/a1" + ) + i2 = Artifact( + lims=self.lims, uri="http://testgenologics.com:4040/api/v2/artifacts/a2" + ) + i3 = Artifact( + lims=self.lims, uri="http://testgenologics.com:4040/api/v2/artifacts/a3" + ) + i4 = Artifact( + lims=self.lims, uri="http://testgenologics.com:4040/api/v2/artifacts/a4" + ) + i5 = Artifact( + lims=self.lims, uri="http://testgenologics.com:4040/api/v2/artifacts/a5" + ) + assert s.pools[0]["output"] == output + assert s.pools[0]["name"] == "Pool #1" + assert len(s.pools[0]["inputs"]) == 2 + assert s.pools[0]["inputs"][0] == i1 + assert s.pools[0]["inputs"][1] == i2 assert i3 in s.available_inputs assert i4 in s.available_inputs assert i5 in s.available_inputs class TestStepPlacements(TestEntities): - original_step_placements_xml = generic_step_placements_xml.format(url=url, container="c1", loc1='1:1', loc2='2:1') - modloc_step_placements_xml = generic_step_placements_xml.format(url=url, container="c1", loc1='3:1', loc2='4:1') - modcont_step_placements_xml = generic_step_placements_xml.format(url=url, container="c2", loc1='1:1', loc2='1:1') + original_step_placements_xml = generic_step_placements_xml.format( + url=url, container="c1", loc1="1:1", loc2="2:1" + ) + modloc_step_placements_xml = generic_step_placements_xml.format( + url=url, container="c1", loc1="3:1", loc2="4:1" + ) + modcont_step_placements_xml = generic_step_placements_xml.format( + url=url, container="c2", loc1="1:1", loc2="1:1" + ) def test_get_placements_list(self): - s = StepPlacements(uri=self.lims.get_uri('steps', 's1', 'placements'), lims=self.lims) - with patch('requests.Session.get', - return_value=Mock(content=self.original_step_placements_xml, status_code=200)): - a1 = Artifact(uri='http://testgenologics.com:4040/artifacts/a1', lims=self.lims) - a2 = Artifact(uri='http://testgenologics.com:4040/artifacts/a2', lims=self.lims) - c1 = Container(uri='http://testgenologics.com:4040/containers/c1', lims=self.lims) - expected_placements = [[a1, (c1, '1:1')], [a2, (c1, '2:1')]] + s = 
StepPlacements( + uri=self.lims.get_uri("steps", "s1", "placements"), lims=self.lims + ) + with patch( + "requests.Session.get", + return_value=Mock( + content=self.original_step_placements_xml, status_code=200 + ), + ): + a1 = Artifact( + uri="http://testgenologics.com:4040/artifacts/a1", lims=self.lims + ) + a2 = Artifact( + uri="http://testgenologics.com:4040/artifacts/a2", lims=self.lims + ) + c1 = Container( + uri="http://testgenologics.com:4040/containers/c1", lims=self.lims + ) + expected_placements = [[a1, (c1, "1:1")], [a2, (c1, "2:1")]] assert s.get_placement_list() == expected_placements def test_set_placements_list(self): - a1 = Artifact(uri='http://testgenologics.com:4040/artifacts/a1', lims=self.lims) - a2 = Artifact(uri='http://testgenologics.com:4040/artifacts/a2', lims=self.lims) - c1 = Container(uri='http://testgenologics.com:4040/containers/c1', lims=self.lims) - c2 = Container(uri='http://testgenologics.com:4040/containers/c2', lims=self.lims) - - s = StepPlacements(uri=self.lims.get_uri('steps', 's1', 'placements'), lims=self.lims) - with patch('requests.Session.get', - return_value=Mock(content=self.original_step_placements_xml, status_code=200)): - new_placements = [[a1, (c1, '3:1')], [a2, (c1, '4:1')]] + a1 = Artifact(uri="http://testgenologics.com:4040/artifacts/a1", lims=self.lims) + a2 = Artifact(uri="http://testgenologics.com:4040/artifacts/a2", lims=self.lims) + c1 = Container( + uri="http://testgenologics.com:4040/containers/c1", lims=self.lims + ) + c2 = Container( + uri="http://testgenologics.com:4040/containers/c2", lims=self.lims + ) + + s = StepPlacements( + uri=self.lims.get_uri("steps", "s1", "placements"), lims=self.lims + ) + with patch( + "requests.Session.get", + return_value=Mock( + content=self.original_step_placements_xml, status_code=200 + ), + ): + new_placements = [[a1, (c1, "3:1")], [a2, (c1, "4:1")]] s.set_placement_list(new_placements) - assert elements_equal(s.root, ElementTree.fromstring(self.modloc_step_placements_xml)) + assert elements_equal( + s.root, ElementTree.fromstring(self.modloc_step_placements_xml) + ) def test_set_placements_list_fail(self): - a1 = Artifact(uri='http://testgenologics.com:4040/artifacts/a1', lims=self.lims) - a2 = Artifact(uri='http://testgenologics.com:4040/artifacts/a2', lims=self.lims) - c2 = Container(uri='http://testgenologics.com:4040/containers/c2', lims=self.lims) - - s = StepPlacements(uri=self.lims.get_uri('steps', 's1', 'placements'), lims=self.lims) - with patch('requests.Session.get', - return_value=Mock(content=self.original_step_placements_xml, status_code=200)): - new_placements = [[a1, (c2, '1:1')], [a2, (c2, '1:1')]] + a1 = Artifact(uri="http://testgenologics.com:4040/artifacts/a1", lims=self.lims) + a2 = Artifact(uri="http://testgenologics.com:4040/artifacts/a2", lims=self.lims) + c2 = Container( + uri="http://testgenologics.com:4040/containers/c2", lims=self.lims + ) + + s = StepPlacements( + uri=self.lims.get_uri("steps", "s1", "placements"), lims=self.lims + ) + with patch( + "requests.Session.get", + return_value=Mock( + content=self.original_step_placements_xml, status_code=200 + ), + ): + new_placements = [[a1, (c2, "1:1")], [a2, (c2, "1:1")]] s.set_placement_list(new_placements) - assert elements_equal(s.root, ElementTree.fromstring(self.modcont_step_placements_xml)) + assert elements_equal( + s.root, ElementTree.fromstring(self.modcont_step_placements_xml) + ) class TestArtifacts(TestEntities): root_artifact_xml = generic_artifact_xml.format(url=url) def 
test_input_artifact_list(self): - a = Artifact(uri=self.lims.get_uri('artifacts', 'a1'), lims=self.lims) - with patch('requests.Session.get', return_value=Mock(content=self.root_artifact_xml, status_code=200)): + a = Artifact(uri=self.lims.get_uri("artifacts", "a1"), lims=self.lims) + with patch( + "requests.Session.get", + return_value=Mock(content=self.root_artifact_xml, status_code=200), + ): assert a.input_artifact_list() == [] def test_workflow_stages_and_statuses(self): - a = Artifact(uri=self.lims.get_uri('artifacts', 'a1'), lims=self.lims) + a = Artifact(uri=self.lims.get_uri("artifacts", "a1"), lims=self.lims) expected_wf_stage = [ - (Stage(self.lims, uri=url + '/api/v2/configuration/workflows/1/stages/2'), 'QUEUED', 'Test workflow s2'), - (Stage(self.lims, uri=url + '/api/v2/configuration/workflows/1/stages/1'), 'COMPLETE', 'Test workflow s1') + ( + Stage( + self.lims, uri=url + "/api/v2/configuration/workflows/1/stages/2" + ), + "QUEUED", + "Test workflow s2", + ), + ( + Stage( + self.lims, uri=url + "/api/v2/configuration/workflows/1/stages/1" + ), + "COMPLETE", + "Test workflow s1", + ), ] - with patch('requests.Session.get', return_value=Mock(content=self.root_artifact_xml, status_code=200)): + with patch( + "requests.Session.get", + return_value=Mock(content=self.root_artifact_xml, status_code=200), + ): assert a.workflow_stages_and_statuses == expected_wf_stage class TestReagentKits(TestEntities): - url = 'http://testgenologics.com:4040' + url = "http://testgenologics.com:4040" reagentkit_xml = generic_reagentkit_xml.format(url=url) def test_parse_entity(self): - r = ReagentKit(uri=self.lims.get_uri('reagentkits', 'r1'), lims=self.lims) - with patch('requests.Session.get', return_value=Mock(content=self.reagentkit_xml, status_code=200)): - assert r.name == 'regaentkitname' - assert r.supplier == 'reagentProvider' - assert r.website == 'www.reagentprovider.com' + r = ReagentKit(uri=self.lims.get_uri("reagentkits", "r1"), lims=self.lims) + with patch( + "requests.Session.get", + return_value=Mock(content=self.reagentkit_xml, status_code=200), + ): + assert r.name == "regaentkitname" + assert r.supplier == "reagentProvider" + assert r.website == "www.reagentprovider.com" assert r.archived == False def test_create_entity(self): - with patch('genologics.lims.requests.post', return_value=Mock(content=self.reagentkit_xml, status_code=201)): - r = ReagentKit.create(self.lims, name='regaentkitname', supplier='reagentProvider', - website='www.reagentprovider.com', archived=False) - self.assertRaises(TypeError, ReagentKit.create, self.lims, error='test') + with patch( + "genologics.lims.requests.post", + return_value=Mock(content=self.reagentkit_xml, status_code=201), + ): + r = ReagentKit.create( + self.lims, + name="regaentkitname", + supplier="reagentProvider", + website="www.reagentprovider.com", + archived=False, + ) + self.assertRaises(TypeError, ReagentKit.create, self.lims, error="test") class TestReagentLots(TestEntities): @@ -335,45 +464,55 @@ class TestReagentLots(TestEntities): reagentkit_xml = generic_reagentkit_xml.format(url=url) def test_parse_entity(self): - l = ReagentLot(uri=self.lims.get_uri('reagentkits', 'r1'), lims=self.lims) - with patch('requests.Session.get', return_value=Mock(content=self.reagentlot_xml, status_code=200)): + l = ReagentLot(uri=self.lims.get_uri("reagentkits", "r1"), lims=self.lims) + with patch( + "requests.Session.get", + return_value=Mock(content=self.reagentlot_xml, status_code=200), + ): assert l.uri - assert l.name == 'kitname' - assert 
l.lot_number == '100' - assert l.status == 'ARCHIVED' + assert l.name == "kitname" + assert l.lot_number == "100" + assert l.status == "ARCHIVED" def test_create_entity(self): - with patch('requests.Session.get', return_value=Mock(content=self.reagentkit_xml, status_code=200)): - r = ReagentKit(uri=self.lims.get_uri('reagentkits', 'r1'), lims=self.lims) - with patch('genologics.lims.requests.post', - return_value=Mock(content=self.reagentlot_xml, status_code=201)) as patch_post: + with patch( + "requests.Session.get", + return_value=Mock(content=self.reagentkit_xml, status_code=200), + ): + r = ReagentKit(uri=self.lims.get_uri("reagentkits", "r1"), lims=self.lims) + with patch( + "genologics.lims.requests.post", + return_value=Mock(content=self.reagentlot_xml, status_code=201), + ) as patch_post: l = ReagentLot.create( self.lims, reagent_kit=r, - name='kitname', - lot_number='100', - expiry_date='2020-05-01', - status='ACTIVE' + name="kitname", + lot_number="100", + expiry_date="2020-05-01", + status="ACTIVE", ) assert l.uri - assert l.name == 'kitname' - assert l.lot_number == '100' + assert l.name == "kitname" + assert l.lot_number == "100" class TestSample(TestEntities): sample_creation = generic_sample_creation_xml.format(url=url) def test_create_entity(self): - with patch('genologics.lims.requests.post', - return_value=Mock(content=self.sample_creation, status_code=201)) as patch_post: + with patch( + "genologics.lims.requests.post", + return_value=Mock(content=self.sample_creation, status_code=201), + ) as patch_post: l = Sample.create( self.lims, - project=Project(self.lims, uri='project'), - container=Container(self.lims, uri='container'), - position='1:1', - name='s1', + project=Project(self.lims, uri="project"), + container=Container(self.lims, uri="container"), + position="1:1", + name="s1", ) - data = ''' + data = """ s1 @@ -381,6 +520,8 @@ def test_create_entity(self): 1:1 - ''' - assert elements_equal(ElementTree.fromstring(patch_post.call_args_list[0][1]['data']), - ElementTree.fromstring(data)) + """ + assert elements_equal( + ElementTree.fromstring(patch_post.call_args_list[0][1]["data"]), + ElementTree.fromstring(data), + ) diff --git a/tests/test_example.py b/tests/test_example.py index 270e289..8a25e01 100644 --- a/tests/test_example.py +++ b/tests/test_example.py @@ -1,4 +1,3 @@ - from sys import version_info from unittest import TestCase, main @@ -12,8 +11,8 @@ from unittest.mock import patch - -test_utils.XML_DICT={"https://test.claritylims.com/api/v2/projects/BLA1":""" +test_utils.XML_DICT = { + "https://test.claritylims.com/api/v2/projects/BLA1": """ Test 2016-04-20 @@ -21,18 +20,20 @@ -"""} +""" +} + class TestExample(TestCase): def __init__(self, *args, **kwargs): - self.lims = Lims('https://test.claritylims.com', 'user', 'password') + self.lims = Lims("https://test.claritylims.com", "user", "password") super(TestExample, self).__init__(*args, **kwargs) def test_project_example(self): with patch("genologics.lims.Lims.get", side_effect=test_utils.patched_get): - pj = Project(self.lims, id='BLA1') - self.assertEqual(pj.name, 'Test') + pj = Project(self.lims, id="BLA1") + self.assertEqual(pj.name, "Test") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tests/test_lims.py b/tests/test_lims.py index 2e122d4..15bebc3 100644 --- a/tests/test_lims.py +++ b/tests/test_lims.py @@ -4,14 +4,17 @@ from requests.exceptions import HTTPError from genologics.lims import Lims + try: callable(1) -except NameError: # callable() doesn't exist in Python 3.0 
and 3.1 +except NameError: # callable() doesn't exist in Python 3.0 and 3.1 import collections + callable = lambda obj: isinstance(obj, collections.Callable) from sys import version_info + if version_info[0] == 2: from mock import patch, Mock import __builtin__ as builtins @@ -19,10 +22,11 @@ from unittest.mock import patch, Mock import builtins + class TestLims(TestCase): - url = 'http://testgenologics.com:4040' - username = 'test' - password = 'password' + url = "http://testgenologics.com:4040" + username = "test" + password = "password" sample_xml = """ @@ -36,62 +40,76 @@ class TestLims(TestCase): """ - def test_get_uri(self): lims = Lims(self.url, username=self.username, password=self.password) - assert lims.get_uri('artifacts',sample_name='test_sample') == '{url}/api/v2/artifacts?sample_name=test_sample'.format(url=self.url) - + assert lims.get_uri( + "artifacts", sample_name="test_sample" + ) == "{url}/api/v2/artifacts?sample_name=test_sample".format(url=self.url) def test_parse_response(self): lims = Lims(self.url, username=self.username, password=self.password) - r = Mock(content = self.sample_xml, status_code=200) + r = Mock(content=self.sample_xml, status_code=200) pr = lims.parse_response(r) assert pr is not None assert callable(pr.find) - assert hasattr(pr.attrib, '__getitem__') + assert hasattr(pr.attrib, "__getitem__") - r = Mock(content = self.error_xml, status_code=400) + r = Mock(content=self.error_xml, status_code=400) self.assertRaises(HTTPError, lims.parse_response, r) - r = Mock(content = self.error_no_msg_xml, status_code=400) + r = Mock(content=self.error_no_msg_xml, status_code=400) self.assertRaises(HTTPError, lims.parse_response, r) - - @patch('requests.Session.get',return_value=Mock(content = sample_xml, status_code=200)) + @patch( + "requests.Session.get", return_value=Mock(content=sample_xml, status_code=200) + ) def test_get(self, mocked_instance): lims = Lims(self.url, username=self.username, password=self.password) - r = lims.get('{url}/api/v2/artifacts?sample_name=test_sample'.format(url=self.url)) + r = lims.get( + "{url}/api/v2/artifacts?sample_name=test_sample".format(url=self.url) + ) assert r is not None assert callable(r.find) - assert hasattr(r.attrib, '__getitem__') + assert hasattr(r.attrib, "__getitem__") assert mocked_instance.call_count == 1 - mocked_instance.assert_called_with('http://testgenologics.com:4040/api/v2/artifacts?sample_name=test_sample', timeout=16, - headers={'accept': 'application/xml'}, params={}, auth=('test', 'password')) + mocked_instance.assert_called_with( + "http://testgenologics.com:4040/api/v2/artifacts?sample_name=test_sample", + timeout=16, + headers={"accept": "application/xml"}, + params={}, + auth=("test", "password"), + ) def test_put(self): lims = Lims(self.url, username=self.username, password=self.password) - uri = '{url}/api/v2/samples/test_sample'.format(url=self.url) - with patch('requests.put', return_value=Mock(content = self.sample_xml, status_code=200)) as mocked_put: + uri = "{url}/api/v2/samples/test_sample".format(url=self.url) + with patch( + "requests.put", return_value=Mock(content=self.sample_xml, status_code=200) + ) as mocked_put: response = lims.put(uri=uri, data=self.sample_xml) assert mocked_put.call_count == 1 - with patch('requests.put', return_value=Mock(content = self.error_xml, status_code=400)) as mocked_put: + with patch( + "requests.put", return_value=Mock(content=self.error_xml, status_code=400) + ) as mocked_put: self.assertRaises(HTTPError, lims.put, uri=uri, data=self.sample_xml) 
assert mocked_put.call_count == 1 - def test_post(self): lims = Lims(self.url, username=self.username, password=self.password) - uri = '{url}/api/v2/samples'.format(url=self.url) - with patch('requests.post', return_value=Mock(content = self.sample_xml, status_code=200)) as mocked_put: + uri = "{url}/api/v2/samples".format(url=self.url) + with patch( + "requests.post", return_value=Mock(content=self.sample_xml, status_code=200) + ) as mocked_put: response = lims.post(uri=uri, data=self.sample_xml) assert mocked_put.call_count == 1 - with patch('requests.post', return_value=Mock(content = self.error_xml, status_code=400)) as mocked_put: + with patch( + "requests.post", return_value=Mock(content=self.error_xml, status_code=400) + ) as mocked_put: self.assertRaises(HTTPError, lims.post, uri=uri, data=self.sample_xml) assert mocked_put.call_count == 1 - - @patch('os.path.isfile', return_value=True) - @patch.object(builtins, 'open') + @patch("os.path.isfile", return_value=True) + @patch.object(builtins, "open") def test_upload_new_file(self, mocked_open, mocked_isfile): lims = Lims(self.url, username=self.username, password=self.password) xml_intro = """""" @@ -101,44 +119,55 @@ def test_upload_new_file(self, mocked_open, mocked_isfile): upload = """ filename_to_upload""" content_loc = """ sftp://{url}/opt/gls/clarity/users/glsftp/clarity/samples/test_sample/test""" file_end = """""" - glsstorage_xml = '\n'.join([xml_intro,file_start, attached, upload, content_loc, file_end]).format(url=self.url) - file_post_xml = '\n'.join([xml_intro, file_start2, attached, upload, content_loc, file_end]).format(url=self.url) - with patch('requests.post', side_effect=[Mock(content=glsstorage_xml, status_code=200), - Mock(content=file_post_xml, status_code=200), - Mock(content="", status_code=200)]): - - file = lims.upload_new_file(Mock(uri=self.url+"/api/v2/samples/test_sample"), - 'filename_to_upload') + glsstorage_xml = "\n".join( + [xml_intro, file_start, attached, upload, content_loc, file_end] + ).format(url=self.url) + file_post_xml = "\n".join( + [xml_intro, file_start2, attached, upload, content_loc, file_end] + ).format(url=self.url) + with patch( + "requests.post", + side_effect=[ + Mock(content=glsstorage_xml, status_code=200), + Mock(content=file_post_xml, status_code=200), + Mock(content="", status_code=200), + ], + ): + file = lims.upload_new_file( + Mock(uri=self.url + "/api/v2/samples/test_sample"), "filename_to_upload" + ) assert file.id == "40-3501" - with patch('requests.post', side_effect=[Mock(content=self.error_xml, status_code=400)]): - - self.assertRaises(HTTPError, - lims.upload_new_file, - Mock(uri=self.url+"/api/v2/samples/test_sample"), - 'filename_to_upload') - - @patch('requests.post', return_value=Mock(content = sample_xml, status_code=200)) + with patch( + "requests.post", side_effect=[Mock(content=self.error_xml, status_code=400)] + ): + self.assertRaises( + HTTPError, + lims.upload_new_file, + Mock(uri=self.url + "/api/v2/samples/test_sample"), + "filename_to_upload", + ) + + @patch("requests.post", return_value=Mock(content=sample_xml, status_code=200)) def test_route_artifact(self, mocked_post): lims = Lims(self.url, username=self.username, password=self.password) - artifact = Mock(uri=self.url+"/artifact/2") - lims.route_artifacts(artifact_list=[artifact], workflow_uri=self.url+'/api/v2/configuration/workflows/1') + artifact = Mock(uri=self.url + "/artifact/2") + lims.route_artifacts( + artifact_list=[artifact], + workflow_uri=self.url + "/api/v2/configuration/workflows/1", 
+ ) assert mocked_post.call_count == 1 - - def test_tostring(self): lims = Lims(self.url, username=self.username, password=self.password) from xml.etree import ElementTree as ET - a = ET.Element('a') - b = ET.SubElement(a, 'b') - c = ET.SubElement(a, 'c') - d = ET.SubElement(c, 'd') + + a = ET.Element("a") + b = ET.SubElement(a, "b") + c = ET.SubElement(a, "c") + d = ET.SubElement(c, "d") etree = ET.ElementTree(a) - expected_string=b""" + expected_string = b""" """ string = lims.tostring(etree) assert string == expected_string - - - diff --git a/tests/to_rewrite_test_logging.py b/tests/to_rewrite_test_logging.py index f997329..7dce599 100644 --- a/tests/to_rewrite_test_logging.py +++ b/tests/to_rewrite_test_logging.py @@ -9,9 +9,10 @@ file_path = os.path.realpath(__file__) test_dir_path = os.path.dirname(file_path) -tmp_dir_path = test_dir_path + '/nose_tmp_output' +tmp_dir_path = test_dir_path + "/nose_tmp_output" CWD = os.getcwd() + class TestLog(TestCase): def setUp(self): """Create temporary dir if necessary, @@ -24,54 +25,53 @@ def setUp(self): def tearDown(self): """remove temporary output files""" for d in os.listdir(tmp_dir_path): - d_path = os.path.join(tmp_dir_path,d) + d_path = os.path.join(tmp_dir_path, d) try: os.remove(d_path) except: for f in os.listdir(d_path): - f_path = os.path.join(d_path,f) + f_path = os.path.join(d_path, f) os.remove(f_path) os.rmdir(d_path) assert os.listdir(tmp_dir_path) == [] - def test_stderr(self): - """ Stderr should be printed to stderr and logged""" - tmp_file = os.path.join(tmp_dir_path,'tmp_log') + """Stderr should be printed to stderr and logged""" + tmp_file = os.path.join(tmp_dir_path, "tmp_log") saved_stderr = sys.stderr - tmp_stderr = os.path.join(tmp_dir_path,'tmp_stderr') - with open(tmp_stderr,'w') as sys.stderr: + tmp_stderr = os.path.join(tmp_dir_path, "tmp_stderr") + with open(tmp_stderr, "w") as sys.stderr: with EppLogger(tmp_file, prepend=False) as epp_logger: - print('stderr nosetest', file=sys.stderr) + print("stderr nosetest", file=sys.stderr) sys.stderr = saved_stderr - with open(tmp_stderr,'r') as stderr: + with open(tmp_stderr, "r") as stderr: stream_lines = stderr.readlines() - assert 'stderr nosetest' in stream_lines[-1] + assert "stderr nosetest" in stream_lines[-1] - with open(tmp_file,'r') as log_file: + with open(tmp_file, "r") as log_file: log_lines = log_file.readlines() - assert 'stderr nosetest' in log_lines[-1] + assert "stderr nosetest" in log_lines[-1] def test_stdout(self): - """ Stdout should be logged but not printed""" - tmp_file = os.path.join(tmp_dir_path,'tmp_log') + """Stdout should be logged but not printed""" + tmp_file = os.path.join(tmp_dir_path, "tmp_log") saved_stdout = sys.stdout - tmp_stdout = os.path.join(tmp_dir_path,'tmp_stdout') - with open(tmp_stdout,'w') as sys.stdout: + tmp_stdout = os.path.join(tmp_dir_path, "tmp_stdout") + with open(tmp_stdout, "w") as sys.stdout: with EppLogger(tmp_file, prepend=False) as epp_logger: - print('stdout nosetest', file=sys.stdout) + print("stdout nosetest", file=sys.stdout) sys.stdout = saved_stdout - with open(tmp_stdout,'r') as stdout: + with open(tmp_stdout, "r") as stdout: stream_lines = stdout.readlines() assert not stream_lines - with open(tmp_file,'r') as log_file: + with open(tmp_file, "r") as log_file: log_lines = log_file.readlines() - assert 'stdout nosetest' in log_lines[-1] - + assert "stdout nosetest" in log_lines[-1] + def test_exception(self): - """ Exceptions should be printed and logged""" + """Exceptions should be printed and logged""" # 
Hard to test, if exceptions are caught in a try statement, # they will not be printed... pass From 80257e38c2db9c3a6413938ecb7caf858d1d0163 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:19:14 +0200 Subject: [PATCH 04/40] suppress blame on prev commit --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..8cedb97 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# 240627, ruff format // Alfred Kedhammar +429872dc8123126282f73c0c34ab2221fac00c90 From d44f18b60ca6dcd9f52a2bd673081a6944ecf354 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:23:46 +0200 Subject: [PATCH 05/40] run ci on python 3.12 --- .github/workflows/lint-code.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 84cf192..0663e2f 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -11,7 +11,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.12" - name: Install dependencies run: | python -m pip install --upgrade pip @@ -29,7 +29,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.12" - name: Install dependencies run: | python -m pip install --upgrade pip @@ -46,7 +46,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.12" - name: Install dependencies run: | python -m pip install --upgrade pip @@ -67,7 +67,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.12" - name: Install pipreqs run: pip install pipreqs From 6b7e397e9a45a54f387e1764dedcd88f9b54d212 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:23:56 +0200 Subject: [PATCH 06/40] ruff safe fixes --- docs/conf.py | 3 -- examples/attach_delivery_report.py | 5 +-- examples/epp_script.py | 7 ++-- examples/get_application.py | 5 +-- examples/get_artifacts.py | 6 +-- examples/get_containers.py | 6 +-- examples/get_labs.py | 6 +-- examples/get_processes.py | 5 +-- examples/get_projects.py | 6 +-- examples/get_samples.py | 6 +-- examples/get_samples2.py | 3 +- examples/set_project_queued.py | 5 +-- examples/set_sample_name.py | 5 +-- genologics/config.py | 2 - genologics/descriptors.py | 18 ++++----- genologics/entities.py | 61 +++++++++++++++--------------- genologics/epp.py | 55 +++++++++++++-------------- genologics/internal_classes.py | 18 ++++----- genologics/lims.py | 23 +++++------ genologics/lims_utils.py | 6 +-- genologics/test_utils.py | 7 ++-- setup.py | 8 ++-- tests/test_descriptors.py | 12 +++--- tests/test_entities.py | 19 +++++----- tests/test_example.py | 4 +- tests/test_lims.py | 18 ++++----- tests/to_rewrite_test_logging.py | 12 +++--- 27 files changed, 149 insertions(+), 182 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f9feb6b..6ed9f4e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Genologics documentation build configuration file, created by # sphinx-quickstart on Tue Oct 1 11:51:30 2013. @@ -10,8 +9,6 @@ # # All configuration values have a default; values that are commented out # serve to show the default. 
-from __future__ import unicode_literals -import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the diff --git a/examples/attach_delivery_report.py b/examples/attach_delivery_report.py index afb5d35..1011c34 100644 --- a/examples/attach_delivery_report.py +++ b/examples/attach_delivery_report.py @@ -7,12 +7,11 @@ Roman Valls Guimera, Science for Life Laboratory, Stockholm, Sweden. """ -import codecs from pprint import pprint -from genologics.lims import * # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/epp_script.py b/examples/epp_script.py index a8e7678..2cdd3b7 100644 --- a/examples/epp_script.py +++ b/examples/epp_script.py @@ -13,12 +13,13 @@ Johannes Alneberg, Science for Life Laboratory, Stockholm, Sweden. """ +import sys from argparse import ArgumentParser -from genologics.lims import Lims + +from genologics.config import BASEURI, PASSWORD, USERNAME from genologics.entities import Process -from genologics.config import BASEURI, USERNAME, PASSWORD from genologics.epp import EppLogger, attach_file -import sys +from genologics.lims import Lims def main(lims, pid, file): diff --git a/examples/get_application.py b/examples/get_application.py index 2e8adda..3e2777f 100644 --- a/examples/get_application.py +++ b/examples/get_application.py @@ -7,12 +7,11 @@ Roman Valls Guimera, Science for Life Laboratory, Stockholm, Sweden. """ -import codecs from pprint import pprint -from genologics.lims import * # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_artifacts.py b/examples/get_artifacts.py index 331bfd3..e4071d8 100644 --- a/examples/get_artifacts.py +++ b/examples/get_artifacts.py @@ -7,12 +7,10 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -import codecs - -from genologics.lims import Lims # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import Lims # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_containers.py b/examples/get_containers.py index dfe9a33..226d4f1 100644 --- a/examples/get_containers.py +++ b/examples/get_containers.py @@ -7,12 +7,10 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -import codecs - -from genologics.lims import * # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. 
lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_labs.py b/examples/get_labs.py index cf8b832..4697d56 100644 --- a/examples/get_labs.py +++ b/examples/get_labs.py @@ -7,11 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -from __future__ import unicode_literals -from genologics.lims import * - # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_processes.py b/examples/get_processes.py index 0fb87d8..19040c3 100644 --- a/examples/get_processes.py +++ b/examples/get_processes.py @@ -7,10 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -from genologics.lims import * - # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_projects.py b/examples/get_projects.py index 9317230..0a8123a 100644 --- a/examples/get_projects.py +++ b/examples/get_projects.py @@ -7,12 +7,10 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -from __future__ import unicode_literals - -from genologics.lims import * # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_samples.py b/examples/get_samples.py index 6c1e1c4..776b32c 100644 --- a/examples/get_samples.py +++ b/examples/get_samples.py @@ -7,11 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -from genologics.lims import * - # Login parameters for connecting to a LIMS instance. - -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_samples2.py b/examples/get_samples2.py index 23e68fa..29ce75e 100644 --- a/examples/get_samples2.py +++ b/examples/get_samples2.py @@ -7,10 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ +from genologics.config import BASEURI, PASSWORD, USERNAME from genologics.lims import * -from genologics.config import BASEURI, USERNAME, PASSWORD - lims = Lims(BASEURI, USERNAME, PASSWORD) lims.check_version() diff --git a/examples/set_project_queued.py b/examples/set_project_queued.py index 1a139e8..cad09bc 100644 --- a/examples/set_project_queued.py +++ b/examples/set_project_queued.py @@ -9,10 +9,9 @@ import datetime -from genologics.lims import * - # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. 
lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/set_sample_name.py b/examples/set_sample_name.py index b09e5b9..e0d93aa 100644 --- a/examples/set_sample_name.py +++ b/examples/set_sample_name.py @@ -7,10 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ -from genologics.lims import * - # Login parameters for connecting to a LIMS instance. -from genologics.config import BASEURI, USERNAME, PASSWORD +from genologics.config import BASEURI, PASSWORD, USERNAME +from genologics.lims import * # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/genologics/config.py b/genologics/config.py index ce34fa3..5611399 100644 --- a/genologics/config.py +++ b/genologics/config.py @@ -1,6 +1,4 @@ import os -import sys -import warnings try: from ConfigParser import SafeConfigParser diff --git a/genologics/descriptors.py b/genologics/descriptors.py index 5aae925..27ba933 100644 --- a/genologics/descriptors.py +++ b/genologics/descriptors.py @@ -7,24 +7,24 @@ """ import six + from genologics.constants import nsmap try: - from urllib.parse import urlsplit, urlparse, parse_qs, urlunparse + from urllib.parse import parse_qs, urlparse, urlsplit, urlunparse except ImportError: - from urlparse import urlsplit, urlparse, parse_qs, urlunparse + pass -from decimal import Decimal import datetime +import logging import time +from decimal import Decimal from xml.etree import ElementTree -import logging - logger = logging.getLogger(__name__) -class BaseDescriptor(object): +class BaseDescriptor: "Abstract base descriptor for an instance attribute." def __get__(self, instance, cls): @@ -146,7 +146,7 @@ def __set__(self, instance, value): super(BooleanDescriptor, self).__set__(instance, str(value).lower()) -class UdfDictionary(object): +class UdfDictionary: "Dictionary-like container of UDFs, optionally within a UDT." 
def _is_string(self, value): @@ -263,7 +263,7 @@ def __setitem__(self, key, value): raise TypeError("URI UDF requires str or punycode (unicode) value") value = str(value) else: - raise NotImplemented("UDF type '%s'" % vtype) + raise NotImplementedError("UDF type '%s'" % vtype) if not isinstance(value, str): if not self._is_string(value): value = str(value).encode("UTF-8") @@ -692,7 +692,7 @@ def get_dict(self, lims, node): return result -class ProcessTypeParametersDescriptor(object): +class ProcessTypeParametersDescriptor: def __getitem__(self, index): return self.params[index] diff --git a/genologics/entities.py b/genologics/entities.py index c295f1d..ed5b599 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -8,44 +8,43 @@ from genologics.constants import nsmap from genologics.descriptors import ( - StringDescriptor, - StringDictionaryDescriptor, - UdfDictionaryDescriptor, - UdtDictionaryDescriptor, - ExternalidListDescriptor, - EntityDescriptor, BooleanDescriptor, - EntityListDescriptor, - StringAttributeDescriptor, - StringListDescriptor, DimensionDescriptor, - IntegerDescriptor, - PlacementDictionaryDescriptor, + EntityDescriptor, + EntityListDescriptor, + ExternalidListDescriptor, InputOutputMapList, + IntegerAttributeDescriptor, + IntegerDescriptor, LocationDescriptor, - ReagentLabelList, - NestedEntityListDescriptor, - NestedStringListDescriptor, + MultiPageNestedEntityListDescriptor, + NamedStringDescriptor, NestedAttributeListDescriptor, - IntegerAttributeDescriptor, - NestedStringDescriptor, NestedBooleanDescriptor, - MultiPageNestedEntityListDescriptor, + NestedEntityListDescriptor, + NestedStringDescriptor, + NestedStringListDescriptor, + OutputReagentList, + PlacementDictionaryDescriptor, ProcessTypeParametersDescriptor, ProcessTypeProcessInputDescriptor, ProcessTypeProcessOutputDescriptor, - NamedStringDescriptor, - OutputReagentList, + ReagentLabelList, + StringAttributeDescriptor, + StringDescriptor, + StringDictionaryDescriptor, + StringListDescriptor, + UdfDictionaryDescriptor, + UdtDictionaryDescriptor, ) try: - from urllib.parse import urlsplit, urlparse, parse_qs, urlunparse + from urllib.parse import parse_qs, urlparse, urlsplit, urlunparse except ImportError: - from urlparse import urlsplit, urlparse, parse_qs, urlunparse - -from xml.etree import ElementTree + from urlparse import parse_qs, urlparse, urlsplit, urlunparse import logging +from xml.etree import ElementTree logger = logging.getLogger(__name__) @@ -84,8 +83,8 @@ def __init__( def control(self): """this can be used to check the content of the object.""" - logger.info("SAMPLE NAME: {}".format(self.sample_name)) - logger.info("outart : {}".format(self.history_list[0])) + logger.info(f"SAMPLE NAME: {self.sample_name}") + logger.info(f"outart : {self.history_list[0]}") # logger.info ("\nmap :") # for key, value in self.art_map.iteritems(): # logger.info(value[1]+"->"+value[0].id+"->"+key) @@ -94,7 +93,7 @@ def control(self): for key, dict in self.history.items(): logger.info(key) for key2, dict2 in dict.items(): - logger.info("\t{}".format(key2)) + logger.info(f"\t{key2}") for key, value in dict2.items(): logger.info( "\t\t{0}->{1}".format( @@ -298,7 +297,7 @@ def _add_out_art_process_conection_list(self, input_art, out_artifact, history={ return history, input_art -class Entity(object): +class Entity: "Base class for the entities in the LIMS database." 
_TAG = None @@ -873,7 +872,7 @@ def _remove_available_inputs(self, input_art): del self._available_inputs[input_art] else: logger.info( - "using more inputs than replicates for input {0}".format(input_art.uri) + f"using more inputs than replicates for input {input_art.uri}" ) self.available_inputs = self._available_inputs @@ -910,7 +909,7 @@ def get_pools(self): for idx, pool_node in enumerate( self.root.find("pooled-inputs").findall("pool") ): - pool_name = pool_node.attrib.get("name", "Pool #{0}".format(idx + 1)) + pool_name = pool_node.attrib.get("name", f"Pool #{idx + 1}") pool_object = {"name": pool_name, "inputs": [], "output": None} if pool_node.attrib.get("output-uri", False): pool_object["output"] = Artifact( @@ -932,7 +931,7 @@ def set_pools(self, pools): if pool_obj.get("output", False): current_pool.attrib["output-uri"] = pool_obj["output"].uri current_pool.attrib["name"] = pool_obj.get( - "name", "Pool #{0}".format(idx + 1) + "name", f"Pool #{idx + 1}" ) for input_art in pool_obj.get("inputs", []): current_input = ElementTree.SubElement(current_pool, "input") @@ -1176,7 +1175,7 @@ class Step(Entity): def advance(self): self.get() self.root = self.lims.post( - uri="{0}/advance".format(self.uri), + uri=f"{self.uri}/advance", data=self.lims.tostring(ElementTree.ElementTree(self.root)), ) diff --git a/genologics/epp.py b/genologics/epp.py index 58d529e..a26d83e 100644 --- a/genologics/epp.py +++ b/genologics/epp.py @@ -1,4 +1,3 @@ -from __future__ import print_function """Contains useful and reusable code for EPP scripts. @@ -8,18 +7,20 @@ Copyright (C) 2013 Johannes Alneberg """ +import csv import logging -import sys import os +import sys +from logging.handlers import RotatingFileHandler +from shutil import copy +from time import localtime, strftime + import pkg_resources from pkg_resources import DistributionNotFound -from shutil import copy from requests import HTTPError -from genologics.entities import Artifact + from genologics.config import MAIN_LOG -from logging.handlers import RotatingFileHandler -from time import strftime, localtime -import csv +from genologics.entities import Artifact def attach_file(src, resource): @@ -50,19 +51,19 @@ class NotUniqueError(ValueError): def unique_check(l, msg): "Check that l is of length 1, otherwise raise error, with msg appended" if len(l) == 0: - raise EmptyError("No item found for {0}".format(msg)) + raise EmptyError(f"No item found for {msg}") elif len(l) != 1: - raise NotUniqueError("Multiple items found for {0}".format(msg)) + raise NotUniqueError(f"Multiple items found for {msg}") def set_field(element): try: element.put() except (TypeError, HTTPError) as e: - logging.warning("Error while updating element: {0}".format(e)) + logging.warning(f"Error while updating element: {e}") -class EppLogger(object): +class EppLogger: """Context manager for logging module useful for EPP script execution. 
This context manager (CM) automatically logs what script that is executed, @@ -80,17 +81,17 @@ class EppLogger(object): PACKAGE = "genologics" def __enter__(self): - logging.info("Executing file: {0}".format(sys.argv[0])) - logging.info("with parameters: {0}".format(sys.argv[1:])) + logging.info(f"Executing file: {sys.argv[0]}") + logging.info(f"with parameters: {sys.argv[1:]}") try: logging.info( - "Version of {0}: ".format(self.PACKAGE) + f"Version of {self.PACKAGE}: " + pkg_resources.require(self.PACKAGE)[0].version ) except DistributionNotFound as e: logging.error(e) logging.error( - ("Make sure you have the {0} " "package installed").format(self.PACKAGE) + f"Make sure you have the {self.PACKAGE} " "package installed" ) sys.exit(-1) return self @@ -184,21 +185,21 @@ def prepend_old_log(self, external_log_file=None): f.write("=" * 80 + "\n") except HTTPError: # Probably no artifact found, skip prepending print( - ("No log file artifact found " "for id: {0}").format(log_file_name), + ("No log file artifact found " f"for id: {log_file_name}"), file=sys.stderr, ) - except IOError as e: # Probably some path was wrong in copy + except OSError as e: # Probably some path was wrong in copy print( ( "Log could not be prepended, " - "make sure {0} and {1} are " + f"make sure {log_path} and {log_file_name} are " "proper paths." - ).format(log_path, log_file_name), + ), file=sys.stderr, ) raise e - class StreamToLogger(object): + class StreamToLogger: """Fake file-like stream object that redirects writes to a logger instance. @@ -247,7 +248,7 @@ def _pars_file(self, output_type): for outart in outarts: file_path = self.get_file_path(outart) if file_path: - of = open(file_path, "r") + of = open(file_path) file_ext = file_path.split(".")[-1] if file_ext == "csv": pf = [row for row in csv.reader(of.read().splitlines())] @@ -324,16 +325,14 @@ def format_file( "Fix the file to continue. " ).format(",".join(duplicated_lines), name) if not file_info: - error_message = error_message + "Could not format parsed file {0}.".format( - name - ) + error_message = error_message + f"Could not format parsed file {name}." 
if error_message: print(error_message, file=sys.stderr) sys.exit(-1) return file_info -class CopyField(object): +class CopyField: """Class to copy any filed (or udf) from any lims element to any udf on any other lims element @@ -380,7 +379,7 @@ def _set_udf(self, elt, udf_name, val): elt.put() return True except (TypeError, HTTPError) as e: - print("Error while updating element: {0}".format(e), file=sys.stderr) + print(f"Error while updating element: {e}", file=sys.stderr) sys.exit(-1) return False @@ -404,9 +403,7 @@ def _log_before_change(self, changelog_f=None): ) logging.info( - ("Copying from element with id: {0} to element with " " id: {1}").format( - self.s_elt.id, self.d_elt.id - ) + f"Copying from element with id: {self.s_elt.id} to element with " f" id: {self.d_elt.id}" ) def _log_after_change(self): diff --git a/genologics/internal_classes.py b/genologics/internal_classes.py index 6387890..9501d5d 100644 --- a/genologics/internal_classes.py +++ b/genologics/internal_classes.py @@ -1,14 +1,14 @@ +from genologics.constants import nsmap from genologics.descriptors import ( - StringDescriptor, - EntityListDescriptor, BooleanDescriptor, + EntityListDescriptor, IntegerDescriptor, + StringDescriptor, ) from genologics.entities import File, Udfconfig -from genologics.constants import nsmap -class ProcessTypeParameter(object): +class ProcessTypeParameter: instance = None name = None root = None @@ -26,13 +26,13 @@ def __init__(self, pt_instance, node): self.name = self.root.attrib["name"] def __repr__(self): - return "{0}({1})".format(self.__class__.__name__, self.name) + return f"{self.__class__.__name__}({self.name})" def get(self): pass -class ProcessTypeProcessInput(object): +class ProcessTypeProcessInput: instance = None name = None root = None @@ -48,13 +48,13 @@ def __init__(self, pt_instance, node): self.lims = pt_instance.lims def __repr__(self): - return "{0}({1})".format(self.__class__.__name__, self.display_name) + return f"{self.__class__.__name__}({self.display_name})" def get(self): pass -class ProcessTypeProcessOutput(object): +class ProcessTypeProcessOutput: instance = None name = None root = None @@ -74,7 +74,7 @@ def __init__(self, pt_instance, node): self.lims = pt_instance.lims def __repr__(self): - return "{0}({1})".format(self.__class__.__name__, self.output_name) + return f"{self.__class__.__name__}({self.output_name})" def get(self): pass diff --git a/genologics/lims.py b/genologics/lims.py index 83a9cea..78d4b28 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -22,17 +22,18 @@ import os import re from io import BytesIO -import requests # python 2.7, 3+ compatibility from sys import version_info +import requests + if version_info[0] == 2: - from urlparse import urljoin from urllib import urlencode + + from urlparse import urljoin else: - from urllib.parse import urljoin - from urllib.parse import urlencode + from urllib.parse import urlencode, urljoin from .entities import * @@ -56,7 +57,7 @@ def write_with_xml_declaration(self, file, encoding, xml_declaration): TIMEOUT = 16 -class Lims(object): +class Lims: "LIMS interface through which all entity instances are retrieved." 
VERSION = "v2" @@ -101,7 +102,7 @@ def get(self, uri, params=dict()): timeout=TIMEOUT, ) except requests.exceptions.Timeout as e: - raise type(e)("{0}, Error trying to reach {1}".format(str(e), uri)) + raise type(e)(f"{str(e)}, Error trying to reach {uri}") else: return self.parse_response(r) @@ -128,7 +129,7 @@ def upload_new_file(self, entity, file_to_upload): """Upload a file and attach it to the provided entity.""" file_to_upload = os.path.abspath(file_to_upload) if not os.path.isfile(file_to_upload): - raise IOError("{} not found".format(file_to_upload)) + raise OSError(f"{file_to_upload} not found") # Request the storage space on glsstorage # Create the xml to describe the file @@ -674,9 +675,7 @@ def get_batch(self, instances, force=False): ALLOWED_TAGS = ("artifact", "container", "file", "sample") if instances[0]._TAG not in ALLOWED_TAGS: raise TypeError( - "Cannot retrieve batch for instances of type '{}'".format( - instances[0]._TAG - ) + f"Cannot retrieve batch for instances of type '{instances[0]._TAG}'" ) root = ElementTree.Element(nsmap("ri:links")) @@ -708,9 +707,7 @@ def put_batch(self, instances): ALLOWED_TAGS = ("artifact", "container", "file", "sample") if instances[0]._TAG not in ALLOWED_TAGS: raise TypeError( - "Cannot update batch for instances of type '{}'".format( - instances[0]._TAG - ) + f"Cannot update batch for instances of type '{instances[0]._TAG}'" ) root = None # XML root element for batch request diff --git a/genologics/lims_utils.py b/genologics/lims_utils.py index c2d7b40..68f9746 100644 --- a/genologics/lims_utils.py +++ b/genologics/lims_utils.py @@ -1,13 +1,9 @@ #!/usr/bin/env python -from genologics.epp import EppLogger -import logging -import sys -import os +from genologics.config import BASEURI, PASSWORD, USERNAME from genologics.lims import * -from genologics.config import BASEURI, USERNAME, PASSWORD lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/genologics/test_utils.py b/genologics/test_utils.py index 0188d19..63288e3 100644 --- a/genologics/test_utils.py +++ b/genologics/test_utils.py @@ -1,9 +1,8 @@ #!/usr/bin/env python -from __future__ import unicode_literals from xml.etree import ElementTree -import requests +import requests """ In order to use the patched get : @@ -43,7 +42,7 @@ def patched_get(*args, **kwargs): try: return ElementTree.fromstring(XML_DICT[r.url]) except KeyError: - raise Exception("Cannot find mocked xml for uri {0}".format(r.url)) + raise Exception(f"Cannot find mocked xml for uri {r.url}") def dump_source_xml(lims): @@ -52,7 +51,7 @@ def dump_source_xml(lims): final_string = [] final_string.append("{") for k, v in lims.cache.items(): - final_string.append("'{0}':".format(k)) + final_string.append(f"'{k}':") v.get() final_string.append('"""{0}""",'.format(v.xml().replace("\n", "\n"))) final_string.append("}") diff --git a/setup.py b/setup.py index b5e663b..1073f9d 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,9 @@ -from setuptools import setup, find_packages -from genologics.version import __version__ -import sys, os -import subprocess import glob +from setuptools import find_packages, setup + +from genologics.version import __version__ + # Fetch version from git tags. # if git is not available (PyPi package), use stored version.py. 
diff --git a/tests/test_descriptors.py b/tests/test_descriptors.py index 07fe6c3..1705966 100644 --- a/tests/test_descriptors.py +++ b/tests/test_descriptors.py @@ -4,20 +4,20 @@ from xml.etree import ElementTree from genologics.descriptors import ( - StringDescriptor, + BooleanDescriptor, + EntityDescriptor, + IntegerDescriptor, StringAttributeDescriptor, - StringListDescriptor, + StringDescriptor, StringDictionaryDescriptor, - IntegerDescriptor, - BooleanDescriptor, + StringListDescriptor, UdfDictionary, - EntityDescriptor, ) from genologics.entities import Artifact from genologics.lims import Lims if version_info[0] == 2: - from mock import Mock + from unittest.mock import Mock else: from unittest.mock import Mock diff --git a/tests/test_entities.py b/tests/test_entities.py index cda30f5..3e2fee0 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -1,28 +1,27 @@ -import operator from sys import version_info from unittest import TestCase from xml.etree import ElementTree from genologics.entities import ( - StepActions, - Researcher, Artifact, - Step, - StepPlacements, - StepPools, Container, - Stage, + Project, ReagentKit, ReagentLot, + Researcher, Sample, - Project, + Stage, + Step, + StepActions, + StepPlacements, + StepPools, ) from genologics.lims import Lims if version_info[0] == 2: - from mock import patch, Mock + from unittest.mock import Mock, patch else: - from unittest.mock import patch, Mock + from unittest.mock import Mock, patch url = "http://testgenologics.com:4040" diff --git a/tests/test_example.py b/tests/test_example.py index 8a25e01..8b30b8c 100644 --- a/tests/test_example.py +++ b/tests/test_example.py @@ -2,11 +2,11 @@ from unittest import TestCase, main from genologics import test_utils -from genologics.lims import Lims from genologics.entities import Project +from genologics.lims import Lims if version_info[0] == 2: - from mock import patch + from unittest.mock import patch else: from unittest.mock import patch diff --git a/tests/test_lims.py b/tests/test_lims.py index 15bebc3..0200737 100644 --- a/tests/test_lims.py +++ b/tests/test_lims.py @@ -1,4 +1,3 @@ -import xml from unittest import TestCase from requests.exceptions import HTTPError @@ -16,22 +15,23 @@ from sys import version_info if version_info[0] == 2: - from mock import patch, Mock + from unittest.mock import Mock, patch + import __builtin__ as builtins else: - from unittest.mock import patch, Mock import builtins + from unittest.mock import Mock, patch class TestLims(TestCase): url = "http://testgenologics.com:4040" username = "test" password = "password" - sample_xml = """ + sample_xml = f""" -""".format(url=url) +""" error_xml = """ Generic error message @@ -44,7 +44,7 @@ def test_get_uri(self): lims = Lims(self.url, username=self.username, password=self.password) assert lims.get_uri( "artifacts", sample_name="test_sample" - ) == "{url}/api/v2/artifacts?sample_name=test_sample".format(url=self.url) + ) == f"{self.url}/api/v2/artifacts?sample_name=test_sample" def test_parse_response(self): lims = Lims(self.url, username=self.username, password=self.password) @@ -66,7 +66,7 @@ def test_parse_response(self): def test_get(self, mocked_instance): lims = Lims(self.url, username=self.username, password=self.password) r = lims.get( - "{url}/api/v2/artifacts?sample_name=test_sample".format(url=self.url) + f"{self.url}/api/v2/artifacts?sample_name=test_sample" ) assert r is not None assert callable(r.find) @@ -82,7 +82,7 @@ def test_get(self, mocked_instance): def test_put(self): lims = 
Lims(self.url, username=self.username, password=self.password) - uri = "{url}/api/v2/samples/test_sample".format(url=self.url) + uri = f"{self.url}/api/v2/samples/test_sample" with patch( "requests.put", return_value=Mock(content=self.sample_xml, status_code=200) ) as mocked_put: @@ -96,7 +96,7 @@ def test_put(self): def test_post(self): lims = Lims(self.url, username=self.username, password=self.password) - uri = "{url}/api/v2/samples".format(url=self.url) + uri = f"{self.url}/api/v2/samples" with patch( "requests.post", return_value=Mock(content=self.sample_xml, status_code=200) ) as mocked_put: diff --git a/tests/to_rewrite_test_logging.py b/tests/to_rewrite_test_logging.py index 7dce599..98aa898 100644 --- a/tests/to_rewrite_test_logging.py +++ b/tests/to_rewrite_test_logging.py @@ -1,10 +1,10 @@ #!/usr/bin/env python -from __future__ import print_function -from os.path import isdir import os import sys +from os.path import isdir from unittest import TestCase + from genologics.epp import EppLogger file_path = os.path.realpath(__file__) @@ -45,11 +45,11 @@ def test_stderr(self): with EppLogger(tmp_file, prepend=False) as epp_logger: print("stderr nosetest", file=sys.stderr) sys.stderr = saved_stderr - with open(tmp_stderr, "r") as stderr: + with open(tmp_stderr) as stderr: stream_lines = stderr.readlines() assert "stderr nosetest" in stream_lines[-1] - with open(tmp_file, "r") as log_file: + with open(tmp_file) as log_file: log_lines = log_file.readlines() assert "stderr nosetest" in log_lines[-1] @@ -62,11 +62,11 @@ def test_stdout(self): with EppLogger(tmp_file, prepend=False) as epp_logger: print("stdout nosetest", file=sys.stdout) sys.stdout = saved_stdout - with open(tmp_stdout, "r") as stdout: + with open(tmp_stdout) as stdout: stream_lines = stdout.readlines() assert not stream_lines - with open(tmp_file, "r") as log_file: + with open(tmp_file) as log_file: log_lines = log_file.readlines() assert "stdout nosetest" in log_lines[-1] From a894d677b99a242b107edc0ab2b4b9d9e5293a4a Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:24:33 +0200 Subject: [PATCH 07/40] suppress blame --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 8cedb97..a4394ee 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,2 +1,4 @@ # 240627, ruff format // Alfred Kedhammar 429872dc8123126282f73c0c34ab2221fac00c90 +# 240627, ruff safe fixes // Alfred Kedhammar +6b7e397e9a45a54f387e1764dedcd88f9b54d212 From 3c757d16cf348fd677b10116a00b94ac96192488 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:30:21 +0200 Subject: [PATCH 08/40] fix most star imports --- examples/attach_delivery_report.py | 2 +- examples/epp_script.py | 2 +- examples/get_application.py | 2 +- examples/get_containers.py | 3 +-- examples/get_labs.py | 2 +- examples/get_processes.py | 2 +- examples/get_projects.py | 3 +-- examples/get_samples.py | 2 +- examples/get_samples2.py | 2 +- examples/set_project_queued.py | 2 +- examples/set_sample_name.py | 2 +- genologics/lims.py | 1 + genologics/lims_utils.py | 3 +-- 13 files changed, 13 insertions(+), 15 deletions(-) diff --git a/examples/attach_delivery_report.py b/examples/attach_delivery_report.py index 1011c34..9542899 100644 --- a/examples/attach_delivery_report.py +++ b/examples/attach_delivery_report.py @@ -11,7 +11,7 @@ # Login parameters for connecting to a LIMS instance. 
from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Project # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/epp_script.py b/examples/epp_script.py index 2cdd3b7..78d8fb5 100644 --- a/examples/epp_script.py +++ b/examples/epp_script.py @@ -17,7 +17,7 @@ from argparse import ArgumentParser from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.entities import Process +from genologics.entities import Artifact, Process from genologics.epp import EppLogger, attach_file from genologics.lims import Lims diff --git a/examples/get_application.py b/examples/get_application.py index 3e2777f..57adf26 100644 --- a/examples/get_application.py +++ b/examples/get_application.py @@ -11,7 +11,7 @@ # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Project # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_containers.py b/examples/get_containers.py index 226d4f1..d4af08a 100644 --- a/examples/get_containers.py +++ b/examples/get_containers.py @@ -7,10 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ - # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_labs.py b/examples/get_labs.py index 4697d56..fc690b3 100644 --- a/examples/get_labs.py +++ b/examples/get_labs.py @@ -9,7 +9,7 @@ # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lab, Lims # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_processes.py b/examples/get_processes.py index 19040c3..6aacb27 100644 --- a/examples/get_processes.py +++ b/examples/get_processes.py @@ -9,7 +9,7 @@ # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Process # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_projects.py b/examples/get_projects.py index 0a8123a..41e4bb7 100644 --- a/examples/get_projects.py +++ b/examples/get_projects.py @@ -7,10 +7,9 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ - # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Project # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_samples.py b/examples/get_samples.py index 776b32c..7c448d7 100644 --- a/examples/get_samples.py +++ b/examples/get_samples.py @@ -9,7 +9,7 @@ # Login parameters for connecting to a LIMS instance. 
from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Project, Sample # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/get_samples2.py b/examples/get_samples2.py index 29ce75e..aa30f5f 100644 --- a/examples/get_samples2.py +++ b/examples/get_samples2.py @@ -8,7 +8,7 @@ """ from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Project lims = Lims(BASEURI, USERNAME, PASSWORD) lims.check_version() diff --git a/examples/set_project_queued.py b/examples/set_project_queued.py index cad09bc..f368c93 100644 --- a/examples/set_project_queued.py +++ b/examples/set_project_queued.py @@ -11,7 +11,7 @@ # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Project # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/examples/set_sample_name.py b/examples/set_sample_name.py index e0d93aa..7073aa3 100644 --- a/examples/set_sample_name.py +++ b/examples/set_sample_name.py @@ -9,7 +9,7 @@ # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Lims, Sample # Create the LIMS interface instance, and check the connection and version. lims = Lims(BASEURI, USERNAME, PASSWORD) diff --git a/genologics/lims.py b/genologics/lims.py index 78d4b28..44e0d2d 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -35,6 +35,7 @@ else: from urllib.parse import urlencode, urljoin +from xml.etree import ElementTree from .entities import * diff --git a/genologics/lims_utils.py b/genologics/lims_utils.py index 68f9746..19cd594 100644 --- a/genologics/lims_utils.py +++ b/genologics/lims_utils.py @@ -1,9 +1,8 @@ #!/usr/bin/env python - from genologics.config import BASEURI, PASSWORD, USERNAME -from genologics.lims import * +from genologics.lims import Artifact, Lims lims = Lims(BASEURI, USERNAME, PASSWORD) From f6a9c3e509216b4da64b370f5fd9e5cde64bc43a Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:31:40 +0200 Subject: [PATCH 09/40] ruff unsafe fixes, passing --- genologics/config.py | 2 +- genologics/constants.py | 2 +- genologics/descriptors.py | 24 ++++++++++----------- genologics/entities.py | 28 ++++++++++++------------- genologics/epp.py | 2 +- genologics/lims.py | 36 +++++++------------------------- genologics/test_utils.py | 2 +- tests/test_descriptors.py | 9 ++------ tests/test_entities.py | 27 ++++++++++-------------- tests/test_example.py | 10 ++------- tests/test_lims.py | 21 +++++++------------ tests/to_rewrite_test_logging.py | 4 ++-- 12 files changed, 62 insertions(+), 105 deletions(-) diff --git a/genologics/config.py b/genologics/config.py index 5611399..c4b2b89 100644 --- a/genologics/config.py +++ b/genologics/config.py @@ -39,7 +39,7 @@ def get_config_info(config_file): def load_config(specified_config=None): - if specified_config != None: + if specified_config is not None: config_file = specified_config else: config = SafeConfigParser() diff --git a/genologics/constants.py b/genologics/constants.py index 29ce4d6..981b1d7 100644 --- a/genologics/constants.py +++ b/genologics/constants.py @@ -52,4 +52,4 @@ def nsmap(tag): parts = tag.split(":") if 
len(parts) != 2: raise ValueError("no namespace specifier in tag") - return "{%s}%s" % (_NSMAP[parts[0]], parts[1]) + return f"{{{_NSMAP[parts[0]]}}}{parts[1]}" diff --git a/genologics/descriptors.py b/genologics/descriptors.py index 27ba933..4ab60cd 100644 --- a/genologics/descriptors.py +++ b/genologics/descriptors.py @@ -117,7 +117,7 @@ class IntegerDescriptor(StringDescriptor): """ def __get__(self, instance, cls): - text = super(IntegerDescriptor, self).__get__(instance, cls) + text = super().__get__(instance, cls) if text is not None: return int(text) @@ -138,12 +138,12 @@ class BooleanDescriptor(StringDescriptor): """ def __get__(self, instance, cls): - text = super(BooleanDescriptor, self).__get__(instance, cls) + text = super().__get__(instance, cls) if text is not None: return text.lower() == "true" def __set__(self, instance, value): - super(BooleanDescriptor, self).__set__(instance, str(value).lower()) + super().__set__(instance, str(value).lower()) class UdfDictionary: @@ -170,7 +170,7 @@ def rootnode(self): return self._rootnode def get_udt(self): - if self._udt == True: + if self._udt is True: return None else: return self._udt @@ -263,7 +263,7 @@ def __setitem__(self, key, value): raise TypeError("URI UDF requires str or punycode (unicode) value") value = str(value) else: - raise NotImplementedError("UDF type '%s'" % vtype) + raise NotImplementedError(f"UDF type '{vtype}'") if not isinstance(value, str): if not self._is_string(value): value = str(value).encode("UTF-8") @@ -283,7 +283,7 @@ def __setitem__(self, key, value): value = str(value) else: raise NotImplementedError( - "Cannot handle value of type '%s'" " for UDF" % type(value) + f"Cannot handle value of type '{type(value)}'" " for UDF" ) if self._udt: root = self.rootnode.find(nsmap("udf:type")) @@ -399,7 +399,7 @@ class EntityDescriptor(TagDescriptor): "An instance attribute referencing another entity instance." 
def __init__(self, tag, klass): - super(EntityDescriptor, self).__init__(tag) + super().__init__(tag) self.klass = klass def __get__(self, instance, cls): @@ -438,7 +438,7 @@ def __get__(self, instance, cls): class NestedBooleanDescriptor(TagDescriptor): def __init__(self, tag, *args): - super(NestedBooleanDescriptor, self).__init__(tag) + super().__init__(tag) self.rootkeys = args def __get__(self, instance, cls): @@ -459,7 +459,7 @@ def __set__(self, instance, value): class NestedStringDescriptor(TagDescriptor): def __init__(self, tag, *args): - super(NestedStringDescriptor, self).__init__(tag) + super().__init__(tag) self.rootkeys = args def __get__(self, instance, cls): @@ -640,7 +640,7 @@ def __set__(self, instance, value): out_r, "output", attrib={"uri": artifact.uri} ) for reagent_label_name in value[artifact]: - rea_l = ElementTree.SubElement( + ElementTree.SubElement( out_a, "reagent-label", attrib={"name": reagent_label_name} ) @@ -728,7 +728,7 @@ def __delitem__(self, index): def __init__(self): self._inputs = [] self.tag = "process-input" - super(ProcessTypeProcessInputDescriptor, self).__init__(tag=self.tag) + super().__init__(tag=self.tag) def __get__(self, instance, owner): from genologics.internal_classes import ProcessTypeProcessInput @@ -754,7 +754,7 @@ def __delitem__(self, index): def __init__(self): self._inputs = [] self.tag = "process-output" - super(ProcessTypeProcessOutputDescriptor, self).__init__(tag=self.tag) + super().__init__(tag=self.tag) def __get__(self, instance, owner): from genologics.internal_classes import ProcessTypeProcessOutput diff --git a/genologics/entities.py b/genologics/entities.py index ed5b599..094256f 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -96,7 +96,7 @@ def control(self): logger.info(f"\t{key2}") for key, value in dict2.items(): logger.info( - "\t\t{0}->{1}".format( + "\t\t{}->{}".format( key, (value if value is not None else "None") ) ) @@ -134,7 +134,6 @@ def alternate_history(self, out_art, in_art=None): qc processes too) and puts everything in a dictionnary. """ history = {} - hist_list = [] # getting the list of all expected analytes. 
artifacts = self.lims.get_artifacts( sample_name=self.sample_name, type="Analyte", resolve=False @@ -332,10 +331,10 @@ def __init__(self, lims, uri=None, id=None, _create_new=False): self.root = None def __str__(self): - return "%s(%s)" % (self.__class__.__name__, self.id) + return f"{self.__class__.__name__}({self.id})" def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, self.uri) + return f"{self.__class__.__name__}({self.uri})" @property def uri(self): @@ -390,8 +389,7 @@ def _create(cls, lims, creation_tag=None, udfs=None, **kwargs): setattr(instance, attribute, kwargs.get(attribute)) else: raise TypeError( - "%s create: got an unexpected keyword argument '%s'" - % (cls.__name__, attribute) + f"{cls.__name__} create: got an unexpected keyword argument '{attribute}'" ) return instance @@ -458,7 +456,7 @@ class Researcher(Entity): @property def name(self): - return "%s %s" % (self.first_name, self.last_name) + return f"{self.first_name} {self.last_name}" class Permission(Entity): @@ -543,8 +541,8 @@ def create(cls, lims, container, position, udfs=None, **kwargs): if udfs is None: udfs = {} if not isinstance(container, Container): - raise TypeError("%s is not of type Container" % container) - instance = super(Sample, cls)._create( + raise TypeError(f"{container} is not of type Container") + instance = super()._create( lims, creation_tag="samplecreation", udfs=udfs, **kwargs ) @@ -623,7 +621,7 @@ class Processtype(Entity): _PREFIX = "ptp" def __init__(self, lims, uri=None, id=None, _create_new=False): - super(Processtype, self).__init__(lims, uri, id, _create_new) + super().__init__(lims, uri, id, _create_new) self.parameters = ProcessTypeParametersDescriptor(self) name = StringAttributeDescriptor("name") @@ -1193,13 +1191,13 @@ def create(cls, lims, protocol_step, container_type, inputs, **kwargs): These need to be queued for that step for the query to be successful. """ if not isinstance(protocol_step, ProtocolStep): - raise TypeError("%s is not of type ProtocolStep" % protocol_step) + raise TypeError(f"{protocol_step} is not of type ProtocolStep") elif not isinstance(container_type, Containertype): - raise TypeError("%s is not of type Containertype" % container_type) + raise TypeError(f"{container_type} is not of type Containertype") elif not all([isinstance(input, Artifact) for input in inputs]): - raise TypeError("%s does not contain only items of type Artifact" % inputs) + raise TypeError(f"{inputs} does not contain only items of type Artifact") - instance = super(Step, cls)._create( + instance = super()._create( lims, creation_tag="step-creation", **kwargs ) @@ -1307,7 +1305,7 @@ class ReagentType(Entity): name = StringAttributeDescriptor("name") def __init__(self, lims, uri=None, id=None): - super(ReagentType, self).__init__(lims, uri, id) + super().__init__(lims, uri, id) assert self.uri is not None self.root = lims.get(self.uri) self.sequence = None diff --git a/genologics/epp.py b/genologics/epp.py index a26d83e..14b8b75 100644 --- a/genologics/epp.py +++ b/genologics/epp.py @@ -321,7 +321,7 @@ def format_file( keys = line if duplicated_lines: error_message = ( - "Row names {0} occurs more than once in file {1}. " + "Row names {} occurs more than once in file {}. " "Fix the file to continue. 
" ).format(",".join(duplicated_lines), name) if not file_info: diff --git a/genologics/lims.py b/genologics/lims.py index 44e0d2d..8a41052 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -24,36 +24,16 @@ from io import BytesIO # python 2.7, 3+ compatibility -from sys import version_info +from urllib.parse import urlencode, urljoin +from xml.etree import ElementTree import requests -if version_info[0] == 2: - from urllib import urlencode - - from urlparse import urljoin -else: - from urllib.parse import urlencode, urljoin - -from xml.etree import ElementTree - from .entities import * # Python 2.6 support work-arounds # - Exception ElementTree.ParseError does not exist # - ElementTree.ElementTree.write does not take arg. xml_declaration -if version_info[:2] < (2, 7): - from xml.parsers import expat - - ElementTree.ParseError = expat.ExpatError - p26_write = ElementTree.ElementTree.write - - def write_with_xml_declaration(self, file, encoding, xml_declaration): - assert xml_declaration is True # Support our use case only - file.write("\n") - p26_write(self, file, encoding=encoding) - - ElementTree.ElementTree.write = write_with_xml_declaration TIMEOUT = 16 @@ -223,9 +203,9 @@ def validate_response(self, response, accept_status_codes=[200]): node = root.find("message") if node is None: response.raise_for_status() - message = "%s" % (response.status_code) + message = f"{response.status_code}" else: - message = "%s: %s" % (response.status_code, node.text) + message = f"{response.status_code}: {node.text}" node = root.find("suggested-actions") if node is not None: message += " " + node.text @@ -622,11 +602,11 @@ def _get_params_udf(self, udf=dict(), udtname=None, udt=dict()): "Convert UDF-ish arguments to a params dictionary." result = dict() for key, value in udf.items(): - result["udf.%s" % key] = value + result[f"udf.{key}"] = value if udtname is not None: result["udt.name"] = udtname for key, value in udt.items(): - result["udt.%s" % key] = value + result[f"udt.{key}"] = value return result def _get_instances(self, klass, add_info=None, params=dict()): @@ -719,7 +699,7 @@ def put_batch(self, instances): # Tag is art:details, con:details, etc. 
example_root = instance.root ns_uri = re.match("{(.*)}.*", example_root.tag).group(1) - root = ElementTree.Element("{%s}details" % (ns_uri)) + root = ElementTree.Element(f"{{{ns_uri}}}details") root.append(instance.root) @@ -770,7 +750,7 @@ def create_container(self, container_type, name=None): nm = ElementTree.SubElement(el, "name") nm.text = name - ty = ElementTree.SubElement( + ElementTree.SubElement( el, "type", attrib={"uri": container_type.uri, "name": container_type.name} ) ret_el = self.post( diff --git a/genologics/test_utils.py b/genologics/test_utils.py index 63288e3..4767179 100644 --- a/genologics/test_utils.py +++ b/genologics/test_utils.py @@ -53,7 +53,7 @@ def dump_source_xml(lims): for k, v in lims.cache.items(): final_string.append(f"'{k}':") v.get() - final_string.append('"""{0}""",'.format(v.xml().replace("\n", "\n"))) + final_string.append('"""{}""",'.format(v.xml().replace("\n", "\n"))) final_string.append("}") return "\n".join(final_string) diff --git a/tests/test_descriptors.py b/tests/test_descriptors.py index 1705966..44f548f 100644 --- a/tests/test_descriptors.py +++ b/tests/test_descriptors.py @@ -1,6 +1,6 @@ from io import BytesIO -from sys import version_info from unittest import TestCase +from unittest.mock import Mock from xml.etree import ElementTree from genologics.descriptors import ( @@ -16,11 +16,6 @@ from genologics.entities import Artifact from genologics.lims import Lims -if version_info[0] == 2: - from unittest.mock import Mock -else: - from unittest.mock import Mock - class TestDescriptor(TestCase): def _make_desc(self, klass, *args, **kwargs): @@ -97,7 +92,7 @@ def setUp(self): def test__get__(self): bd = self._make_desc(BooleanDescriptor, "istest") - assert bd.__get__(self.instance, None) == True + assert bd.__get__(self.instance, None) is True def test__set__(self): bd = self._make_desc(BooleanDescriptor, "istest") diff --git a/tests/test_entities.py b/tests/test_entities.py index 3e2fee0..1e36331 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -1,5 +1,5 @@ -from sys import version_info from unittest import TestCase +from unittest.mock import Mock, patch from xml.etree import ElementTree from genologics.entities import ( @@ -18,11 +18,6 @@ ) from genologics.lims import Lims -if version_info[0] == 2: - from unittest.mock import Mock, patch -else: - from unittest.mock import Mock, patch - url = "http://testgenologics.com:4040" ######## @@ -180,19 +175,19 @@ def test_pass(self): def elements_equal(e1, e2): if e1.tag != e2.tag: - print("Tag: %s != %s" % (e1.tag, e2.tag)) + print(f"Tag: {e1.tag} != {e2.tag}") return False if e1.text and e2.text and e1.text.strip() != e2.text.strip(): - print("Text: %s != %s" % (e1.text.strip(), e2.text.strip())) + print(f"Text: {e1.text.strip()} != {e2.text.strip()}") return False if e1.tail and e2.tail and e1.tail.strip() != e2.tail.strip(): - print("Tail: %s != %s" % (e1.tail.strip(), e2.tail.strip())) + print(f"Tail: {e1.tail.strip()} != {e2.tail.strip()}") return False if e1.attrib != e2.attrib: - print("Attrib: %s != %s" % (e1.attrib, e2.attrib)) + print(f"Attrib: {e1.attrib} != {e2.attrib}") return False if len(e1) != len(e2): - print("length %s (%s) != length (%s) " % (e1.tag, len(e1), e2.tag, len(e2))) + print(f"length {e1.tag} ({len(e1)}) != length ({e2.tag}) ") return False return all( elements_equal(c1, c2) @@ -350,7 +345,7 @@ def test_set_placements_list(self): c1 = Container( uri="http://testgenologics.com:4040/containers/c1", lims=self.lims ) - c2 = Container( + Container( 
uri="http://testgenologics.com:4040/containers/c2", lims=self.lims ) @@ -441,14 +436,14 @@ def test_parse_entity(self): assert r.name == "regaentkitname" assert r.supplier == "reagentProvider" assert r.website == "www.reagentprovider.com" - assert r.archived == False + assert r.archived is False def test_create_entity(self): with patch( "genologics.lims.requests.post", return_value=Mock(content=self.reagentkit_xml, status_code=201), ): - r = ReagentKit.create( + ReagentKit.create( self.lims, name="regaentkitname", supplier="reagentProvider", @@ -482,7 +477,7 @@ def test_create_entity(self): with patch( "genologics.lims.requests.post", return_value=Mock(content=self.reagentlot_xml, status_code=201), - ) as patch_post: + ): l = ReagentLot.create( self.lims, reagent_kit=r, @@ -504,7 +499,7 @@ def test_create_entity(self): "genologics.lims.requests.post", return_value=Mock(content=self.sample_creation, status_code=201), ) as patch_post: - l = Sample.create( + Sample.create( self.lims, project=Project(self.lims, uri="project"), container=Container(self.lims, uri="container"), diff --git a/tests/test_example.py b/tests/test_example.py index 8b30b8c..34ecf01 100644 --- a/tests/test_example.py +++ b/tests/test_example.py @@ -1,16 +1,10 @@ -from sys import version_info from unittest import TestCase, main +from unittest.mock import patch from genologics import test_utils from genologics.entities import Project from genologics.lims import Lims -if version_info[0] == 2: - from unittest.mock import patch -else: - from unittest.mock import patch - - test_utils.XML_DICT = { "https://test.claritylims.com/api/v2/projects/BLA1": """ @@ -27,7 +21,7 @@ class TestExample(TestCase): def __init__(self, *args, **kwargs): self.lims = Lims("https://test.claritylims.com", "user", "password") - super(TestExample, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def test_project_example(self): with patch("genologics.lims.Lims.get", side_effect=test_utils.patched_get): diff --git a/tests/test_lims.py b/tests/test_lims.py index 0200737..c590d4d 100644 --- a/tests/test_lims.py +++ b/tests/test_lims.py @@ -9,18 +9,13 @@ except NameError: # callable() doesn't exist in Python 3.0 and 3.1 import collections - callable = lambda obj: isinstance(obj, collections.Callable) + def callable(obj): + return isinstance(obj, collections.Callable) -from sys import version_info -if version_info[0] == 2: - from unittest.mock import Mock, patch - - import __builtin__ as builtins -else: - import builtins - from unittest.mock import Mock, patch +import builtins +from unittest.mock import Mock, patch class TestLims(TestCase): @@ -86,7 +81,7 @@ def test_put(self): with patch( "requests.put", return_value=Mock(content=self.sample_xml, status_code=200) ) as mocked_put: - response = lims.put(uri=uri, data=self.sample_xml) + lims.put(uri=uri, data=self.sample_xml) assert mocked_put.call_count == 1 with patch( "requests.put", return_value=Mock(content=self.error_xml, status_code=400) @@ -100,7 +95,7 @@ def test_post(self): with patch( "requests.post", return_value=Mock(content=self.sample_xml, status_code=200) ) as mocked_put: - response = lims.post(uri=uri, data=self.sample_xml) + lims.post(uri=uri, data=self.sample_xml) assert mocked_put.call_count == 1 with patch( "requests.post", return_value=Mock(content=self.error_xml, status_code=400) @@ -163,9 +158,9 @@ def test_tostring(self): from xml.etree import ElementTree as ET a = ET.Element("a") - b = ET.SubElement(a, "b") + ET.SubElement(a, "b") c = ET.SubElement(a, "c") - d = 
ET.SubElement(c, "d") + ET.SubElement(c, "d") etree = ET.ElementTree(a) expected_string = b""" """ diff --git a/tests/to_rewrite_test_logging.py b/tests/to_rewrite_test_logging.py index 98aa898..0fd8f79 100644 --- a/tests/to_rewrite_test_logging.py +++ b/tests/to_rewrite_test_logging.py @@ -42,7 +42,7 @@ def test_stderr(self): saved_stderr = sys.stderr tmp_stderr = os.path.join(tmp_dir_path, "tmp_stderr") with open(tmp_stderr, "w") as sys.stderr: - with EppLogger(tmp_file, prepend=False) as epp_logger: + with EppLogger(tmp_file, prepend=False): print("stderr nosetest", file=sys.stderr) sys.stderr = saved_stderr with open(tmp_stderr) as stderr: @@ -59,7 +59,7 @@ def test_stdout(self): saved_stdout = sys.stdout tmp_stdout = os.path.join(tmp_dir_path, "tmp_stdout") with open(tmp_stdout, "w") as sys.stdout: - with EppLogger(tmp_file, prepend=False) as epp_logger: + with EppLogger(tmp_file, prepend=False): print("stdout nosetest", file=sys.stdout) sys.stdout = saved_stdout with open(tmp_stdout) as stdout: From ff50c12ea4a808e9f42e247539bdc7f170340f33 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:36:17 +0200 Subject: [PATCH 10/40] remove unused imports --- genologics/descriptors.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/genologics/descriptors.py b/genologics/descriptors.py index 4ab60cd..64ca242 100644 --- a/genologics/descriptors.py +++ b/genologics/descriptors.py @@ -6,21 +6,16 @@ Copyright (C) 2012 Per Kraulis """ -import six - -from genologics.constants import nsmap - -try: - from urllib.parse import parse_qs, urlparse, urlsplit, urlunparse -except ImportError: - pass - import datetime import logging import time from decimal import Decimal from xml.etree import ElementTree +import six + +from genologics.constants import nsmap + logger = logging.getLogger(__name__) From fbc6537d1e4775b4acf21040e1686c61c7277802 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:40:20 +0200 Subject: [PATCH 11/40] explicate imports --- genologics/lims.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/genologics/lims.py b/genologics/lims.py index 8a41052..2264736 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -29,7 +29,28 @@ import requests -from .entities import * +from genologics.constants import nsmap + +from .entities import ( + Artifact, + Automation, + Container, + Containertype, + File, + Instrument, + Lab, + Process, + Processtype, + Project, + Protocol, + ReagentKit, + ReagentLot, + ReagentType, + Researcher, + Sample, + Udfconfig, + Workflow, +) # Python 2.6 support work-arounds # - Exception ElementTree.ParseError does not exist @@ -234,7 +255,7 @@ def get_udfs( ): """Get a list of udfs, filtered by keyword arguments. name: name of udf - attach_to_name: item in the system, to wich the udf is attached, such as + attach_to_name: item in the system, to which the udf is attached, such as Sample, Project, Container, or the name of a process. attach_to_category: If 'attach_to_name' is the name of a process, such as 'CaliperGX QC (DNA)', then you need to set attach_to_category='ProcessType'. Must not be provided otherwise. 
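
The get_udfs docstring touched in the hunk above documents the three filter arguments: name, attach_to_name and attach_to_category. A minimal usage sketch, assuming credentials are available via genologics.config as in the bundled examples; the UDF name below is hypothetical and used only for illustration:

    from genologics.config import BASEURI, PASSWORD, USERNAME
    from genologics.lims import Lims

    lims = Lims(BASEURI, USERNAME, PASSWORD)

    # UDF configurations attached to the Sample entity; "Library Prep Kit"
    # is a hypothetical field name used only for illustration.
    sample_udfs = lims.get_udfs(name="Library Prep Kit", attach_to_name="Sample")
    for udf in sample_udfs:
        print(udf)

    # For a UDF attached to a process type, attach_to_category is required:
    process_udfs = lims.get_udfs(
        attach_to_name="CaliperGX QC (DNA)", attach_to_category="ProcessType"
    )
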
From a12a77b4ece8289d761992766cc34108bfc7161f Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:43:20 +0200 Subject: [PATCH 12/40] merge duplicate function --- genologics/lims.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/genologics/lims.py b/genologics/lims.py index 2264736..c6c92cb 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -269,13 +269,13 @@ def get_udfs( ) return self._get_instances(Udfconfig, add_info=add_info, params=params) - def get_reagent_types(self, name=None, start_index=None): + def get_reagent_types(self, name=None, start_index=None, add_info=False): """Get a list of reqgent types, filtered by keyword arguments. name: reagent type name, or list of names. start_index: Page to retrieve; all if None. """ params = self._get_params(name=name, start_index=start_index) - return self._get_instances(ReagentType, params=params) + return self._get_instances(ReagentType, add_info=add_info, params=params) def get_labs( self, @@ -576,10 +576,6 @@ def get_process_types(self, displayname=None, add_info=False): params = self._get_params(displayname=displayname) return self._get_instances(Processtype, add_info=add_info, params=params) - def get_reagent_types(self, name=None, add_info=False): - params = self._get_params(name=name) - return self._get_instances(ReagentType, add_info=add_info, params=params) - def get_protocols(self, name=None, add_info=False): """Get the list of existing protocols on the system""" params = self._get_params(name=name) From de179ab0bd91363a2d434b86ea690c6a26111e2d Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:44:30 +0200 Subject: [PATCH 13/40] remove duplicate, autopassing test func --- tests/test_entities.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/tests/test_entities.py b/tests/test_entities.py index 1e36331..fe88354 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -168,11 +168,6 @@ """ -class TestEntities(TestCase): - def test_pass(self): - pass - - def elements_equal(e1, e2): if e1.tag != e2.tag: print(f"Tag: {e1.tag} != {e2.tag}") @@ -345,9 +340,7 @@ def test_set_placements_list(self): c1 = Container( uri="http://testgenologics.com:4040/containers/c1", lims=self.lims ) - Container( - uri="http://testgenologics.com:4040/containers/c2", lims=self.lims - ) + Container(uri="http://testgenologics.com:4040/containers/c2", lims=self.lims) s = StepPlacements( uri=self.lims.get_uri("steps", "s1", "placements"), lims=self.lims From 842213a583b70749e648af3b0c8850f19229a5f5 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 11:47:25 +0200 Subject: [PATCH 14/40] add isinstance --- genologics/epp.py | 10 ++++------ tests/test_descriptors.py | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/genologics/epp.py b/genologics/epp.py index 14b8b75..8f241ed 100644 --- a/genologics/epp.py +++ b/genologics/epp.py @@ -1,4 +1,3 @@ - """Contains useful and reusable code for EPP scripts. Classes, methods and exceptions. 
@@ -90,9 +89,7 @@ def __enter__(self): ) except DistributionNotFound as e: logging.error(e) - logging.error( - f"Make sure you have the {self.PACKAGE} " "package installed" - ) + logging.error(f"Make sure you have the {self.PACKAGE} " "package installed") sys.exit(-1) return self @@ -292,7 +289,7 @@ def format_file( error_message = "" duplicated_lines = [] exeptions = ["Sample", "Fail", ""] - if type(first_header) is not list: + if not isinstance(first_header, list): if first_header: first_header = [first_header] else: @@ -403,7 +400,8 @@ def _log_before_change(self, changelog_f=None): ) logging.info( - f"Copying from element with id: {self.s_elt.id} to element with " f" id: {self.d_elt.id}" + f"Copying from element with id: {self.s_elt.id} to element with " + f" id: {self.d_elt.id}" ) def _log_after_change(self): diff --git a/tests/test_descriptors.py b/tests/test_descriptors.py index 44f548f..600b8bc 100644 --- a/tests/test_descriptors.py +++ b/tests/test_descriptors.py @@ -195,7 +195,7 @@ def setUp(self): def test__get__(self): sd = self._make_desc(StringDictionaryDescriptor, "test-subentry") res = sd.__get__(self.instance, None) - assert type(res) == dict + assert isinstance(res, dict) assert res["test-firstkey"] is None assert res["test-secondkey"] == "second value" From cfcb7faaee10ca39b4220c09289b9e1e661d4f17 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 12:13:45 +0200 Subject: [PATCH 15/40] bugfix --- genologics/entities.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/genologics/entities.py b/genologics/entities.py index 094256f..bef3d2e 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -276,7 +276,7 @@ def _add_out_art_process_conection_list(self, input_art, out_artifact, history={ for process in ( self.processes_per_artifact[input_art] if self.processes_per_artifact - else lims.get_processes(inputartifactlimsid=input_art) + else self.lims.get_processes(inputartifactlimsid=input_art) ): # outputs = map(lambda a: (a.id), process.all_outputs()) outputs = [a.id for a in process.all_outputs()] @@ -869,9 +869,7 @@ def _remove_available_inputs(self, input_art): elif rep == 1: del self._available_inputs[input_art] else: - logger.info( - f"using more inputs than replicates for input {input_art.uri}" - ) + logger.info(f"using more inputs than replicates for input {input_art.uri}") self.available_inputs = self._available_inputs def set_available_inputs(self, available_inputs): @@ -928,9 +926,7 @@ def set_pools(self, pools): current_pool = ElementTree.SubElement(pool_root, "pool") if pool_obj.get("output", False): current_pool.attrib["output-uri"] = pool_obj["output"].uri - current_pool.attrib["name"] = pool_obj.get( - "name", f"Pool #{idx + 1}" - ) + current_pool.attrib["name"] = pool_obj.get("name", f"Pool #{idx + 1}") for input_art in pool_obj.get("inputs", []): current_input = ElementTree.SubElement(current_pool, "input") current_input.attrib["uri"] = input_art.uri @@ -1197,9 +1193,7 @@ def create(cls, lims, protocol_step, container_type, inputs, **kwargs): elif not all([isinstance(input, Artifact) for input in inputs]): raise TypeError(f"{inputs} does not contain only items of type Artifact") - instance = super()._create( - lims, creation_tag="step-creation", **kwargs - ) + instance = super()._create(lims, creation_tag="step-creation", **kwargs) # Setup configuration element configuration_element = ElementTree.SubElement(instance.root, "configuration") From a11334d70ace2fce5eaf13668f0f25155ad15fbd Mon Sep 17 
00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 16:39:41 +0200 Subject: [PATCH 16/40] formatting --- examples/get_artifacts.py | 1 - tests/test_lims.py | 12 +++++------- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/examples/get_artifacts.py b/examples/get_artifacts.py index e4071d8..97c4d5d 100644 --- a/examples/get_artifacts.py +++ b/examples/get_artifacts.py @@ -7,7 +7,6 @@ Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ - # Login parameters for connecting to a LIMS instance. from genologics.config import BASEURI, PASSWORD, USERNAME from genologics.lims import Lims diff --git a/tests/test_lims.py b/tests/test_lims.py index c590d4d..93b0f98 100644 --- a/tests/test_lims.py +++ b/tests/test_lims.py @@ -13,7 +13,6 @@ def callable(obj): return isinstance(obj, collections.Callable) - import builtins from unittest.mock import Mock, patch @@ -37,9 +36,10 @@ class TestLims(TestCase): def test_get_uri(self): lims = Lims(self.url, username=self.username, password=self.password) - assert lims.get_uri( - "artifacts", sample_name="test_sample" - ) == f"{self.url}/api/v2/artifacts?sample_name=test_sample" + assert ( + lims.get_uri("artifacts", sample_name="test_sample") + == f"{self.url}/api/v2/artifacts?sample_name=test_sample" + ) def test_parse_response(self): lims = Lims(self.url, username=self.username, password=self.password) @@ -60,9 +60,7 @@ def test_parse_response(self): ) def test_get(self, mocked_instance): lims = Lims(self.url, username=self.username, password=self.password) - r = lims.get( - f"{self.url}/api/v2/artifacts?sample_name=test_sample" - ) + r = lims.get(f"{self.url}/api/v2/artifacts?sample_name=test_sample") assert r is not None assert callable(r.find) assert hasattr(r.attrib, "__getitem__") From c2ffa9e8e82815eb4b0245f742ea70ae49af54ab Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 16:41:36 +0200 Subject: [PATCH 17/40] add types --- requirements.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements.txt b/requirements.txt index 33da504..5493e70 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,3 +5,6 @@ pytest requests ruff six +types-requests +types-setuptools +types-six From 6539925d7bbbdc9f422dbc3dd223a20bfb03ea0b Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 16:48:42 +0200 Subject: [PATCH 18/40] remove title from .toml --- pyproject.toml | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4bb3c6a..7c587d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,19 +1,16 @@ -title = "scilifelab_epps" - - [tool.ruff.lint] -select =[ +select = [ # Ruff default rules # ------------------------------ - "E4", # pycodestyle Imports - "E7", # pycodestyle Statements - "E9", # pycodestyle Runtime - "F", # Pyflakes + "E4", # pycodestyle Imports + "E7", # pycodestyle Statements + "E9", # pycodestyle Runtime + "F", # Pyflakes # Additional Comment # ------------------------------------------------------ - "I", # isort Best-practice sorting of imports - "UP", # pyupgrade Make sure syntax is up-to-date + "I", # isort Best-practice sorting of imports + "UP", # pyupgrade Make sure syntax is up-to-date ] ignore = [ "E402", # Module level import not at top of file From 230f4c5f49b7c5adec1d033f26a683373cbdd1c9 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 17:21:46 +0200 Subject: [PATCH 19/40] fix deprecated sphinx extension --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/docs/conf.py b/docs/conf.py index 6ed9f4e..53365f4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,7 +25,7 @@ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.coverage", - "sphinx.ext.pngmath", + "sphinx.ext.imgmath", "sphinx.ext.ifconfig", "sphinx.ext.viewcode", "sphinxcontrib.programoutput", From 09d7048b8a9c59aeeeafbffc831b6cd714d3a3a9 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 17:22:07 +0200 Subject: [PATCH 20/40] mypy fixes --- genologics/entities.py | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/genologics/entities.py b/genologics/entities.py index bef3d2e..6361574 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -41,7 +41,7 @@ try: from urllib.parse import parse_qs, urlparse, urlsplit, urlunparse except ImportError: - from urlparse import parse_qs, urlparse, urlsplit, urlunparse + from urlparse import parse_qs, urlparse, urlsplit, urlunparse # type: ignore import logging from xml.etree import ElementTree @@ -299,9 +299,9 @@ def _add_out_art_process_conection_list(self, input_art, out_artifact, history={ class Entity: "Base class for the entities in the LIMS database." - _TAG = None - _URI = None - _PREFIX = None + _TAG: str | None = None + _URI: str | None = None + _PREFIX: str | None = None def __new__(cls, lims, uri=None, id=None, _create_new=False): if not uri: @@ -828,8 +828,8 @@ def stateless(self): return self # XXX set_state ? - state = property(get_state) - stateless = property(stateless) + state = property(get_state) # type: ignore + stateless = property(stateless) # type: ignore def _get_workflow_stages_and_statuses(self): self.get() @@ -1320,12 +1320,17 @@ class Queue(Entity): artifacts = MultiPageNestedEntityListDescriptor("artifact", Artifact, "artifacts") -Sample.artifact = EntityDescriptor("artifact", Artifact) -StepActions.step = EntityDescriptor("step", Step) -Stage.workflow = EntityDescriptor("workflow", Workflow) -Artifact.workflow_stages = NestedEntityListDescriptor( - "workflow-stage", Stage, "workflow-stages" +# Set class-interdependent class variables +setattr(Sample, "artifact", EntityDescriptor("artifact", Artifact)) +setattr(StepActions, "step", EntityDescriptor("step", Step)) +setattr(Stage, "workflow", EntityDescriptor("workflow", Workflow)) +setattr( + Artifact, + "workflow_stages", + NestedEntityListDescriptor("workflow-stage", Stage, "workflow-stages"), ) -Step.configuration = EntityDescriptor("configuration", ProtocolStep) -StepProgramStatus.configuration = EntityDescriptor("configuration", ProtocolStep) -Researcher.roles = NestedEntityListDescriptor("role", Role, "credentials") +setattr(Step, "configuration", EntityDescriptor("configuration", ProtocolStep)) +setattr( + StepProgramStatus, "configuration", EntityDescriptor("configuration", ProtocolStep) +) +setattr(Researcher, "roles", NestedEntityListDescriptor("role", Role, "credentials")) From ebafb498d24ba9b08644be3d40589143afd1d51f Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 17:34:44 +0200 Subject: [PATCH 21/40] tweak gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 99dd0c8..02b1d54 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ docs/*.html scripts/*.csv scripts/*.log scripts/*.out +.DS_Store From 94126aa75d5fe431ebf261dfa39bda7704560879 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Thu, 27 Jun 2024 17:34:50 +0200 Subject: [PATCH 22/40] add missing reqs --- requirements.txt | 1 + 1 file changed, 1 insertion(+) 
diff --git a/requirements.txt b/requirements.txt index 5493e70..bd5bc0c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,7 @@ pytest requests ruff six +sphinxcontrib-programoutput types-requests types-setuptools types-six From afe1ca6ac7d6b5c5fd1c95224893c70c76637e43 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:09:52 +0200 Subject: [PATCH 23/40] ignore typing for dynamically assigned class attribute --- examples/get_samples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/get_samples.py b/examples/get_samples.py index 7c448d7..279c376 100644 --- a/examples/get_samples.py +++ b/examples/get_samples.py @@ -56,5 +56,5 @@ print() # Print the artifact of the sample. -artifact = sample.artifact +artifact = sample.artifact # type: ignore print(artifact, artifact.state, artifact.type, artifact.qc_flag) From af62abb99f560c880ffd8a30d484df960e988684 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:10:05 +0200 Subject: [PATCH 24/40] use public method instead of private method --- genologics/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/genologics/constants.py b/genologics/constants.py index 981b1d7..9541aba 100644 --- a/genologics/constants.py +++ b/genologics/constants.py @@ -42,7 +42,7 @@ ) for prefix, uri in _NSMAP.items(): - ElementTree._namespace_map[uri] = prefix + ElementTree.register_namespace(prefix, uri) _NSPATTERN = re.compile(r"(\{)(.+?)(\})") From 59b58483689a76ce38eb5670eca7ccb6ea391d0b Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:21:08 +0200 Subject: [PATCH 25/40] exclude docs/ from type checking --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 7c587d7..7675ea4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,3 +22,4 @@ ignore = [ [tool.mypy] ignore_missing_imports = true follow_imports = 'skip' +exclude = '^docs/' From 4805e3cfef21500eb7ea4c8fb928427c900c030c Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:21:26 +0200 Subject: [PATCH 26/40] add required type annotation --- genologics/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/genologics/test_utils.py b/genologics/test_utils.py index 4767179..dfe518a 100644 --- a/genologics/test_utils.py +++ b/genologics/test_utils.py @@ -16,7 +16,7 @@ """ -XML_DICT = {} +XML_DICT: None | dict = {} def patched_get(*args, **kwargs): From 1b656f783f5491174f2a4aae09e75d0b6c553ca6 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:22:23 +0200 Subject: [PATCH 27/40] fix python version and get rid of import ambiguity --- genologics/config.py | 15 +++++---------- genologics/entities.py | 12 ++++-------- setup.py | 1 + 3 files changed, 10 insertions(+), 18 deletions(-) diff --git a/genologics/config.py b/genologics/config.py index c4b2b89..02f1929 100644 --- a/genologics/config.py +++ b/genologics/config.py @@ -1,25 +1,20 @@ import os - -try: - from ConfigParser import SafeConfigParser -except ImportError: - from configparser import SafeConfigParser - +from configparser import ConfigParser """ Usage: from genologics.config import BASEURI, USERNAME, PASSWORD -Alternate Usage: +Alternate Usage: from genologics import config -BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG = config.load_config(specified_config = ) +BASEURI, USERNAME, PASSWORD, VERSION, MAIN_LOG = config.load_config(specified_config = ) """ spec_config = None def get_config_info(config_file): - config = SafeConfigParser() + 
config = ConfigParser() config.readfp(open(config_file)) BASEURI = config.get("genologics", "BASEURI").rstrip() @@ -42,7 +37,7 @@ def load_config(specified_config=None): if specified_config is not None: config_file = specified_config else: - config = SafeConfigParser() + config = ConfigParser() try: conf_file = config.read( [ diff --git a/genologics/entities.py b/genologics/entities.py index 6361574..8009a21 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -6,6 +6,10 @@ Copyright (C) 2012 Per Kraulis """ +import logging +from urllib.parse import parse_qs, urlparse, urlsplit, urlunparse +from xml.etree import ElementTree + from genologics.constants import nsmap from genologics.descriptors import ( BooleanDescriptor, @@ -38,14 +42,6 @@ UdtDictionaryDescriptor, ) -try: - from urllib.parse import parse_qs, urlparse, urlsplit, urlunparse -except ImportError: - from urlparse import parse_qs, urlparse, urlsplit, urlunparse # type: ignore - -import logging -from xml.etree import ElementTree - logger = logging.getLogger(__name__) diff --git a/setup.py b/setup.py index 1073f9d..d5c484e 100644 --- a/setup.py +++ b/setup.py @@ -43,6 +43,7 @@ include_package_data=True, zip_safe=False, install_requires=["requests"], + python_requires="3.12", entry_points=""" # -*- Entry points: -*- """, From 5b6472b4a957717317c736849457859d3ee3e7d7 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:28:05 +0200 Subject: [PATCH 28/40] prettier --- .github/pr_labels.yml | 2 +- README.md | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/pr_labels.yml b/.github/pr_labels.yml index d04f24f..8db6d10 100644 --- a/.github/pr_labels.yml +++ b/.github/pr_labels.yml @@ -1,4 +1,4 @@ -version: '1' +version: "1" invalidStatus: "pending" labelRule: values: diff --git a/README.md b/README.md index db01b71..943919d 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,8 @@ A basic module for interacting with the Illumina Basespace Clarity LIMS server v its REST API. The goal is to provide simple access to the most common entities and their attributes in a reasonably Pythonic fashion. - ### Compatibility + From version **1.0.0** the scripts have been ported to support **Python 3**, and it is backwards compatible with **Python 2** as well. The previous versions (**<0.4.6**) are only compatible with **Python 2**. @@ -15,7 +15,7 @@ and it is backwards compatible with **Python 2** as well. The previous versions ### Design All instances of Project, Sample, Artifact, etc should be obtained using -the get_* methods of the Lims class, which keeps an internal cache of +the get\_\* methods of the Lims class, which keeps an internal cache of current instances. The idea is to create one and only one instance in a running script for representing an item in the database. If one has more than one instance representing the same item, there is a danger that @@ -78,7 +78,6 @@ NOTE: The example files rely on specific entities and configurations on the server, and use base URI, user name and password, so to work for your server, all these must be reviewed and modified. - ### EPPs The EPPs in use at Scilifelab can be found in the subdirectory 'scripts' of the repository [scilifelab_epps](https://github.com/SciLifeLab/scilifelab_epps/). 
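
The README section reformatted above describes the library's central design: every Project, Sample, Artifact, etc. is obtained through the get_* methods of a single Lims instance, which caches objects so that each database item is represented exactly once. A minimal sketch of that access pattern, assuming working credentials in genologics.config; the project and sample names are placeholders:

    from genologics.config import BASEURI, PASSWORD, USERNAME
    from genologics.lims import Lims

    # One Lims instance per script; all entities are fetched through its
    # get_* methods so each database item maps to a single cached object.
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    lims.check_version()

    # "Example Project" / "example_sample" are placeholder names.
    for project in lims.get_projects(name="Example Project"):
        print(project, project.name)

    analytes = lims.get_artifacts(sample_name="example_sample", type="Analyte")
    print(len(analytes), "analyte artifacts found")
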
From 92706609f0cbfce9c2557389a260b9c60d29cce1 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:29:59 +0200 Subject: [PATCH 29/40] editorconfig file exclusions --- .github/workflows/lint-code.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 0663e2f..43b04ad 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -127,4 +127,4 @@ jobs: run: npm install -g editorconfig-checker - name: editorconfig --> Lint files - run: editorconfig-checker $(git ls-files | grep -v '.py\|.md\|.json\|.yml\|.yaml\|.html') + run: editorconfig-checker $(git ls-files | grep -v '.py\|.md\|.json\|.yml\|.yaml\|.html\|.Makefile\|.rst') From 7d09c3b57e670e84755e41eef9811e5b796898dd Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:50:16 +0200 Subject: [PATCH 30/40] tru updated pipreqs script separate user-vs-dev requirements and add pytest and codecov to ci make pkg comparison case-insensitive --- .github/workflows/lint-code.yml | 27 +++++++++++++++++++-------- .github/workflows/test-code.yml | 32 ++++++++++++++++++++++++++++++++ requirements-dev.txt | 9 +++++++++ requirements.txt | 10 +--------- 4 files changed, 61 insertions(+), 17 deletions(-) create mode 100644 .github/workflows/test-code.yml create mode 100644 requirements-dev.txt diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 43b04ad..8738dfc 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -69,11 +69,10 @@ jobs: with: python-version: "3.12" - - name: Install pipreqs - run: pip install pipreqs - - - name: Install requirements - run: pip install -r requirements.txt + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pipreqs - name: Run pipreqs run: pipreqs --savepath pipreqs.txt @@ -81,16 +80,28 @@ jobs: - name: Compare requirements run: | # Extract and sort package names - awk '{print $1}' $1 | sort -u > "$1".compare - awk -F'==' '{print $1}' $2 | sort -u > "$2".compare + awk -F'(=|==|>|>=|<|<=| @ )' '{print $1}' requirements.txt | tr '[:upper:]' '[:lower:]' | sort -u > requirements.compare + awk -F'(=|==|>|>=|<|<=| @ )' '{print $1}' pipreqs.txt | tr '[:upper:]' '[:lower:]' | sort -u > pipreqs.compare # Compare package lists - if cmp -s "$1".compare "$2".compare + if cmp -s requirements.compare pipreqs.compare then echo "Requirements are the same" + exit 0 else echo "Requirements are different" + echo "" + + echo "=== current requirements.txt ===" + echo "" + cat requirements.compare + echo "" + + echo "=== pipreqs requirements ===" + echo "" + cat pipreqs.compare + exit 1 fi diff --git a/.github/workflows/test-code.yml b/.github/workflows/test-code.yml new file mode 100644 index 0000000..bf7809d --- /dev/null +++ b/.github/workflows/test-code.yml @@ -0,0 +1,32 @@ +name: Test code +on: [push, pull_request] + +jobs: + pytest: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.12" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install -r requirements-dev.txt + - name: Install genologics + run: pip install -e . 
+ - name: pytest + # Options are configured in pyproject.toml + run: pytest + - name: CodeCov + run: | + # Replace `linux` below with the appropriate OS + # Options are `alpine`, `linux`, `macos`, `windows` + curl -Os https://uploader.codecov.io/latest/linux/codecov + chmod +x codecov + ./codecov -t ${{ secrets.CODECOV_TOKEN }} diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..8885285 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,9 @@ +mock +mypy +pipreqs +pytest +ruff +sphinxcontrib-programoutput +types-requests +types-setuptools +types-six diff --git a/requirements.txt b/requirements.txt index bd5bc0c..9840079 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,3 @@ -mock -mypy -pipreqs -pytest requests -ruff +setuptools six -sphinxcontrib-programoutput -types-requests -types-setuptools -types-six From cef46a2f33ec651794a03e4ebeda91c892ec162a Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 10:51:35 +0200 Subject: [PATCH 31/40] fix version syntax --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d5c484e..d039a66 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ include_package_data=True, zip_safe=False, install_requires=["requests"], - python_requires="3.12", + python_requires=">=3.12", entry_points=""" # -*- Entry points: -*- """, From c2eadcc0abe0e3030b224245aa13888b5ec95932 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Fri, 28 Jun 2024 15:43:29 +0200 Subject: [PATCH 32/40] try new codecov workflow apply pytest-cov, try different codecov workflow try new codecov action correct syntax --- .github/workflows/test-code.yml | 11 ++++------- requirements-dev.txt | 1 + 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/test-code.yml b/.github/workflows/test-code.yml index bf7809d..0369f7e 100644 --- a/.github/workflows/test-code.yml +++ b/.github/workflows/test-code.yml @@ -22,11 +22,8 @@ jobs: run: pip install -e . 
- name: pytest # Options are configured in pyproject.toml - run: pytest + run: pytest --cov=genologics --cov-report=xml - name: CodeCov - run: | - # Replace `linux` below with the appropriate OS - # Options are `alpine`, `linux`, `macos`, `windows` - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - ./codecov -t ${{ secrets.CODECOV_TOKEN }} + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/requirements-dev.txt b/requirements-dev.txt index 8885285..76d34af 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,6 +2,7 @@ mock mypy pipreqs pytest +pytest-cov ruff sphinxcontrib-programoutput types-requests From 5bd64d1cf1cb084599cf7eda3ed3dc48f373365b Mon Sep 17 00:00:00 2001 From: kedhammar Date: Mon, 1 Jul 2024 15:06:32 +0200 Subject: [PATCH 33/40] improve pipreqs --- .github/workflows/lint-code.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 8738dfc..40ac80f 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -72,10 +72,17 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install pipreqs + pip install -r requirements.txt + pip install -r requirements-dev.txt - name: Run pipreqs - run: pipreqs --savepath pipreqs.txt + run: | + pipreqs --savepath pipreqs.txt taca 2>&1 | tee pipreqs_output.log + if grep -q 'WARNING: Package .* does not exist or network problems' pipreqs_output.log; then + missing_packages=$(grep 'WARNING: Package .* does not exist or network problems' pipreqs_output.log | sed -E 's/.*Package "(.*)" does not exist.*/\1/') + echo "ERROR: Add unresolved packages to requirements. Missing package(s): $missing_packages. Example: ' @ git+https://github.com//.git'" + exit 1 + fi - name: Compare requirements run: | From e15d0f907b949c94cfa16d08fa54424b547a9109 Mon Sep 17 00:00:00 2001 From: kedhammar Date: Mon, 1 Jul 2024 15:16:42 +0200 Subject: [PATCH 34/40] bugfix --- .github/workflows/lint-code.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 40ac80f..99820ec 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -77,7 +77,7 @@ jobs: - name: Run pipreqs run: | - pipreqs --savepath pipreqs.txt taca 2>&1 | tee pipreqs_output.log + pipreqs --savepath pipreqs.txt 2>&1 | tee pipreqs_output.log if grep -q 'WARNING: Package .* does not exist or network problems' pipreqs_output.log; then missing_packages=$(grep 'WARNING: Package .* does not exist or network problems' pipreqs_output.log | sed -E 's/.*Package "(.*)" does not exist.*/\1/') echo "ERROR: Add unresolved packages to requirements. Missing package(s): $missing_packages. 
From f66fa11cb2bcd6eb7733a5bd96ffbc8149b558f8 Mon Sep 17 00:00:00 2001
From: Anandashankar Anil
Date: Tue, 16 Jul 2024 15:40:16 +0200
Subject: [PATCH 35/40] Would ruff accept this

---
 genologics/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genologics/version.py b/genologics/version.py
index 8613f6a..e9be607 100644
--- a/genologics/version.py
+++ b/genologics/version.py
@@ -1 +1 @@
-__version__="2.0.0"
\ No newline at end of file
+__version__= "2.0.0"
\ No newline at end of file

From 3c11b2e82496ff5153407ac1a42c59647b9727c1 Mon Sep 17 00:00:00 2001
From: Anandashankar Anil
Date: Tue, 16 Jul 2024 15:44:29 +0200
Subject: [PATCH 36/40] Try propitiating ruff again

---
 genologics/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genologics/version.py b/genologics/version.py
index e9be607..926bdc6 100644
--- a/genologics/version.py
+++ b/genologics/version.py
@@ -1 +1 @@
-__version__= "2.0.0"
\ No newline at end of file
+__version__= "2.0.0"

From 9d91d033d2b7bc50b3feb9faa92c8447a4821b9d Mon Sep 17 00:00:00 2001
From: Anandashankar Anil
Date: Tue, 16 Jul 2024 17:33:40 +0200
Subject: [PATCH 37/40] And again

---
 genologics/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genologics/version.py b/genologics/version.py
index 926bdc6..8c0d5d5 100644
--- a/genologics/version.py
+++ b/genologics/version.py
@@ -1 +1 @@
-__version__= "2.0.0"
+__version__ = "2.0.0"

From c954e41ded227a75c9e839296932b9b27e30d22f Mon Sep 17 00:00:00 2001
From: Alfred Kedhammar <89784800+kedhammar@users.noreply.github.com>
Date: Mon, 5 Aug 2024 14:15:59 +0200
Subject: [PATCH 38/40] Update genologics/lims_utils.py

Co-authored-by: Anandashankar Anil
---
 genologics/lims_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genologics/lims_utils.py b/genologics/lims_utils.py
index 19cd594..00f8626 100644
--- a/genologics/lims_utils.py
+++ b/genologics/lims_utils.py
@@ -18,7 +18,7 @@ def get_run_info(fc):
 
 
 def procHistory(proc, samplename):
-    """Quick wat to get the ids of parent processes from the given process,
+    """Quick way to get the ids of parent processes from the given process,
     while staying in a sample scope"""
     hist = []
     artifacts = lims.get_artifacts(sample_name=samplename, type="Analyte")

From b0366763404b05ac1572da6274a92e7178fa512c Mon Sep 17 00:00:00 2001
From: kedhammar
Date: Mon, 26 Aug 2024 09:36:28 +0200
Subject: [PATCH 39/40] bump version

---
 genologics/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genologics/version.py b/genologics/version.py
index 8c0d5d5..159d48b 100644
--- a/genologics/version.py
+++ b/genologics/version.py
@@ -1 +1 @@
-__version__ = "2.0.0"
+__version__ = "2.0.1"

From afe846e3f18cabc4fe831cf8f63b7adcad8970f7 Mon Sep 17 00:00:00 2001
From: kedhammar
Date: Mon, 26 Aug 2024 09:41:18 +0200
Subject: [PATCH 40/40] typo

---
 tests/test_entities.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/test_entities.py b/tests/test_entities.py
index fe88354..36aaa34 100644
--- a/tests/test_entities.py
+++ b/tests/test_entities.py
@@ -86,7 +86,7 @@ generic_reagentkit_xml = """
-regaentkitname
+reagentkitname
 reagentProvider
 www.reagentprovider.com
 false
@@ -426,7 +426,7 @@ def test_parse_entity(self):
             "requests.Session.get",
             return_value=Mock(content=self.reagentkit_xml, status_code=200),
         ):
-            assert r.name == "regaentkitname"
+            assert r.name == "reagentkitname"
             assert r.supplier == "reagentProvider"
             assert r.website == "www.reagentprovider.com"
             assert r.archived is False
@@ -438,7 +438,7 @@ def test_create_entity(self):
         ):
             ReagentKit.create(
                 self.lims,
-                name="regaentkitname",
+                name="reagentkitname",
                 supplier="reagentProvider",
                 website="www.reagentprovider.com",
                 archived=False,
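The checks wired up in the workflows above can also be run locally before pushing. A minimal sketch, assuming the package and the dev tools pinned in requirements-dev.txt install cleanly into the active environment (the flags simply mirror the workflow files in this series):

    # install the package plus the development tooling
    pip install -e .
    pip install -r requirements-dev.txt

    # style, formatting and typing checks, configured in pyproject.toml
    ruff check .
    ruff format --check .
    mypy .

    # tests with coverage; writes coverage.xml
    pytest --cov=genologics --cov-report=xml

The coverage.xml produced by the last command is what the codecov/codecov-action@v4 step picks up and uploads in CI.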