From 02c654fe6e3dc18b577037c5774d18a9360891a6 Mon Sep 17 00:00:00 2001
From: shachafl <66333410+shachafl@users.noreply.github.com>
Date: Sun, 11 Aug 2024 23:46:27 -0400
Subject: [PATCH] Script and dependency updates for Python 3.8 - 3.10
(#2009)
* multichannel replaced with channel_axis in scikit-image ver 0.19
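
  A minimal sketch of the call-site change (illustrative only; it mirrors the
  gaussian_low_pass.py hunk below, and the input array here is arbitrary):

      import numpy as np
      from skimage.filters import gaussian

      image = np.random.rand(64, 64)
      # scikit-image < 0.19 spelling: gaussian(image, sigma=2, multichannel=False)
      filtered = gaussian(image, sigma=2, channel_axis=None)
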
* in scikit-image 0.20 watershed was moved from morphology module to segmentation
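
  Sketch of the import move (the mask and distance transform below are made up;
  only the import path needed updating):

      import numpy as np
      from scipy import ndimage as ndi
      # previously: from skimage.morphology import watershed
      from skimage.segmentation import watershed

      mask = np.zeros((32, 32), dtype=bool)
      mask[8:24, 8:24] = True
      distance = ndi.distance_transform_edt(mask)
      labels = watershed(-distance, mask=mask)
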
* in scikit-image 0.20, selem was renamed to footprint
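
  Illustrative keyword rename (mirrors the white_tophat and opening hunks below;
  the input image is arbitrary):

      import numpy as np
      from skimage.morphology import disk, white_tophat

      image = np.random.rand(64, 64)
      # older spelling: white_tophat(image, selem=disk(5))
      background = white_tophat(image, footprint=disk(5))
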
* updates for scikit-image 0.21.0, which prefixes GeometricTransform with an underscore (_GeometricTransform)
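
  Sketch of the renamed base class (note that _GeometricTransform lives in a
  private module, so this import may change again in future scikit-image releases):

      # scikit-image < 0.21: from skimage.transform._geometric import GeometricTransform
      from skimage.transform._geometric import _GeometricTransform
      from skimage.transform import SimilarityTransform

      transform: _GeometricTransform = SimilarityTransform(translation=(5, -3))
      assert isinstance(transform, _GeometricTransform)
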
* updating napari version and removing anisotropy in spot sizes
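
  Rough sketch of the isotropic spot-size idea (the column layout and values here
  are hypothetical, not the exact ones used in _display.py):

      import numpy as np

      sizes = np.array([[2.0, 3.0, 5.0],   # made-up per-spot (z, y, x) radii
                        [2.0, 4.0, 4.0]])
      isotropic = sizes[:, 1:3].mean(axis=1)  # average the in-plane radii, dropping z
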
* fix(requirements): edit Makefile and update REQUIREMENTS.txt to generate all req files
updating requirements files to support sphinx's make html
* numpy.float is deprecated in ver 1.20
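
  The alias simply maps to the builtin, so the fix is mechanical (np.float was
  removed entirely in numpy 1.24):

      import numpy as np

      values = np.array([1, 2, 3], dtype=float)  # instead of dtype=np.float
      ones = np.ones(3, dtype=float)
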
* DataFrame.append was removed in pandas 2.0; use concat instead. This update is backward compatible.
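
  Minimal before/after (illustrative frames; pd.concat also exists in pandas 1.x,
  which is why the change is backward compatible):

      import pandas as pd

      a = pd.DataFrame({"x": [1, 2]})
      b = pd.DataFrame({"x": [3]})
      # pandas < 2.0 allowed: combined = a.append(b, ignore_index=True)
      combined = pd.concat([a, b], ignore_index=True)
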
* Linting corrections following automatic checks in PR
* updating yaml.load() calls to pass an explicit Loader, as expected by PyYAML 5.1+
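
  Sketch of the updated call (mirrors the format_osmfish.py hunk below; the file
  name here is hypothetical):

      import yaml

      with open("metadata.yaml", "r") as f:
          metadata = yaml.load(f, Loader=yaml.FullLoader)
      # yaml.safe_load(f) is the stricter alternative when custom tags are not needed
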
* updating pip version
* linting corrections to crop.py and check_all_funcs.py
* mypy assignment error: replaced None with empty string
* mypy assignment error: replaced None with empty dict
* mypy assignment error: add Optional to args defaults with None
added: from typing import Optional
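
  The pattern, sketched with a made-up function (the real changes touch e.g.
  synthetic_stack, write_irregular_experiment_json, and the ImageStack methods):

      from typing import Optional

      # implicit Optional, rejected by mypy: def load_config(path: str = None) -> str
      def load_config(path: Optional[str] = None) -> str:
          return path if path is not None else "default.cfg"
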
* mypy assignment error: silencing a variable type error on IntensityTable
* check_all_funcs.py: added import typing
* IntensityTable.from_spot_data expects a Sequence for round_values and ch_values but got an ndarray: replaced np.arange with range
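
  Sketch of the swap (mirrors the intensity_table_factory hunks below; the data
  array is arbitrary; a range satisfies typing.Sequence[int], an ndarray does not):

      import numpy as np

      data = np.zeros((2, 3, 4))
      # previously: ch_values=np.arange(data.shape[1]), round_values=np.arange(data.shape[2])
      ch_values = range(data.shape[1])
      round_values = range(data.shape[2])
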
* silencing mypy errors with type: ignore, to pass PR automatic checks
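
  Example of the pragma, modeled on the codebook.py hunk below (the shape tuple
  is a stand-in; np.dot on two ints works at runtime even though mypy's numpy
  stubs flag the unpacked-tuple call):

      import numpy as np

      shape = (4, 2, 3)
      code_length = int(np.dot(*shape[1:]))  # type: ignore[call-overload]
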
* check_all_funcs.py: F401 'typing' imported but unused: silencing linter
* fixing 6 files with linting errors E126, E128, E251
* fixing linting errors in 2 files: I101 and I201
* fixing a conflict due to identical content in 2 json schema files
* correcting spots detected from 53 to 54
* update expected values for registered images and two other minor counts
* small correction to expected values when testing registration on ISS test data
* correcting channel_ and round_must_be_present tests and corresponding field_of_view schema
* fixing wrong underline length in docstring
* defining 'en' language and adding xarray to autodoc_mock_imports
* removing py 3.7 and adding 3.10 to starfish-prod-ci.yml
* commenting out napari gui and moving to the end of script
* removed extension 'sphinx_autodoc_typehints' due to failed guarded type import with xarray
* fix: update docker image to python 3.8 instead of 3.7
* docs(packaging): update the python versions compatible with starfish
docs(packaging): minor syntax correction
* docs: updating README and installation file with python versions and how to fix broken napari or jupyter
---
.github/workflows/starfish-prod-ci.yml | 4 +-
Makefile | 8 +-
README.rst | 8 +-
REQUIREMENTS.txt | 41 ++-
docker/Dockerfile | 2 +-
docs/source/conf.py | 8 +-
docs/source/installation/index.rst | 72 ++++-
.../format_osmfish.py | 2 +-
notebooks/BaristaSeq.ipynb | 4 +-
notebooks/Starfish_simulation.ipynb | 4 +-
notebooks/py/BaristaSeq.py | 2 +-
notebooks/py/Starfish_simulation.py | 2 +-
notebooks/py/smFISH.py | 5 +-
requirements/REQUIREMENTS-CI.txt | 270 +++++++++--------
requirements/REQUIREMENTS-CI.txt.in | 16 +-
requirements/REQUIREMENTS-JUPYTER.txt | 149 ++++++++++
requirements/REQUIREMENTS-JUPYTER.txt.in | 4 +
requirements/REQUIREMENTS-NAPARI-CI.txt | 273 +++++++++---------
requirements/REQUIREMENTS-NAPARI-CI.txt.in | 7 +-
setup.cfg | 9 +-
setup.py | 2 +-
starfish/REQUIREMENTS-STRICT.txt | 147 ++++------
starfish/core/_display.py | 10 +-
starfish/core/codebook/codebook.py | 5 +-
.../core/codebook/test/test_metric_decode.py | 4 +-
.../test/test_normalize_code_traces.py | 4 +-
.../test/test_per_round_max_decode.py | 4 +-
starfish/core/experiment/builder/builder.py | 2 +-
starfish/core/experiment/experiment.py | 2 +-
.../core/image/Filter/gaussian_low_pass.py | 2 +-
.../core/image/Filter/match_histograms.py | 2 +-
starfish/core/image/Filter/white_tophat.py | 2 +-
.../ApplyTransform/test/test_warp.py | 40 +--
.../_registration/ApplyTransform/warp.py | 6 +-
.../LearnTransform/test/test_translation.py | 2 +-
.../test/test_transforms_list.py | 2 +-
.../image/_registration/transforms_list.py | 20 +-
starfish/core/imagestack/imagestack.py | 12 +-
starfish/core/imagestack/parser/crop.py | 16 +-
.../test/factories/synthetic_stack.py | 4 +-
.../decoded_intensity_table.py | 2 +-
.../core/intensity_table/intensity_table.py | 2 +-
.../morphology/Filter/min_distance_label.py | 2 +-
.../morphology/binary_mask/binary_mask.py | 4 +-
.../core/spacetx_format/test_field_of_view.py | 6 +-
starfish/core/spacetx_format/util.py | 8 +-
starfish/core/spacetx_format/validate_sptx.py | 4 +-
.../spots/DecodeSpots/check_all_decoder.py | 15 +-
.../core/spots/DecodeSpots/check_all_funcs.py | 27 +-
starfish/core/test/factories.py | 2 +-
starfish/core/util/config.py | 2 +-
starfish/core/util/exec.py | 2 +-
.../field_of_view_0.0.0/tiles/tiles.json | 2 +-
.../field_of_view_0.1.0/field_of_view.json | 4 +-
.../field_of_view_0.1.0/tiles/tiles.json | 2 +-
.../test/full_pipelines/api/test_dartfish.py | 2 +-
.../test/full_pipelines/api/test_iss_api.py | 44 +--
57 files changed, 753 insertions(+), 555 deletions(-)
create mode 100644 requirements/REQUIREMENTS-JUPYTER.txt
create mode 100644 requirements/REQUIREMENTS-JUPYTER.txt.in
diff --git a/.github/workflows/starfish-prod-ci.yml b/.github/workflows/starfish-prod-ci.yml
index 100a505da..22aa76dbb 100644
--- a/.github/workflows/starfish-prod-ci.yml
+++ b/.github/workflows/starfish-prod-ci.yml
@@ -41,7 +41,7 @@ jobs:
strategy:
matrix:
os: ["windows-latest", "ubuntu-latest"]
- python-version: ["3.7", "3.8", "3.9"]
+ python-version: ["3.8", "3.9", "3.10"]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
@@ -71,7 +71,7 @@ jobs:
strategy:
matrix:
os: ["windows-latest", "ubuntu-latest"]
- python-version: ["3.7", "3.8", "3.9"]
+ python-version: ["3.8", "3.9", "3.10"]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
diff --git a/Makefile b/Makefile
index ea3ff3a68..02c4cbc87 100644
--- a/Makefile
+++ b/Makefile
@@ -12,7 +12,7 @@ DOCKER_BUILD?=1
VERSION=$(shell sh -c "git describe --exact --dirty 2> /dev/null")
# if you update this, you will need to update the version pin for the "Install Napari & Test napari (pinned)" test in .travis.yml
-PIP_VERSION=21.2.4
+PIP_VERSION=24.0
define print_help
@printf " %-28s $(2)\n" $(1)
@@ -80,8 +80,8 @@ help-docs:
### REQUIREMENTS #############################################
#
-GENERATED_REQUIREMENT_FILES=starfish/REQUIREMENTS-STRICT.txt requirements/REQUIREMENTS-CI.txt requirements/REQUIREMENTS-NAPARI-CI.txt
-SOURCE_REQUIREMENT_FILES=REQUIREMENTS.txt requirements/REQUIREMENTS-CI.txt.in requirements/REQUIREMENTS-NAPARI-CI.txt.in
+GENERATED_REQUIREMENT_FILES=starfish/REQUIREMENTS-STRICT.txt requirements/REQUIREMENTS-CI.txt requirements/REQUIREMENTS-NAPARI-CI.txt requirements/REQUIREMENTS-JUPYTER.txt
+SOURCE_REQUIREMENT_FILES=REQUIREMENTS.txt requirements/REQUIREMENTS-CI.txt.in requirements/REQUIREMENTS-NAPARI-CI.txt.in requirements/REQUIREMENTS-JUPYTER.txt.in
# This rule pins the requirements with the minimal set of changes required to satisfy the
# requirements. This is typically run when a new requirement is added, and we want to
@@ -103,7 +103,6 @@ starfish/REQUIREMENTS-STRICT.txt : REQUIREMENTS.txt
[ ! -e .$<-env ] || exit 1
$(call create_venv, .$<-env)
.$<-env/bin/pip install --upgrade pip==$(PIP_VERSION)
- .$<-env/bin/pip install -r $@
.$<-env/bin/pip install -r $<
echo "# You should not edit this file directly. Instead, you should edit one of the following files ($^) and run make $@" >| $@
.$<-env/bin/pip freeze --all | grep -v "pip==$(PIP_VERSION)" >> $@
@@ -113,7 +112,6 @@ requirements/REQUIREMENTS-%.txt : requirements/REQUIREMENTS-%.txt.in REQUIREMENT
[ ! -e .$<-env ] || exit 1
$(call create_venv, .$<-env)
.$<-env/bin/pip install --upgrade pip==$(PIP_VERSION)
- .$<-env/bin/pip install -r $@
for src in $^; do \
.$<-env/bin/pip install -r $$src; \
done
diff --git a/README.rst b/README.rst
index 7faf51c83..e77682757 100644
--- a/README.rst
+++ b/README.rst
@@ -47,19 +47,19 @@ See `spacetx-starfish.readthedocs.io `_.
+`Some operating systems might need different dependencies for napari. For more detailed installation instructions, see here `_.
Python Version Notice
---------------------
-starfish will be dropping support for python 3.6 in the next release due to
-minimum python=3.7 version requirements in upstream dependencies.
+starfish will be dropping support for python 3.8 in the next release due to
+minimum python=3.9 version requirements in upstream dependencies.
Contributing
------------
diff --git a/REQUIREMENTS.txt b/REQUIREMENTS.txt
index 25bbdf926..5056135fa 100644
--- a/REQUIREMENTS.txt
+++ b/REQUIREMENTS.txt
@@ -1,30 +1,27 @@
-# The following requirement is here because networkx restricts the version
-# of decorator. Since pip is not a full dependency solver, it has already
-# installed decorator before networkx is installed, and bombs after the
-# fact.
-decorator < 5.0
click
-dataclasses==0.6
+# docutils introduces an AttributeError: module 'docutils.nodes' has no attribute 'reprunicode'
+docutils<0.21
h5py
-jsonschema
-matplotlib
-numpy != 1.13.0, >= 1.20.0
-pandas >= 0.23.4
+# jsonschema 4.18.0 made RefResolver deprecated
+jsonschema<4.18
+# matplotlib 3.8 changes colormap module
+matplotlib<3.8
+# pinning mistune for m2r2 in CI requirements
+mistune==0.8.4
+# numpy 1.25 slightly changes values in three tests causing them to fail
+numpy<1.25
read_roi
regional
-semantic_version
-# 0.16.[012] are excluded because https://github.com/scikit-image/scikit-image/pull/3984 introduced
-# a bug into max peak finder. 0.16.3 presumably will have the fix from
-# https://github.com/scikit-image/scikit-image/pull/4263.
-scikit-image >= 0.14.0, != 0.16.0.*, != 0.16.1.*, != 0.16.2.*, != 0.17.1.*, != 0.17.2.*, < 0.19.0
+# scikit-image is temporarily pinned as newer versions require updating module imports
+scikit-image==0.21
scikit-learn
-scipy
-showit >= 1.1.4
-slicedimage==4.1.1
-sympy ~= 1.5.0
-urllib3 <1.27, >=1.25.4
+seaborn
+semantic_version
+showit
+slicedimage
+sympy
tqdm
trackpy
validators
-xarray >= 0.14.1
-ipywidgets
+# xarray 2023.09 causes a ValueError: new dimensions ('y', 'x') must be a superset of existing dimensions ('dim_0', 'dim_1')
+xarray<2023.09
diff --git a/docker/Dockerfile b/docker/Dockerfile
index a2658b5d9..8f563fb1c 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -36,7 +36,7 @@
##
## $ docker run -e TEST_ISS_KEEP_DATA=true --entrypoint=pytest spacetx/starfish -vsxk TestWithIssData
##
-FROM python:3.7-slim-stretch
+FROM python:3.8-slim-buster
RUN useradd -m starfish
USER starfish
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 197cb5084..e85ab7ae5 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -52,7 +52,6 @@
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
- 'sphinx_autodoc_typehints',
'sphinxcontrib.programoutput',
'sphinx_gallery.gen_gallery',
'sphinx.ext.intersphinx',
@@ -91,7 +90,7 @@
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = None
+language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
@@ -130,6 +129,8 @@
'download_all_examples': False,
'default_thumb_file': f'{dir_}/_static/design/logo-solo.png',
'min_reported_time': 10,
+ # Rerun stale examples even if their MD5 hash shows that the example did not change.
+ 'run_stale_examples': True,
}
@@ -276,9 +277,6 @@ def setup(app):
napoleon_use_param = True
napoleon_use_rtype = True
-# See https://stackoverflow.com/a/45565445/56887
-autodoc_mock_imports = ['_tkinter']
-
rst_epilog = """
.. include::
"""
diff --git a/docs/source/installation/index.rst b/docs/source/installation/index.rst
index 520edd7a0..e211eaaae 100644
--- a/docs/source/installation/index.rst
+++ b/docs/source/installation/index.rst
@@ -3,7 +3,7 @@
Installation
============
-Starfish supports python 3.6 and above (python 3.7 recommended). To install the starfish package,
+Starfish supports python 3.8-3.11. To install the starfish package,
first verify that your python version is compatible. You can check this by running :code:`python
--version`.
@@ -12,13 +12,13 @@ The output should look similar to this:
.. code-block:: bash
$ python --version
- Python 3.7.7
+ Python 3.8.18
.. warning::
- While starfish itself has no known issues with python 3.8, scikit-image is not fully
- compatible with python 3.8. As such, installation of scikit-image, as part of starfish
- installation, may unexpectedly fail. The workaround is to install numpy first before
- installing starfish or scikit-image.
+ While starfish itself should work on any operating system, some napari dependencies might not be
+ compatible with Apple Silicon or Windows. As such, installation of napari, as part of starfish[napari]
+ installation, may unexpectedly fail. The workaround is to install napari first before
+   installing starfish, or to update the dependencies by cloning the project and working in development mode.
Using virtual environments
@@ -44,7 +44,7 @@ Conda_ users can set one up like so:
.. code-block:: bash
- $ conda create -n starfish "python=3.7"
+ $ conda create -n starfish "python=3.8"
$ conda activate starfish
Installing *starfish*
@@ -57,7 +57,7 @@ Starfish can easily be installed using pip:
$ pip install starfish
.. note::
- If using python 3.8, first install numpy using pip before installing starfish.
+ If using Windows or Apple Silicon (M1+), first install napari using pip before installing starfish.
To use napari for interactive image visualization via :py:func:`.display` you must also
install napari:
@@ -72,8 +72,8 @@ Interactive visualization with napari also requires using Qt (e.g. by running th
Installing *starfish* on Windows
--------------------------------
-Windows users can install starfish in the same way. Again, we recommend using a conda or virtual
-environment with python 3.7. Here is how you would install starfish in a virtual environment
+Windows (cmd.exe) users can install starfish in the same way. Again, we recommend using a conda or virtual
+environment with python 3.8+. Here is how you would install starfish in a virtual environment
created with python's ``venv`` module:
.. code-block:: bat
@@ -82,12 +82,11 @@ created with python's ``venv`` module:
> cd starfish
> python -m venv .venv
> .venv\Scripts\activate.bat
+ > pip install napari[all]
> pip install starfish
- > pip install starfish[napari]
.. note::
- Python 3.8 has trouble installing scikit-image v0.15.0 and the ``pip install numpy``
- workaround does not solve this issue on Windows.
+   If you encounter issues, you may need to update the dependencies by cloning the project and working in development mode.
Jupyter notebook
----------------
@@ -97,7 +96,54 @@ the virtualenv kernel to jupyter by activating your virtual environment and then
.. code-block:: bash
+ $ python -m pip install jupyter
$ python -m ipykernel install --user --name=
Now you should be able to select ``venv_name`` as the kernel in a jupyter notebook to have access
to the starfish library.
+
+Installing *starfish* in development mode
+-----------------------------------------
+
+If you need to resolve dependency issues with napari and jupyter or want to tinker with the
+starfish package, it is best to work in development mode.
+If you are on a mac, make sure you have the `XCode CommandLine Tools`_
+installed.
+
+.. _`XCode CommandLine Tools`: https://developer.apple.com/library/archive/technotes/tn2339/_index.html
+
+Check out the code for starfish:
+
+.. code-block:: bash
+
+ $ git clone https://github.com/spacetx/starfish.git
+ $ cd starfish
+
+Set up a `virtual environment`_:
+
+.. _`virtual environment`: #using-virtual-environments
+
+.. code-block:: bash
+
+ $ python -m venv .venv
+ $ source .venv/bin/activate
+
+Install starfish:
+
+.. code-block:: bash
+
+ $ make install-dev
+
+Update dependencies for napari and jupyter:
+
+.. code-block:: bash
+
+ $ make -B requirements/REQUIREMENTS-NAPARI-CI.txt
+ $ make -B requirements/REQUIREMENTS-JUPYTER.txt
+
+Install napari and jupyter:
+
+.. code-block:: bash
+
+ $ pip install -r requirements/REQUIREMENTS-NAPARI-CI.txt
+ $ pip install -r requirements/REQUIREMENTS-JUPYTER.txt
diff --git a/examples/data_formatting_examples/format_osmfish.py b/examples/data_formatting_examples/format_osmfish.py
index 5983f0778..5fdd1d7e1 100644
--- a/examples/data_formatting_examples/format_osmfish.py
+++ b/examples/data_formatting_examples/format_osmfish.py
@@ -132,7 +132,7 @@ def __init__(self, input_dir: str, metadata_yaml) -> None:
import yaml
with open(metadata_yaml, "r") as f:
- self.osmfish_metadata = yaml.load(f)
+ self.osmfish_metadata = yaml.load(f, Loader=yaml.FullLoader)
self.num_z = self.osmfish_metadata['ImageProperties']['HybImageSize']['zcount']
self.input_dir = input_dir
diff --git a/notebooks/BaristaSeq.ipynb b/notebooks/BaristaSeq.ipynb
index d90beebcd..aede15b3c 100644
--- a/notebooks/BaristaSeq.ipynb
+++ b/notebooks/BaristaSeq.ipynb
@@ -289,7 +289,7 @@
"from functools import partial\n",
"\n",
"# calculate the background\n",
- "opening = partial(opening, selem=disk(5))\n",
+ "opening = partial(opening, footprint=disk(5))\n",
"\n",
"background = bleed_corrected.apply(\n",
" opening,\n",
@@ -479,4 +479,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
-}
\ No newline at end of file
+}
diff --git a/notebooks/Starfish_simulation.ipynb b/notebooks/Starfish_simulation.ipynb
index dca803216..ffada9a70 100644
--- a/notebooks/Starfish_simulation.ipynb
+++ b/notebooks/Starfish_simulation.ipynb
@@ -40,7 +40,7 @@
"outputs": [],
"source": [
"from numpy.random import permutation, rand, normal\n",
- "from numpy import ones, zeros, concatenate, array, float\n",
+ "from numpy import ones, zeros, concatenate, array\n",
"from numpy.random import poisson\n",
"from pandas import DataFrame, concat\n",
"from skimage.filters import gaussian\n",
@@ -144,4 +144,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
-}
\ No newline at end of file
+}
diff --git a/notebooks/py/BaristaSeq.py b/notebooks/py/BaristaSeq.py
index 3bab2379e..360cbf154 100644
--- a/notebooks/py/BaristaSeq.py
+++ b/notebooks/py/BaristaSeq.py
@@ -212,7 +212,7 @@
from functools import partial
# calculate the background
-opening = partial(opening, selem=disk(5))
+opening = partial(opening, footprint=disk(5))
background = bleed_corrected.apply(
opening,
diff --git a/notebooks/py/Starfish_simulation.py b/notebooks/py/Starfish_simulation.py
index 69dd94cb6..28d40fe71 100644
--- a/notebooks/py/Starfish_simulation.py
+++ b/notebooks/py/Starfish_simulation.py
@@ -28,7 +28,7 @@ def code_sum(codeword):
# EPY: START code
from numpy.random import permutation, rand, normal
-from numpy import ones, zeros, concatenate, array, float
+from numpy import ones, zeros, concatenate, array
from numpy.random import poisson
from pandas import DataFrame, concat
from skimage.filters import gaussian
diff --git a/notebooks/py/smFISH.py b/notebooks/py/smFISH.py
index f478af2cb..3299063a1 100644
--- a/notebooks/py/smFISH.py
+++ b/notebooks/py/smFISH.py
@@ -32,10 +32,6 @@
import starfish.data
from starfish import FieldOfView, DecodedIntensityTable
from starfish.types import TraceBuildingStrategies
-
-# equivalent to %gui qt
-ipython = get_ipython()
-ipython.magic("gui qt5")
# EPY: END code
# EPY: START markdown
@@ -171,5 +167,6 @@ def processing_pipeline(
image, intensities = processing_pipeline(experiment, fov_name='fov_001')
# uncomment the below line to visualize the output with the spot calls.
+# %gui qt
# viewer = starfish.display(image, intensities)
# EPY: END code
diff --git a/requirements/REQUIREMENTS-CI.txt b/requirements/REQUIREMENTS-CI.txt
index 677bad25a..c7b48e931 100644
--- a/requirements/REQUIREMENTS-CI.txt
+++ b/requirements/REQUIREMENTS-CI.txt
@@ -1,150 +1,164 @@
# You should not edit this file directly. Instead, you should edit one of the following files (requirements/REQUIREMENTS-CI.txt.in) and run make requirements/REQUIREMENTS-CI.txt
-alabaster==0.7.12
-argon2-cffi==21.1.0
-attrs==21.2.0
-Babel==2.9.1
+alabaster==0.7.13
+asttokens==2.4.1
+attrs==24.2.0
+Babel==2.15.0
backcall==0.2.0
-bleach==4.1.0
-boto3==1.18.37
-botocore==1.21.37
-certifi==2021.5.30
-cffi==1.14.6
-charset-normalizer==2.0.4
-click==8.0.1
-colorama==0.4.4
+backports.tarfile==1.2.0
+beautifulsoup4==4.12.3
+bleach==6.1.0
+boto3==1.34.156
+botocore==1.34.156
+certifi==2024.7.4
+cffi==1.17.0
+charset-normalizer==3.3.2
+click==8.1.7
commonmark==0.9.1
-coverage==5.5
-cryptography==3.4.8
-cycler==0.10.0
-dataclasses==0.6
-debugpy==1.4.1
-decorator==4.4.2
+contourpy==1.1.1
+coverage==7.6.1
+cryptography==43.0.0
+cycler==0.12.1
+decorator==5.1.1
defusedxml==0.7.1
-diskcache==5.2.1
-docutils==0.16
-entrypoints==0.3
-execnet==1.9.0
-flake8==3.9.2
-flake8-import-order==0.18.1
-h5py==3.4.0
-idna==3.2
-imageio==2.9.0
-imagesize==1.2.0
-importlib-metadata==4.8.1
-iniconfig==1.1.1
-ipykernel==6.3.1
-ipython==7.27.0
-ipython-genutils==0.2.0
-ipywidgets==7.6.4
-jedi==0.18.0
-jeepney==0.7.1
-Jinja2==3.0.1
-jmespath==0.10.0
-joblib==1.0.1
-jsonschema==3.2.0
-jupyter-client==7.0.2
-jupyter-core==4.7.1
-jupyterlab-pygments==0.1.2
-jupyterlab-widgets==1.0.1
-keyring==23.1.0
-kiwisolver==1.3.2
-m2r2==0.3.1
-MarkupSafe==2.0.1
-matplotlib==3.4.3
-matplotlib-inline==0.1.3
-mccabe==0.6.1
+diskcache==5.6.3
+docutils==0.20.1
+entrypoints==0.4
+exceptiongroup==1.2.2
+execnet==2.1.1
+executing==2.0.1
+fastjsonschema==2.20.0
+flake8==7.1.1
+flake8-import-order==0.18.2
+fonttools==4.53.1
+h5py==3.11.0
+idna==3.7
+imageio==2.34.2
+imagesize==1.4.1
+importlib_metadata==8.2.0
+importlib_resources==6.4.0
+iniconfig==2.0.0
+ipython==8.12.3
+jaraco.classes==3.4.0
+jaraco.context==5.3.0
+jaraco.functools==4.0.2
+jedi==0.19.1
+jeepney==0.8.0
+Jinja2==3.1.4
+jmespath==1.0.1
+joblib==1.4.2
+jsonschema==4.17.3
+jupyter_client==8.6.2
+jupyter_core==5.7.2
+jupyterlab_pygments==0.3.0
+keyring==25.3.0
+kiwisolver==1.4.5
+lazy_loader==0.4
+looseversion==1.3.0
+lxml==5.2.2
+lxml_html_clean==0.2.0
+m2r2==0.3.3.post2
+markdown-it-py==3.0.0
+MarkupSafe==2.1.5
+matplotlib==3.7.5
+matplotlib-inline==0.1.7
+mccabe==0.7.0
+mdurl==0.1.2
mistune==0.8.4
-mpmath==1.2.1
-mypy==0.910
-mypy-extensions==0.4.3
-nbclient==0.5.4
-nbconvert==6.1.0
+more-itertools==10.4.0
+mpmath==1.3.0
+mypy==1.10.1
+mypy-extensions==1.0.0
+nbclient==0.10.0
+nbconvert==6.5.4
nbencdec==0.0.10
-nbformat==5.1.3
-nest-asyncio==1.5.1
-networkx==2.6.2
-notebook==6.4.3
-numpy==1.21.2
-numpydoc==1.1.0
-packaging==21.0
-pandas==1.3.2
-pandocfilters==1.4.3
-parso==0.8.2
-pexpect==4.8.0
+nbformat==5.10.4
+networkx==3.1
+nh3==0.2.18
+numpy==1.24.4
+numpydoc==1.7.0
+packaging==24.1
+pandas==2.0.3
+pandocfilters==1.5.1
+parso==0.8.4
+pexpect==4.9.0
pickleshare==0.7.5
-Pillow==8.3.2
-pkginfo==1.7.1
-pluggy==1.0.0
-prometheus-client==0.11.0
-prompt-toolkit==3.0.20
+pillow==10.4.0
+pkginfo==1.10.0
+pkgutil_resolve_name==1.3.10
+platformdirs==4.2.2
+pluggy==1.5.0
+prompt_toolkit==3.0.47
ptyprocess==0.7.0
-py==1.10.0
-pycodestyle==2.7.0
-pycparser==2.20
-pyflakes==2.3.1
-Pygments==2.10.0
-pyparsing==2.4.7
-pyrsistent==0.18.0
-pytest==6.2.5
-pytest-cov==2.12.1
-pytest-forked==1.3.0
-pytest-xdist==2.3.0
-python-dateutil==2.8.2
-pytz==2021.1
-PyWavelets==1.1.1
-PyYAML==5.4.1
-pyzmq==22.2.1
+pure_eval==0.2.3
+pycodestyle==2.12.1
+pycparser==2.22
+pyflakes==3.2.0
+Pygments==2.18.0
+pyparsing==3.1.2
+pyrsistent==0.20.0
+pytest==8.3.2
+pytest-cov==5.0.0
+pytest-xdist==3.6.1
+python-dateutil==2.9.0.post0
+pytz==2024.1
+PyWavelets==1.4.1
+PyYAML==6.0.2
+pyzmq==26.1.0
read-roi==1.6.0
-readme-renderer==29.0
+readme_renderer==43.0
recommonmark==0.7.1
regional==1.1.2
-requests==2.26.0
-requests-toolbelt==0.9.1
-rfc3986==1.5.0
-s3transfer==0.5.0
-scikit-image==0.18.3
-scikit-learn==0.24.2
-scipy==1.7.1
-seaborn==0.11.2
-SecretStorage==3.3.1
-semantic-version==2.8.5
-Send2Trash==1.8.0
+requests==2.32.3
+requests-toolbelt==1.0.0
+rfc3986==2.0.0
+rich==13.7.1
+s3transfer==0.10.2
+scikit-image==0.21.0
+scikit-learn==1.3.2
+scipy==1.10.1
+seaborn==0.13.2
+SecretStorage==3.3.3
+semantic-version==2.10.0
setuptools==56.0.0
showit==1.1.4
six==1.16.0
slicedimage==4.1.1
-snowballstemmer==2.1.0
-Sphinx==4.1.2
-sphinx-autodoc-typehints==1.12.0
+snowballstemmer==2.2.0
+soupsieve==2.5
+Sphinx==7.1.2
+sphinx-autodoc-typehints==2.0.1
sphinx-bootstrap-theme==0.8.1
-sphinx-gallery==0.9.0
-sphinx-rtd-theme==0.5.2
-sphinxcontrib-applehelp==1.0.2
+sphinx-gallery==0.17.1
+sphinx-rtd-theme==2.0.0
+sphinxcontrib-applehelp==1.0.4
sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-htmlhelp==2.0.1
+sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-programoutput==0.17
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
-sympy==1.5.1
-terminado==0.12.1
-testpath==0.5.0
-threadpoolctl==2.2.0
-tifffile==2021.8.30
-toml==0.10.2
-tornado==6.1
-tqdm==4.62.2
-trackpy==0.5.0
-traitlets==5.1.0
-twine==3.4.2
-types-pkg-resources==0.1.3
-types-PyYAML==5.4.10
-types-requests==2.25.6
-typing-extensions==3.10.0.2
-urllib3==1.26.6
-validators==0.18.2
-wcwidth==0.2.5
+stack-data==0.6.3
+sympy==1.13.1
+tabulate==0.9.0
+threadpoolctl==3.5.0
+tifffile==2023.7.10
+tinycss2==1.3.0
+tomli==2.0.1
+tornado==6.4.1
+tqdm==4.66.5
+trackpy==0.6.4
+traitlets==5.14.3
+twine==5.1.1
+types-PyYAML==6.0.12.20240808
+types-requests==2.31.0.6
+types-setuptools==71.1.0.20240806
+types-urllib3==1.26.25.14
+typing_extensions==4.12.2
+tzdata==2024.1
+urllib3==1.26.19
+validators==0.33.0
+wcwidth==0.2.13
webencodings==0.5.1
-widgetsnbextension==3.5.1
-xarray==0.19.0
-zipp==3.5.0
\ No newline at end of file
+xarray==2023.1.0
+zipp==3.19.2
diff --git a/requirements/REQUIREMENTS-CI.txt.in b/requirements/REQUIREMENTS-CI.txt.in
index 0d16f555d..59a5d6e77 100644
--- a/requirements/REQUIREMENTS-CI.txt.in
+++ b/requirements/REQUIREMENTS-CI.txt.in
@@ -1,7 +1,11 @@
# requirements to run CI except for napari
+# constrain the CI requirements to packages already in REQUIREMENTS-STRICT.txt
+-r ../starfish/REQUIREMENTS-STRICT.txt
flake8
flake8-import-order
-m2r2
+lxml_html_clean
+# pin m2r2: https://github.com/CrossNox/m2r2/issues/52
+m2r2>=0.3.3
mypy != 0.740
numpydoc
nbencdec >= 0.0.5
@@ -9,17 +13,15 @@ pycodestyle
pytest>=4.4.0
pytest-cov>=2.5.1
pytest-xdist
-read_roi
recommonmark
requests
-types-pkg_resources
-types-PyYAML
-types-requests
-seaborn
sphinx
sphinx_autodoc_typehints
sphinx_bootstrap_theme
sphinxcontrib-programoutput
sphinx-gallery
sphinx_rtd_theme
-twine
\ No newline at end of file
+twine
+types-setuptools
+types-PyYAML
+types-requests
diff --git a/requirements/REQUIREMENTS-JUPYTER.txt b/requirements/REQUIREMENTS-JUPYTER.txt
new file mode 100644
index 000000000..712b34197
--- /dev/null
+++ b/requirements/REQUIREMENTS-JUPYTER.txt
@@ -0,0 +1,149 @@
+# You should not edit this file directly. Instead, you should edit one of the following files (requirements/REQUIREMENTS-JUPYTER.txt.in) and run make requirements/REQUIREMENTS-JUPYTER.txt
+anyio==4.4.0
+argon2-cffi==23.1.0
+argon2-cffi-bindings==21.2.0
+arrow==1.3.0
+asttokens==2.4.1
+async-lru==2.0.4
+attrs==24.2.0
+Babel==2.15.0
+backcall==0.2.0
+beautifulsoup4==4.12.3
+bleach==6.1.0
+boto3==1.34.156
+botocore==1.34.156
+certifi==2024.7.4
+cffi==1.17.0
+charset-normalizer==3.3.2
+click==8.1.7
+comm==0.2.2
+contourpy==1.1.1
+cycler==0.12.1
+debugpy==1.8.5
+decorator==5.1.1
+defusedxml==0.7.1
+diskcache==5.6.3
+docutils==0.20.1
+entrypoints==0.4
+exceptiongroup==1.2.2
+executing==2.0.1
+fastjsonschema==2.20.0
+fonttools==4.53.1
+fqdn==1.5.1
+h11==0.14.0
+h5py==3.11.0
+httpcore==1.0.5
+httpx==0.27.0
+idna==3.7
+imageio==2.34.2
+importlib_metadata==8.2.0
+importlib_resources==6.4.0
+ipykernel==6.29.5
+ipython==8.12.3
+ipywidgets==8.1.3
+isoduration==20.11.0
+jedi==0.19.1
+Jinja2==3.1.4
+jmespath==1.0.1
+joblib==1.4.2
+json5==0.9.25
+jsonpointer==3.0.0
+jsonschema==4.17.3
+jupyter==1.0.0
+jupyter-console==6.6.3
+jupyter-events==0.6.3
+jupyter-lsp==2.2.5
+jupyter_client==8.6.2
+jupyter_core==5.7.2
+jupyter_server==2.10.0
+jupyter_server_terminals==0.5.3
+jupyterlab==4.1.6
+jupyterlab_pygments==0.3.0
+jupyterlab_server==2.24.0
+jupyterlab_widgets==3.0.11
+kiwisolver==1.4.5
+lazy_loader==0.4
+looseversion==1.3.0
+lxml==5.2.2
+MarkupSafe==2.1.5
+matplotlib==3.7.5
+matplotlib-inline==0.1.7
+mistune==0.8.4
+mpmath==1.3.0
+nbclient==0.10.0
+nbconvert==6.5.4
+nbformat==5.10.4
+nest-asyncio==1.6.0
+networkx==3.1
+notebook==7.1.3
+notebook_shim==0.2.4
+numpy==1.24.4
+overrides==7.7.0
+packaging==24.1
+pandas==2.0.3
+pandocfilters==1.5.1
+parso==0.8.4
+pexpect==4.9.0
+pickleshare==0.7.5
+pillow==10.4.0
+pkgutil_resolve_name==1.3.10
+platformdirs==4.2.2
+prometheus_client==0.20.0
+prompt_toolkit==3.0.47
+psutil==6.0.0
+ptyprocess==0.7.0
+pure_eval==0.2.3
+pycparser==2.22
+Pygments==2.18.0
+pyparsing==3.1.2
+pyrsistent==0.20.0
+python-dateutil==2.9.0.post0
+python-json-logger==2.0.7
+pytz==2024.1
+PyWavelets==1.4.1
+PyYAML==6.0.2
+pyzmq==26.1.0
+qtconsole==5.5.2
+QtPy==2.4.1
+read-roi==1.6.0
+regional==1.1.2
+requests==2.32.3
+rfc3339-validator==0.1.4
+rfc3986-validator==0.1.1
+s3transfer==0.10.2
+scikit-image==0.21.0
+scikit-learn==1.3.2
+scipy==1.10.1
+seaborn==0.13.2
+semantic-version==2.10.0
+Send2Trash==1.8.3
+setuptools==56.0.0
+showit==1.1.4
+six==1.16.0
+slicedimage==4.1.1
+sniffio==1.3.1
+soupsieve==2.5
+stack-data==0.6.3
+sympy==1.13.1
+terminado==0.18.1
+threadpoolctl==3.5.0
+tifffile==2023.7.10
+tinycss2==1.3.0
+tomli==2.0.1
+tornado==6.4.1
+tqdm==4.66.5
+trackpy==0.6.4
+traitlets==5.14.3
+types-python-dateutil==2.9.0.20240316
+typing_extensions==4.12.2
+tzdata==2024.1
+uri-template==1.3.0
+urllib3==1.26.19
+validators==0.33.0
+wcwidth==0.2.13
+webcolors==24.6.0
+webencodings==0.5.1
+websocket-client==1.8.0
+widgetsnbextension==4.0.11
+xarray==2023.1.0
+zipp==3.19.2
diff --git a/requirements/REQUIREMENTS-JUPYTER.txt.in b/requirements/REQUIREMENTS-JUPYTER.txt.in
new file mode 100644
index 000000000..ca487d08a
--- /dev/null
+++ b/requirements/REQUIREMENTS-JUPYTER.txt.in
@@ -0,0 +1,4 @@
+# requirements to run jupyter
+# constrain the requirements to packages already in REQUIREMENTS-STRICT.txt
+-r ../starfish/REQUIREMENTS-STRICT.txt
+jupyter
diff --git a/requirements/REQUIREMENTS-NAPARI-CI.txt b/requirements/REQUIREMENTS-NAPARI-CI.txt
index 4d7989afa..91263fabb 100644
--- a/requirements/REQUIREMENTS-NAPARI-CI.txt
+++ b/requirements/REQUIREMENTS-NAPARI-CI.txt
@@ -1,160 +1,159 @@
# You should not edit this file directly. Instead, you should edit one of the following files (requirements/REQUIREMENTS-NAPARI-CI.txt.in) and run make requirements/REQUIREMENTS-NAPARI-CI.txt
-alabaster==0.7.12
+alabaster==0.7.13
+annotated-types==0.7.0
+app-model==0.2.8
appdirs==1.4.4
-argon2-cffi==21.3.0
-argon2-cffi-bindings==21.2.0
-asttokens==2.0.5
-attrs==21.4.0
-Babel==2.10.3
+asttokens==2.4.1
+attrs==24.2.0
+Babel==2.15.0
backcall==0.2.0
-beautifulsoup4==4.11.1
-bleach==5.0.0
-boto3==1.24.14
-botocore==1.27.14
-build==0.8.0
+boto3==1.34.156
+botocore==1.34.156
+build==1.2.1
cachey==0.2.1
-certifi==2022.6.15
-cffi==1.15.0
-charset-normalizer==2.0.12
-click==8.1.3
-cloudpickle==2.1.0
-cycler==0.11.0
-dask==2022.6.0
-dataclasses==0.6
-debugpy==1.6.0
-decorator==4.4.2
-defusedxml==0.7.1
-diskcache==5.4.0
-docstring-parser==0.14.1
-docutils==0.18.1
-entrypoints==0.4
-executing==0.8.3
-fastjsonschema==2.15.3
-fonttools==4.33.3
-freetype-py==2.3.0
-fsspec==2022.5.0
-h5py==3.7.0
+certifi==2024.7.4
+charset-normalizer==3.3.2
+click==8.1.7
+cloudpickle==3.0.0
+comm==0.2.2
+contourpy==1.1.1
+cycler==0.12.1
+dask==2023.5.0
+debugpy==1.8.5
+decorator==5.1.1
+diskcache==5.6.3
+docstring_parser==0.16
+docutils==0.20.1
+exceptiongroup==1.2.2
+executing==2.0.1
+fonttools==4.53.1
+freetype-py==2.4.0
+fsspec==2024.6.1
+h5py==3.11.0
HeapDict==1.0.1
-hsluv==5.0.3
-idna==3.3
-imageio==2.19.3
-imagesize==1.3.0
-importlib-metadata==4.11.4
-iniconfig==1.1.1
-ipykernel==6.15.0
-ipython==8.4.0
-ipython-genutils==0.2.0
-ipywidgets==7.7.1
-jedi==0.18.1
-Jinja2==3.1.2
+hsluv==5.0.4
+idna==3.7
+imageio==2.34.2
+imagesize==1.4.1
+importlib_metadata==8.2.0
+importlib_resources==6.4.0
+in-n-out==0.2.1
+iniconfig==2.0.0
+ipykernel==6.29.5
+ipython==8.12.3
+jedi==0.19.1
+Jinja2==3.1.4
jmespath==1.0.1
-joblib==1.1.0
-jsonschema==4.6.0
-jupyter-client==7.3.4
-jupyter-core==4.10.0
-jupyterlab-pygments==0.2.2
-jupyterlab-widgets==1.1.1
-kiwisolver==1.4.3
+joblib==1.4.2
+jsonschema==4.17.3
+jupyter_client==8.6.2
+jupyter_core==5.7.2
+kiwisolver==1.4.5
+lazy_loader==0.4
+llvmlite==0.41.1
locket==1.0.0
-magicgui==0.5.1
-MarkupSafe==2.1.1
-matplotlib==3.5.2
-matplotlib-inline==0.1.3
+looseversion==1.3.0
+magicgui==0.9.1
+markdown-it-py==3.0.0
+MarkupSafe==2.1.5
+matplotlib==3.7.5
+matplotlib-inline==0.1.7
+mdurl==0.1.2
mistune==0.8.4
-mpmath==1.2.1
-napari==0.4.16
-napari-console==0.0.4
+mpmath==1.3.0
+napari==0.4.19.post1
+napari-console==0.0.9
napari-plugin-engine==0.2.0
-napari-svg==0.1.6
-nbclient==0.6.4
-nbconvert==6.5.0
-nbformat==5.4.0
-nest-asyncio==1.5.5
-networkx==2.8.4
-notebook==6.4.12
-npe2==0.5.0
-numpy==1.22.4
-numpydoc==1.4.0
-packaging==21.3
-pandas==1.4.2
-pandocfilters==1.5.0
-parso==0.8.3
-partd==1.2.0
-pep517==0.12.0
-pexpect==4.8.0
+napari-plugin-manager==0.1.0
+napari-svg==0.1.10
+nest-asyncio==1.6.0
+networkx==3.1
+npe2==0.7.7
+numba==0.58.1
+numpy==1.24.4
+numpydoc==1.7.0
+packaging==24.1
+pandas==2.0.3
+parso==0.8.4
+partd==1.4.1
+pexpect==4.9.0
pickleshare==0.7.5
-Pillow==9.1.1
-Pint==0.19.2
-pluggy==1.0.0
-prometheus-client==0.14.1
-prompt-toolkit==3.0.29
-psutil==5.9.1
-psygnal==0.3.5
+pillow==10.4.0
+Pint==0.21.1
+pkgutil_resolve_name==1.3.10
+platformdirs==4.2.2
+pluggy==1.5.0
+pooch==1.8.2
+prompt_toolkit==3.0.47
+psutil==6.0.0
+psygnal==0.11.1
ptyprocess==0.7.0
-pure-eval==0.2.2
-py==1.11.0
-pycparser==2.21
-pydantic==1.9.1
-Pygments==2.12.0
-PyOpenGL==3.1.6
-pyparsing==3.0.9
-PyQt5==5.14.2
-PyQt5-sip==12.11.0
-pyrsistent==0.18.1
-pytest==7.1.2
-pytest-qt==4.0.2
-python-dateutil==2.8.2
-pytomlpp==1.0.11
-pytz==2022.1
-PyWavelets==1.3.0
-PyYAML==6.0
-pyzmq==23.2.0
-qtconsole==5.3.1
-QtPy==2.1.0
+pure_eval==0.2.3
+pyconify==0.1.6
+pydantic==2.8.2
+pydantic-compat==0.1.2
+pydantic_core==2.20.1
+Pygments==2.18.0
+PyOpenGL==3.1.7
+pyparsing==3.1.2
+pyproject_hooks==1.1.0
+PyQt5==5.15.11
+PyQt5-Qt5==5.15.14
+PyQt5_sip==12.15.0
+pyrsistent==0.20.0
+pytest==8.3.2
+pytest-qt==4.4.0
+python-dateutil==2.9.0.post0
+pytz==2024.1
+PyWavelets==1.4.1
+PyYAML==6.0.2
+pyzmq==26.1.0
+qtconsole==5.5.2
+QtPy==2.4.1
read-roi==1.6.0
regional==1.1.2
-requests==2.28.0
-s3transfer==0.6.0
-scikit-image==0.18.3
-scikit-learn==1.1.1
-scipy==1.8.1
+requests==2.32.3
+rich==13.7.1
+s3transfer==0.10.2
+scikit-image==0.21.0
+scikit-learn==1.3.2
+scipy==1.10.1
+seaborn==0.13.2
semantic-version==2.10.0
-Send2Trash==1.8.0
-setuptools==58.1.0
+setuptools==56.0.0
+shellingham==1.5.4
showit==1.1.4
six==1.16.0
slicedimage==4.1.1
snowballstemmer==2.2.0
-soupsieve==2.3.2.post1
-Sphinx==5.0.2
-sphinx-bootstrap-theme==0.8.1
-sphinxcontrib-applehelp==1.0.2
+Sphinx==7.1.2
+sphinxcontrib-applehelp==1.0.4
sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-htmlhelp==2.0.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
-stack-data==0.3.0
-superqt==0.3.2
-sympy==1.5.1
-terminado==0.15.0
-threadpoolctl==3.1.0
-tifffile==2022.5.4
-tinycss2==1.1.1
+stack-data==0.6.3
+superqt==0.6.7
+sympy==1.13.1
+tabulate==0.9.0
+threadpoolctl==3.5.0
+tifffile==2023.7.10
tomli==2.0.1
-toolz==0.11.2
-tornado==6.1
-tqdm==4.64.0
-trackpy==0.5.0
-traitlets==5.3.0
-typer==0.4.1
-typing_extensions==4.2.0
-urllib3==1.26.9
-validators==0.20.0
-vispy==0.10.0
-wcwidth==0.2.5
-webencodings==0.5.1
-widgetsnbextension==3.6.1
-wrapt==1.14.1
-xarray==2022.3.0
-zipp==3.8.0
+tomli_w==1.0.0
+toolz==0.12.1
+tornado==6.4.1
+tqdm==4.66.5
+trackpy==0.6.4
+traitlets==5.14.3
+triangle==20230923
+typer==0.12.3
+typing_extensions==4.12.2
+tzdata==2024.1
+urllib3==1.26.19
+validators==0.33.0
+vispy==0.14.2
+wcwidth==0.2.13
+wrapt==1.16.0
+xarray==2023.1.0
+zipp==3.19.2
diff --git a/requirements/REQUIREMENTS-NAPARI-CI.txt.in b/requirements/REQUIREMENTS-NAPARI-CI.txt.in
index 6e66df139..8030df5c4 100644
--- a/requirements/REQUIREMENTS-NAPARI-CI.txt.in
+++ b/requirements/REQUIREMENTS-NAPARI-CI.txt.in
@@ -1,3 +1,6 @@
-napari >= 0.3.4
-PyQt5==5.14.2
+# requirements to run napari and CI for napari
+# constrain the CI requirements to packages already in REQUIREMENTS-STRICT.txt
+-r ../starfish/REQUIREMENTS-STRICT.txt
+# napari 0.4.18 made point sizes only isotropic: https://forum.image.sc/t/anisotropic-point-sizes/83388
+napari[all]>0.4.17
pytest-qt
diff --git a/setup.cfg b/setup.cfg
index 0d3417cea..e35e6fe30 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,7 +14,7 @@ author_email = dganguli@chanzuckerberg.com
license = MIT
long_description = README.rst
long_description_content_type = text/x-rst; charset=UTF-8
-home-page = https://spacetx-starfish.readthedocs.io/en/latest/
+home_page = https://spacetx-starfish.readthedocs.io/en/latest/
project_urls =
Bug Tracker = https://github.com/spacetx/starfish/issues
Documentation = https://spacetx-starfish.readthedocs.io/en/latest/
@@ -25,8 +25,13 @@ classifier =
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS :: MacOS X
+ Operating System :: Windows
License :: OSI Approved :: MIT License
- Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Programming Language :: Python :: 3.11
keywords =
single-cell
image-based
diff --git a/setup.py b/setup.py
index 7309d1df8..2bf5eefbc 100644
--- a/setup.py
+++ b/setup.py
@@ -30,5 +30,5 @@
include_package_data=True,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
- python_requires='>=3.7'
+ python_requires='>=3.8, <3.12'
)
diff --git a/starfish/REQUIREMENTS-STRICT.txt b/starfish/REQUIREMENTS-STRICT.txt
index eda66e668..b8abce074 100644
--- a/starfish/REQUIREMENTS-STRICT.txt
+++ b/starfish/REQUIREMENTS-STRICT.txt
@@ -1,105 +1,60 @@
# You should not edit this file directly. Instead, you should edit one of the following files (REQUIREMENTS.txt) and run make starfish/REQUIREMENTS-STRICT.txt
-argon2-cffi==21.3.0
-argon2-cffi-bindings==21.2.0
-asttokens==2.0.5
-attrs==21.4.0
-backcall==0.2.0
-beautifulsoup4==4.11.1
-bleach==5.0.0
-boto3==1.24.14
-botocore==1.27.14
-certifi==2022.6.15
-cffi==1.15.0
-charset-normalizer==2.0.12
-click==8.1.3
-cycler==0.11.0
-dataclasses==0.6
-debugpy==1.6.0
-decorator==4.4.2
-defusedxml==0.7.1
-diskcache==5.4.0
-entrypoints==0.4
-executing==0.8.3
-fastjsonschema==2.15.3
-fonttools==4.33.3
-h5py==3.7.0
-idna==3.3
-imageio==2.19.3
-ipykernel==6.15.0
-ipython==8.4.0
-ipython-genutils==0.2.0
-ipywidgets==7.7.1
-jedi==0.18.1
-Jinja2==3.1.2
+attrs==24.2.0
+boto3==1.34.156
+botocore==1.34.156
+certifi==2024.7.4
+charset-normalizer==3.3.2
+click==8.1.7
+contourpy==1.1.1
+cycler==0.12.1
+diskcache==5.6.3
+docutils==0.20.1
+fonttools==4.53.1
+h5py==3.11.0
+idna==3.7
+imageio==2.34.2
+importlib_resources==6.4.0
jmespath==1.0.1
-joblib==1.1.0
-jsonschema==4.6.0
-jupyter-client==7.3.4
-jupyter-core==4.10.0
-jupyterlab-pygments==0.2.2
-jupyterlab-widgets==1.1.1
-kiwisolver==1.4.3
-MarkupSafe==2.1.1
-matplotlib==3.5.2
-matplotlib-inline==0.1.3
+joblib==1.4.2
+jsonschema==4.17.3
+kiwisolver==1.4.5
+lazy_loader==0.4
+looseversion==1.3.0
+matplotlib==3.7.5
mistune==0.8.4
-mpmath==1.2.1
-nbclient==0.6.4
-nbconvert==6.5.0
-nbformat==5.4.0
-nest-asyncio==1.5.5
-networkx==2.8.4
-notebook==6.4.12
-numpy==1.22.4
-packaging==21.3
-pandas==1.4.2
-pandocfilters==1.5.0
-parso==0.8.3
-pexpect==4.8.0
-pickleshare==0.7.5
-Pillow==9.1.1
-prometheus-client==0.14.1
-prompt-toolkit==3.0.29
-psutil==5.9.1
-ptyprocess==0.7.0
-pure-eval==0.2.2
-pycparser==2.21
-Pygments==2.12.0
-pyparsing==3.0.9
-pyrsistent==0.18.1
-python-dateutil==2.8.2
-pytz==2022.1
-PyWavelets==1.3.0
-PyYAML==6.0
-pyzmq==23.2.0
+mpmath==1.3.0
+networkx==3.1
+numpy==1.24.4
+packaging==24.1
+pandas==2.0.3
+pillow==10.4.0
+pkgutil_resolve_name==1.3.10
+pyparsing==3.1.2
+pyrsistent==0.20.0
+python-dateutil==2.9.0.post0
+pytz==2024.1
+PyWavelets==1.4.1
+PyYAML==6.0.2
read-roi==1.6.0
regional==1.1.2
-requests==2.28.0
-s3transfer==0.6.0
-scikit-image==0.18.3
-scikit-learn==1.1.1
-scipy==1.8.1
+requests==2.32.3
+s3transfer==0.10.2
+scikit-image==0.21.0
+scikit-learn==1.3.2
+scipy==1.10.1
+seaborn==0.13.2
semantic-version==2.10.0
-Send2Trash==1.8.0
-setuptools==58.1.0
+setuptools==56.0.0
showit==1.1.4
six==1.16.0
slicedimage==4.1.1
-soupsieve==2.3.2.post1
-sphinx-bootstrap-theme==0.8.1
-stack-data==0.3.0
-sympy==1.5.1
-terminado==0.15.0
-threadpoolctl==3.1.0
-tifffile==2022.5.4
-tinycss2==1.1.1
-tornado==6.1
-tqdm==4.64.0
-trackpy==0.5.0
-traitlets==5.3.0
-urllib3==1.26.9
-validators==0.20.0
-wcwidth==0.2.5
-webencodings==0.5.1
-widgetsnbextension==3.6.1
-xarray==2022.3.0
+sympy==1.13.1
+threadpoolctl==3.5.0
+tifffile==2023.7.10
+tqdm==4.66.5
+trackpy==0.6.4
+tzdata==2024.1
+urllib3==1.26.19
+validators==0.33.0
+xarray==2023.1.0
+zipp==3.19.2
diff --git a/starfish/core/_display.py b/starfish/core/_display.py
index eae709ea7..991a57cf8 100644
--- a/starfish/core/_display.py
+++ b/starfish/core/_display.py
@@ -18,7 +18,7 @@
Viewer = None
-NAPARI_VERSION = "0.3.4" # when changing this, update docs in display
+NAPARI_VERSION = "0.4.18" # when changing this, update docs in display
INTERACTIVE = not hasattr(__main__, "__file__")
@@ -64,7 +64,7 @@ def _max_intensity_table_maintain_dims(
initial_dimensions = OrderedDict(intensity_table.sizes)
projected_intensities = intensity_table.max(str_dimensions)
expanded_intensities = projected_intensities.expand_dims(str_dimensions)
- return expanded_intensities.transpose(*tuple(initial_dimensions.keys()))
+ return expanded_intensities.transpose(*tuple(initial_dimensions.keys())) # type: ignore
def _mask_low_intensity_spots(
@@ -155,6 +155,8 @@ def display(
Multiplies the radius of the displayed spots (default 1, no scaling)
z_multiplier : int
Multiplies the radius of the spots in z, to account for anisotropy.
+        Important note: anisotropy of spot sizes is DEPRECATED in napari > 0.4.17 and may be
+        restored in a later version.
Examples
--------
@@ -197,7 +199,7 @@ def display(
-----
- To use in ipython, use the `%gui qt` magic.
- napari axes are labeled with the ImageStack axis names
- - Requires napari 0.3.4: use `pip install starfish[napari]`
+ - Requires napari 0.4.18: use `pip install starfish[napari]`
to install all necessary requirements
"""
if stack is None and spots is None and masks is None:
@@ -279,7 +281,7 @@ def display(
face_color="red",
edge_color="red",
symbol="ring",
- size=sizes * radius_multiplier,
+ size=np.mean(sizes[:, 2:4], axis=1) * radius_multiplier,
n_dimensional=True,
name="spots"
)
diff --git a/starfish/core/codebook/codebook.py b/starfish/core/codebook/codebook.py
index ef665470a..ae1ba12b8 100644
--- a/starfish/core/codebook/codebook.py
+++ b/starfish/core/codebook/codebook.py
@@ -71,7 +71,7 @@ class Codebook(xr.DataArray):
@property
def code_length(self) -> int:
"""return the length of codes in this codebook"""
- return int(np.dot(*self.shape[1:]))
+ return int(np.dot(*self.shape[1:])) # type: ignore[call-overload]
@classmethod
def zeros(cls, code_names: Sequence[str], n_round: int, n_channel: int):
@@ -707,7 +707,8 @@ def _view_row_as_element(array: np.ndarray) -> np.ndarray:
# found in the non-maximal channels.
max_intensities = intensities.max(Axes.CH.value)
round_intensities = intensities.sum(Axes.CH.value)
- distance: IntensityTable = 1 - (max_intensities / round_intensities).mean(Axes.ROUND.value)
+ distance: IntensityTable = 1 - (max_intensities / round_intensities).mean(
+ Axes.ROUND.value) # type: ignore
a = _view_row_as_element(codes.values.reshape(self.shape[0], -1)) # type: ignore
b = _view_row_as_element(
diff --git a/starfish/core/codebook/test/test_metric_decode.py b/starfish/core/codebook/test/test_metric_decode.py
index 99b830be0..cfbe3b773 100644
--- a/starfish/core/codebook/test/test_metric_decode.py
+++ b/starfish/core/codebook/test/test_metric_decode.py
@@ -26,8 +26,8 @@ def intensity_table_factory(data: np.ndarray=np.array([[[0, 3], [4, 0]]])) -> In
intensity_table = IntensityTable.from_spot_data(
data,
SpotAttributes(spot_attributes_data),
- ch_values=np.arange(data.shape[1]),
- round_values=np.arange(data.shape[2]),
+ ch_values=range(data.shape[1]),
+ round_values=range(data.shape[2]),
)
return intensity_table
diff --git a/starfish/core/codebook/test/test_normalize_code_traces.py b/starfish/core/codebook/test/test_normalize_code_traces.py
index ae9cf638a..b0757d8dd 100644
--- a/starfish/core/codebook/test/test_normalize_code_traces.py
+++ b/starfish/core/codebook/test/test_normalize_code_traces.py
@@ -23,8 +23,8 @@ def intensity_table_factory() -> IntensityTable:
intensity_table = IntensityTable.from_spot_data(
intensities, spot_attributes,
- ch_values=np.arange(intensities.shape[1]),
- round_values=np.arange(intensities.shape[2]),
+ ch_values=range(intensities.shape[1]),
+ round_values=range(intensities.shape[2]),
)
return intensity_table
diff --git a/starfish/core/codebook/test/test_per_round_max_decode.py b/starfish/core/codebook/test/test_per_round_max_decode.py
index ef4c244d6..3b1ff10b3 100644
--- a/starfish/core/codebook/test/test_per_round_max_decode.py
+++ b/starfish/core/codebook/test/test_per_round_max_decode.py
@@ -24,8 +24,8 @@ def intensity_table_factory(data: np.ndarray = np.array([[[0, 3], [4, 0]]])) ->
spot_attributes = SpotAttributes(spot_attributes_data)
intensity_table = IntensityTable.from_spot_data(
data, spot_attributes,
- round_values=np.arange(data.shape[1]),
- ch_values=np.arange(data.shape[2]),
+ round_values=range(data.shape[1]),
+ ch_values=range(data.shape[2]),
)
return intensity_table
diff --git a/starfish/core/experiment/builder/builder.py b/starfish/core/experiment/builder/builder.py
index 6f8aec303..a715faf3b 100644
--- a/starfish/core/experiment/builder/builder.py
+++ b/starfish/core/experiment/builder/builder.py
@@ -209,7 +209,7 @@ def write_irregular_experiment_json(
tile_fetchers: Mapping[str, TileFetcher],
postprocess_func: Optional[Callable[[dict], dict]]=None,
default_shape: Optional[Mapping[Axes, int]]=None,
- fov_path_generator: Callable[[Path, str], Path] = None,
+ fov_path_generator: Optional[Callable[[Path, str], Path]] = None,
tile_opener: Optional[Callable[[Path, Tile, str], BinaryIO]] = None,
writer_contract: Optional[WriterContract] = None,
) -> None:
diff --git a/starfish/core/experiment/experiment.py b/starfish/core/experiment/experiment.py
index 3a237e95f..8578adfb9 100644
--- a/starfish/core/experiment/experiment.py
+++ b/starfish/core/experiment/experiment.py
@@ -230,7 +230,7 @@ def __init__(
codebook: Codebook,
extras: dict,
*,
- src_doc: dict=None,
+ src_doc: dict = {},
) -> None:
self._fovs = fovs
self._codebook = codebook
diff --git a/starfish/core/image/Filter/gaussian_low_pass.py b/starfish/core/image/Filter/gaussian_low_pass.py
index 34a4ed304..77e4b7d5a 100644
--- a/starfish/core/image/Filter/gaussian_low_pass.py
+++ b/starfish/core/image/Filter/gaussian_low_pass.py
@@ -84,7 +84,7 @@ def _low_pass(
filtered = gaussian(
image,
- sigma=sigma, output=None, cval=0, multichannel=False, preserve_range=True, truncate=4.0
+ sigma=sigma, output=None, cval=0, channel_axis=None, preserve_range=True, truncate=4.0
)
return filtered
diff --git a/starfish/core/image/Filter/match_histograms.py b/starfish/core/image/Filter/match_histograms.py
index 20c5b7dcf..ab362859f 100644
--- a/starfish/core/image/Filter/match_histograms.py
+++ b/starfish/core/image/Filter/match_histograms.py
@@ -50,7 +50,7 @@ def _compute_reference_distribution(self, data: ImageStack) -> xr.DataArray:
stacked = data.xarray.stack(chunk_key=chunk_key)
stacked = stacked.stack(sort_key=sort_key)
- sorted_stacked = stacked.groupby("sort_key").map(np.sort)
+ sorted_stacked = stacked.groupby("sort_key").map(np.sort) # type: ignore[arg-type]
reference = sorted_stacked.mean("sort_key")
reference = reference.unstack("chunk_key")
return reference
diff --git a/starfish/core/image/Filter/white_tophat.py b/starfish/core/image/Filter/white_tophat.py
index 73964f94e..24716bace 100644
--- a/starfish/core/image/Filter/white_tophat.py
+++ b/starfish/core/image/Filter/white_tophat.py
@@ -76,7 +76,7 @@ def _white_tophat(self, image: xr.DataArray) -> xr.DataArray:
structuring_element = ball(self.masking_radius)
else:
structuring_element = disk(self.masking_radius)
- return white_tophat(image, selem=structuring_element)
+ return white_tophat(image, footprint=structuring_element)
def run(
self,
diff --git a/starfish/core/image/_registration/ApplyTransform/test/test_warp.py b/starfish/core/image/_registration/ApplyTransform/test/test_warp.py
index 1aaca19da..6a75a9e34 100644
--- a/starfish/core/image/_registration/ApplyTransform/test/test_warp.py
+++ b/starfish/core/image/_registration/ApplyTransform/test/test_warp.py
@@ -8,26 +8,26 @@
expected_registered_values = np.array(
- [[0.090654, 0.090593, 0.091554, 0.091661, 0.089967, 0.094072, 0.097398,
- 0.099046, 0.100969, 0.112108],
- [0.09926, 0.096925, 0.096269, 0.097002, 0.095842, 0.097704, 0.09984,
- 0.101457, 0.105455, 0.106004],
- [0.109834, 0.103609, 0.102693, 0.099931, 0.098222, 0.10074, 0.10251,
- 0.103838, 0.106874, 0.113451],
- [0.12369, 0.112428, 0.111482, 0.10631, 0.106203, 0.104753, 0.106706,
- 0.105013, 0.10811, 0.11371],
- [0.141802, 0.129946, 0.124285, 0.120928, 0.115908, 0.110735, 0.110735,
- 0.107454, 0.109468, 0.109255],
- [0.147326, 0.14464, 0.141436, 0.132845, 0.124071, 0.121828, 0.118074,
- 0.112306, 0.109163, 0.109483],
- [0.145296, 0.150362, 0.15082, 0.140337, 0.133806, 0.1299, 0.120592,
- 0.114046, 0.115496, 0.111666],
- [0.131121, 0.145525, 0.150011, 0.146609, 0.137407, 0.129198, 0.127306,
- 0.118029, 0.116594, 0.111559],
- [0.126482, 0.132372, 0.142596, 0.149538, 0.144701, 0.137469, 0.125353,
- 0.121996, 0.117342, 0.118273],
- [0.122866, 0.126543, 0.133669, 0.145418, 0.150515, 0.140139, 0.129992,
- 0.124605, 0.120867, 0.121889]], dtype=np.float32)
+ [[0.085832, 0.085084, 0.086229, 0.08687, 0.089662, 0.092256,
+ 0.099474, 0.099489, 0.11017, 0.122408],
+ [0.090654, 0.090593, 0.091554, 0.091661, 0.089967, 0.094072,
+ 0.097398, 0.099046, 0.100969, 0.112108],
+ [0.09926, 0.096925, 0.096269, 0.097002, 0.095842, 0.097704,
+ 0.09984, 0.101457, 0.105455, 0.106004],
+ [0.109834, 0.103609, 0.102693, 0.099931, 0.098222, 0.10074,
+ 0.10251, 0.103838, 0.106874, 0.113451],
+ [0.12369, 0.112428, 0.111482, 0.10631, 0.106203, 0.104753,
+ 0.106706, 0.105013, 0.10811, 0.11371],
+ [0.141802, 0.129946, 0.124285, 0.120928, 0.115908, 0.110735,
+ 0.110735, 0.107454, 0.109468, 0.109255],
+ [0.147326, 0.14464, 0.141436, 0.132845, 0.124071, 0.121828,
+ 0.118074, 0.112306, 0.109163, 0.109483],
+ [0.145296, 0.150362, 0.15082, 0.140337, 0.133806, 0.1299,
+ 0.120592, 0.114046, 0.115496, 0.111666],
+ [0.131121, 0.145525, 0.150011, 0.146609, 0.137408, 0.129198,
+ 0.127306, 0.118029, 0.116594, 0.111559],
+ [0.126482, 0.132372, 0.142596, 0.149538, 0.144701, 0.137469,
+ 0.125353, 0.121996, 0.117342, 0.118273]], dtype=np.float32)
def test_calculate_translation_transforms_and_apply():
diff --git a/starfish/core/image/_registration/ApplyTransform/warp.py b/starfish/core/image/_registration/ApplyTransform/warp.py
index aedd9172b..f0d139eb8 100644
--- a/starfish/core/image/_registration/ApplyTransform/warp.py
+++ b/starfish/core/image/_registration/ApplyTransform/warp.py
@@ -4,7 +4,7 @@
import numpy as np
import xarray as xr
from skimage import transform
-from skimage.transform._geometric import GeometricTransform
+from skimage.transform._geometric import _GeometricTransform
from tqdm import tqdm
from starfish.core.config import StarfishConfig
@@ -65,7 +65,7 @@ def run(self, stack: ImageStack, transforms_list: TransformsList,
def warp(
image: xr.DataArray,
- transformation_object: GeometricTransform,
+ transformation_object: _GeometricTransform,
**kwargs
) -> np.ndarray:
"""
@@ -76,7 +76,7 @@ def warp(
----------
image : xr.DataArray
The image to be transformed
- transformation_object : :py:class:`~skimage.transform._geometric.GeometricTransform`
+ transformation_object : :py:class:`~skimage.transform._geometric._GeometricTransform`
The transformation object to apply.
Returns
diff --git a/starfish/core/image/_registration/LearnTransform/test/test_translation.py b/starfish/core/image/_registration/LearnTransform/test/test_translation.py
index 92684067c..76a9cc3f3 100644
--- a/starfish/core/image/_registration/LearnTransform/test/test_translation.py
+++ b/starfish/core/image/_registration/LearnTransform/test/test_translation.py
@@ -6,7 +6,7 @@
from starfish.core.types import Axes
-ISS_SHIFTS = [[-23, 6], [-22, 2], [-22, -3], [-15, -4]]
+ISS_SHIFTS = [[-23, 6], [-22, 2], [-22, -4], [-15, -4]]
def test_learn_transforms_throws_error():
diff --git a/starfish/core/image/_registration/test/test_transforms_list.py b/starfish/core/image/_registration/test/test_transforms_list.py
index 652d48977..f645ce4a2 100644
--- a/starfish/core/image/_registration/test/test_transforms_list.py
+++ b/starfish/core/image/_registration/test/test_transforms_list.py
@@ -9,7 +9,7 @@
from starfish.core.types import Axes, TransformType
-ISS_SHIFTS = [[-23, 6], [-22, 2], [-22, -3], [-15, -4]]
+ISS_SHIFTS = [[-23, 6], [-22, 2], [-22, -4], [-15, -4]]
def test_export_import_transforms_object():
diff --git a/starfish/core/image/_registration/transforms_list.py b/starfish/core/image/_registration/transforms_list.py
index 21b959aad..d65e1ccfb 100644
--- a/starfish/core/image/_registration/transforms_list.py
+++ b/starfish/core/image/_registration/transforms_list.py
@@ -1,9 +1,9 @@
import json
-from typing import List, Mapping, Tuple
+from typing import List, Mapping, Optional, Tuple
import numpy as np
from semantic_version import Version
-from skimage.transform._geometric import GeometricTransform, SimilarityTransform
+from skimage.transform._geometric import _GeometricTransform, SimilarityTransform
from slicedimage.io import resolve_path_or_url
from starfish.core.config import StarfishConfig
@@ -26,19 +26,19 @@ class TransformsList:
objects to apply to an Imagestack"""
def __init__(self,
- transforms_list: List[Tuple[Mapping[Axes, int],
- TransformType,
- GeometricTransform]] = None
+ transforms_list: Optional[List[Tuple[Mapping[Axes, int],
+ TransformType,
+ _GeometricTransform]]] = None
):
"""
Parameters
----------
- transforms_list: List[Tuple[Mapping[Axes, int], TransformType, GeometricTransform]]
+ transforms_list: List[Tuple[Mapping[Axes, int], TransformType, _GeometricTransform]]
A list of tuples containing axes of an Imagestack and associated
transform to apply.
"""
- self.transforms: List[Tuple[Mapping[Axes, int], TransformType, GeometricTransform]]
+ self.transforms: List[Tuple[Mapping[Axes, int], TransformType, _GeometricTransform]]
if transforms_list:
self.transforms = transforms_list
else:
@@ -55,7 +55,7 @@ def __repr__(self) -> str:
def append(self,
selectors: Mapping[Axes, int],
transform_type: TransformType,
- transform_object: GeometricTransform
+ transform_object: _GeometricTransform
) -> None:
"""
Adds a new GoemetricTransform object to the list
@@ -134,7 +134,9 @@ def from_dict(cls, transforms_document: dict) -> "TransformsList":
cls._verify_version(version_str)
transforms_array = transforms_document[DocumentKeys.TRANSFORMS_LIST]
- transforms_list: List[Tuple[Mapping[Axes, int], TransformType, GeometricTransform]] = list()
+ transforms_list: List[Tuple[Mapping[Axes, int],
+ TransformType,
+ _GeometricTransform]] = list()
for selectors_str, transform_type_str, transforms_matrix in transforms_array:
selectors = {Axes(k): v for k, v in selectors_str.items()}
transform_type = TransformType(transform_type_str)
diff --git a/starfish/core/imagestack/imagestack.py b/starfish/core/imagestack/imagestack.py
index 49ac6d241..c2545af43 100644
--- a/starfish/core/imagestack/imagestack.py
+++ b/starfish/core/imagestack/imagestack.py
@@ -746,7 +746,7 @@ def _build_slice_list(
return tuple(slice_list), axes
- def _iter_axes(self, axes: Set[Axes]=None) -> Iterator[Mapping[Axes, int]]:
+ def _iter_axes(self, axes: Optional[Set[Axes]] = None) -> Iterator[Mapping[Axes, int]]:
"""Iterate over provided axes.
Parameters
@@ -772,10 +772,10 @@ def apply(
self,
func: Callable,
*args,
- group_by: Set[Axes]=None,
+ group_by: Optional[Set[Axes]] = None,
in_place=False,
- verbose: bool=False,
- n_processes: Optional[int]=None,
+ verbose: bool = False,
+ n_processes: Optional[int] = None,
level_method: Levels = Levels.CLIP,
**kwargs
) -> Optional["ImageStack"]:
@@ -891,9 +891,9 @@ def transform(
self,
func: Callable,
*args,
- group_by: Set[Axes]=None,
+ group_by: Optional[Set[Axes]] = None,
verbose=False,
- n_processes: Optional[int]=None,
+ n_processes: Optional[int] = None,
**kwargs
) -> List[Any]:
"""Split the image along a set of axes, and apply a function across all the components.
diff --git a/starfish/core/imagestack/parser/crop.py b/starfish/core/imagestack/parser/crop.py
index c38e6007b..45b972a08 100644
--- a/starfish/core/imagestack/parser/crop.py
+++ b/starfish/core/imagestack/parser/crop.py
@@ -13,11 +13,11 @@ class CropParameters:
def __init__(
self,
*,
- permitted_rounds: Optional[Collection[int]]=None,
- permitted_chs: Optional[Collection[int]]=None,
- permitted_zplanes: Optional[Collection[int]]=None,
- x_slice: Optional[Union[int, slice]]=None,
- y_slice: Optional[Union[int, slice]]=None,
+ permitted_rounds: Optional[Collection[int]] = None,
+ permitted_chs: Optional[Collection[int]] = None,
+ permitted_zplanes: Optional[Collection[int]] = None,
+ x_slice: Optional[Union[int, slice]] = None,
+ y_slice: Optional[Union[int, slice]] = None,
):
"""
Parameters
@@ -113,9 +113,9 @@ def parse_aligned_groups(tileset: TileSet,
y: Optional[Union[int, slice]] = None
) -> List["CropParameters"]:
- """Takes a tileset and any optional selected axes lists compares the physical coordinates on each
- tile to create aligned coordinate groups (groups of tiles that have the same physical
- coordinates)
+ """Takes a tileset and any optional selected-axes lists, then compares the physical
+ coordinates on each tile to create aligned coordinate groups (groups of tiles that
+ have the same physical coordinates)
Parameters
----------
diff --git a/starfish/core/imagestack/test/factories/synthetic_stack.py b/starfish/core/imagestack/test/factories/synthetic_stack.py
index 6c2067730..e90460182 100644
--- a/starfish/core/imagestack/test/factories/synthetic_stack.py
+++ b/starfish/core/imagestack/test/factories/synthetic_stack.py
@@ -1,3 +1,5 @@
+from typing import Optional
+
from starfish.core.experiment.builder.builder import build_image
from starfish.core.experiment.builder.defaultproviders import OnesTile, tile_fetcher_factory
from starfish.core.experiment.builder.providers import TileFetcher
@@ -11,7 +13,7 @@ def synthetic_stack(
num_z: int = 12,
tile_height: int = 50,
tile_width: int = 40,
- tile_fetcher: TileFetcher = None,
+ tile_fetcher: Optional[TileFetcher] = None,
) -> ImageStack:
"""generate a synthetic ImageStack
diff --git a/starfish/core/intensity_table/decoded_intensity_table.py b/starfish/core/intensity_table/decoded_intensity_table.py
index 091086239..e9d071386 100644
--- a/starfish/core/intensity_table/decoded_intensity_table.py
+++ b/starfish/core/intensity_table/decoded_intensity_table.py
@@ -124,7 +124,7 @@ def to_mermaid(self, filename: str) -> pd.DataFrame:
Name for compressed-gzipped MERMAID data file. Should end in '.csv.gz'.
Notes
- ------
+ -----
See also https://github.com/JEFworks/MERmaid
"""
# construct the MERMAID dataframe. As MERMAID adds support for non-categorical variables,
diff --git a/starfish/core/intensity_table/intensity_table.py b/starfish/core/intensity_table/intensity_table.py
index 1752edae6..846d0f406 100644
--- a/starfish/core/intensity_table/intensity_table.py
+++ b/starfish/core/intensity_table/intensity_table.py
@@ -321,7 +321,7 @@ def synthetic_intensities(
assert 0 < data.max() <= 1
intensities = cls.from_spot_data(
- data, spot_attributes, np.arange(data.shape[1]), np.arange(data.shape[2]))
+ data, spot_attributes, range(data.shape[1]), range(data.shape[2]))
return intensities
@classmethod
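
Context for the `np.arange` to `range` swap: `from_spot_data` annotates its channel and round values as a `Sequence`, and mypy does not accept `numpy.ndarray` where `typing.Sequence[int]` is expected, while a `range` object does satisfy it and iterates identically. A small sketch with a hypothetical stand-in function:

    from typing import List, Sequence

    import numpy as np

    def label_axes(ch_values: Sequence[int], round_values: Sequence[int]) -> List[str]:
        # Hypothetical stand-in for an API typed to accept Sequence[int].
        return [f"c{c}r{r}" for c in ch_values for r in round_values]

    data = np.zeros((2, 3, 4))
    # mypy flags np.arange(...) here (ndarray is not a typing.Sequence);
    # range(...) satisfies the annotation and yields the same integers.
    print(label_axes(range(data.shape[1]), range(data.shape[2]))[:3])  # ['c0r0', 'c0r1', 'c0r2']
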
diff --git a/starfish/core/morphology/Filter/min_distance_label.py b/starfish/core/morphology/Filter/min_distance_label.py
index 95ec9fb23..eb7e8f5e5 100644
--- a/starfish/core/morphology/Filter/min_distance_label.py
+++ b/starfish/core/morphology/Filter/min_distance_label.py
@@ -1,7 +1,7 @@
import numpy as np
from scipy.ndimage import distance_transform_edt, label
from skimage.feature import peak_local_max
-from skimage.morphology import watershed
+from skimage.segmentation import watershed
from starfish.core.morphology.binary_mask import BinaryMaskCollection
from ._base import FilterAlgorithm
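
`watershed` now lives in `skimage.segmentation`; the old `skimage.morphology` location was deprecated and later removed. This patch simply switches the import, but a codebase that had to span both eras could use a guarded import, sketched here:

    # Compatibility shim (sketch): prefer the new location, fall back for old pins.
    try:
        from skimage.segmentation import watershed
    except ImportError:  # only reached on very old scikit-image
        from skimage.morphology import watershed  # type: ignore[no-redef]

    import numpy as np
    from scipy.ndimage import distance_transform_edt, label

    mask = np.zeros((9, 9), dtype=bool)
    mask[1:4, 1:4] = True
    mask[5:8, 5:8] = True
    distance = distance_transform_edt(mask)
    markers, _ = label(mask)
    print(np.unique(watershed(-distance, markers, mask=mask)))  # [0 1 2]
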
diff --git a/starfish/core/morphology/binary_mask/binary_mask.py b/starfish/core/morphology/binary_mask/binary_mask.py
index d41be0e54..d50e20a90 100644
--- a/starfish/core/morphology/binary_mask/binary_mask.py
+++ b/starfish/core/morphology/binary_mask/binary_mask.py
@@ -284,7 +284,7 @@ def from_fiji_roi_set(
image from same FOV used in fiji segmentation workflow
Returns
- --------
+ -------
BinaryMaskCollection
Notes
@@ -700,7 +700,7 @@ def _apply_single_mask(
selection_range: Sequence[slice] = BinaryMaskCollection._crop_mask(output_mask)
return MaskData(
- output_mask[selection_range],
+ output_mask[selection_range], # type: ignore[index]
tuple(selection.start for selection in selection_range),
None
)
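
The underline edits here and in decoded_intensity_table.py follow the numpydoc convention that a section underline matches the length of its title ("Returns" gets seven dashes, "Notes" five); mismatched underlines can trigger docstring-parser warnings during the Sphinx build. A hedged sketch of a conforming docstring:

    import numpy as np

    def mask_area(mask: np.ndarray) -> int:
        """Count the True pixels in a boolean mask.

        Returns
        -------
        int
            Number of True pixels.

        Notes
        -----
        Each underline matches the length of its section title.
        """
        return int(mask.sum())

    print(mask_area(np.ones((3, 3), dtype=bool)))  # 9
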
diff --git a/starfish/core/spacetx_format/test_field_of_view.py b/starfish/core/spacetx_format/test_field_of_view.py
index bea27d095..95dbfa672 100644
--- a/starfish/core/spacetx_format/test_field_of_view.py
+++ b/starfish/core/spacetx_format/test_field_of_view.py
@@ -29,15 +29,15 @@ def test_dartfish_nuclei_example_field_of_view():
def test_channel_must_be_present():
no_channel = validator.load_json(example)
- del no_channel['tiles'][0]['indices']['c']
+ del no_channel['shape']['c']
with pytest.warns(UserWarning):
assert not validator.validate_object(no_channel)
def test_round_must_be_present():
mangled_round = validator.load_json(example)
- del mangled_round['tiles'][0]['indices']['r']
- mangled_round['tiles'][0]['indices']['h'] = 0
+ del mangled_round['shape']['r']
+ mangled_round['shape']['h'] = 0
with pytest.warns(UserWarning):
assert not validator.validate_object(mangled_round)
diff --git a/starfish/core/spacetx_format/util.py b/starfish/core/spacetx_format/util.py
index e73478d7d..630a30798 100644
--- a/starfish/core/spacetx_format/util.py
+++ b/starfish/core/spacetx_format/util.py
@@ -71,8 +71,8 @@ def load_json(json_file: str) -> Dict:
@staticmethod
def _recurse_through_errors(error_iterator: Iterator[ValidationError],
- level: int=0,
- filename: str=None) -> None:
+ level: int = 0,
+ filename: str = '') -> None:
"""Recurse through ValidationErrors, printing message and schema path
Parameters
@@ -136,7 +136,7 @@ def validate_file(self, target_file: str) -> bool:
def validate_object(
self,
target_object: Union[dict, list],
- target_file: str=None,
+ target_file: str = '',
) -> bool:
"""validate a loaded json object, returning True if valid, and False otherwise
@@ -176,7 +176,7 @@ def validate_object(
def fuzz_object(
self,
target_object: Union[dict, list],
- target_file: str=None,
+ target_file: str = '',
out: IO=sys.stdout,
) -> None:
"""performs mutations on the given object and tests for validity.
diff --git a/starfish/core/spacetx_format/validate_sptx.py b/starfish/core/spacetx_format/validate_sptx.py
index fecb78ff7..f80cc2f2c 100644
--- a/starfish/core/spacetx_format/validate_sptx.py
+++ b/starfish/core/spacetx_format/validate_sptx.py
@@ -76,8 +76,8 @@ def validate(experiment_json: str, fuzz: bool=False) -> bool:
return valid
-def validate_file(file: str, schema: str, fuzz: bool=False,
- backend: Backend=None, output: Dict=None) -> bool:
+def validate_file(file: str, schema: str, fuzz: bool = False,
+ backend: Backend = None, output: Dict = {}) -> bool:
"""validate a spaceTx formatted file with a given schema.
Accepts local filepaths or files hosted at http links.
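
A caveat on `output: Dict = {}` above: it satisfies mypy, but a mutable default is created once at definition time and shared across calls, so anything the function writes into `output` persists between invocations. If that matters here, the usual alternative is `Optional[Dict] = None` plus an in-body check; a sketch with hypothetical functions, not the starfish API:

    from typing import Dict, Optional

    def collect_bad(key: str, output: Dict = {}) -> Dict:
        # The default dict is shared by every call that omits "output".
        output[key] = True
        return output

    def collect_ok(key: str, output: Optional[Dict] = None) -> Dict:
        # A fresh dict per call unless the caller passes one in.
        if output is None:
            output = {}
        output[key] = True
        return output

    print(collect_bad("a"))  # {'a': True}
    print(collect_bad("b"))  # {'a': True, 'b': True}  <- state leaked across calls
    print(collect_ok("a"))   # {'a': True}
    print(collect_ok("b"))   # {'b': True}
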
diff --git a/starfish/core/spots/DecodeSpots/check_all_decoder.py b/starfish/core/spots/DecodeSpots/check_all_decoder.py
index 847279e5b..20798ae70 100644
--- a/starfish/core/spots/DecodeSpots/check_all_decoder.py
+++ b/starfish/core/spots/DecodeSpots/check_all_decoder.py
@@ -17,6 +17,7 @@
distanceFilter, findNeighbors, removeUsedSpots
from .util import _merge_spots_by_round
+
class CheckAll(DecodeSpotsAlgorithm):
"""
Decode spots by generating all possible combinations of neighboring spots to form barcodes
@@ -97,8 +98,8 @@ class CheckAll(DecodeSpotsAlgorithm):
def __init__(
self,
codebook: Codebook,
- search_radius: float=3,
- error_rounds: int=0,
+ search_radius: float = 3,
+ error_rounds: int = 0,
mode='med',
physical_coords=False):
self.codebook = codebook
@@ -124,7 +125,7 @@ def __init__(
def run(self,
spots: SpotFindingResults,
- n_processes: int=1,
+ n_processes: int = 1,
*args) -> DecodedIntensityTable:
"""
Decode spots by finding the set of nonoverlapping barcodes that have the minimum spatial
@@ -258,6 +259,7 @@ def run(self,
'Invalid mode choice ("high", "med", or "low")')
# Decode for each round omission number, intensity cutoff, and then search radius
+ allCodes_list = list()
allCodes = pd.DataFrame()
for currentRoundOmitNum in roundOmits:
for s, strictness in enumerate(strictnesses):
@@ -390,8 +392,11 @@ def run(self,
spotTables = removeUsedSpots(finalCodes, spotTables)
currentTables = removeUsedSpots(finalCodes, currentTables)
- # Append found codes to allCodes table
- allCodes = allCodes.append(finalCodes).reset_index(drop=True)
+ # Append found codes to allCodes list
+ allCodes_list.append(finalCodes)
+
+ # Concatenate list of found codes to allCodes table
+ allCodes = pd.concat(allCodes_list, ignore_index=True)
# Create and fill in intensity table
channels = spots.ch_labels
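
`DataFrame.append` was deprecated in pandas 1.4 and removed in 2.0; the change above accumulates per-pass frames in a list and calls `pd.concat` once, which also avoids repeated copies inside the loop. A standalone sketch of the pattern:

    import pandas as pd

    frames = []
    for i in range(3):
        # In the decoder this would be the finalCodes frame for one pass.
        frames.append(pd.DataFrame({"target": [f"gene_{i}"], "distance": [0.1 * i]}))

    # One concat at the end instead of DataFrame.append inside the loop.
    all_codes = pd.concat(frames, ignore_index=True)
    print(all_codes)
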
diff --git a/starfish/core/spots/DecodeSpots/check_all_funcs.py b/starfish/core/spots/DecodeSpots/check_all_funcs.py
index ff3bdda20..15880477c 100644
--- a/starfish/core/spots/DecodeSpots/check_all_funcs.py
+++ b/starfish/core/spots/DecodeSpots/check_all_funcs.py
@@ -1,4 +1,4 @@
-import typing
+import typing # noqa
from collections import Counter, defaultdict
from concurrent.futures.process import ProcessPoolExecutor
from copy import deepcopy
@@ -12,6 +12,7 @@
from starfish.core.codebook.codebook import Codebook
from starfish.types import Axes
+
def findNeighbors(spotTables: dict,
searchRadius: float,
numJobs: int) -> dict:
@@ -44,6 +45,7 @@ def findNeighbors(spotTables: dict,
return allNeighborDict
+
def createNeighborDict(spotTables: dict,
searchRadius: float,
neighborsByRadius: dict) -> dict:
@@ -94,6 +96,7 @@ def createNeighborDict(spotTables: dict,
pass
return neighborDict
+
def createRefDicts(spotTables: dict, numJobs: int) -> tuple:
'''
@@ -135,6 +138,7 @@ def createRefDicts(spotTables: dict, numJobs: int) -> tuple:
return channelDict, spotCoords, spotIntensities, spotQualDict
+
def encodeSpots(spotCodes: list) -> list:
'''
@@ -156,6 +160,7 @@ def encodeSpots(spotCodes: list) -> list:
return compressed
+
def decodeSpots(compressed: list, roundNum: int) -> list:
'''
@@ -182,6 +187,7 @@ def decodeSpots(compressed: list, roundNum: int) -> list:
for j in range(len(idxs))]
return decompressed
+
def spotQuality(spotTables: dict,
spotIntensities: dict,
numJobs: int) -> dict:
@@ -234,6 +240,7 @@ def spotQuality(spotTables: dict,
return spotQuals
+
def barcodeBuildFunc(allNeighbors: list,
currentRound: int,
roundOmitNum: int,
@@ -279,6 +286,7 @@ def barcodeBuildFunc(allNeighbors: list,
return allSpotCodes
+
def buildBarcodes(roundData: pd.DataFrame,
neighborDict: dict,
roundOmitNum: int,
@@ -356,6 +364,7 @@ def buildBarcodes(roundData: pd.DataFrame,
return roundData
+
def generateRoundPermutations(size: int, roundOmitNum: int) -> list:
'''
@@ -379,6 +388,7 @@ def generateRoundPermutations(size: int, roundOmitNum: int) -> list:
return sorted(set(list(permutations([*([False] * roundOmitNum),
*([True] * (size - roundOmitNum))]))))
+
def decodeFunc(data: pd.DataFrame) -> tuple:
'''
@@ -416,10 +426,12 @@ def decodeFunc(data: pd.DataFrame) -> tuple:
allDecodedSpotCodes.append(decodedSpotCodes)
return (allTargets, allDecodedSpotCodes)
+
def setGlobalDecoder(permutationCodes):
global globPermutationCodes
globPermutationCodes = permutationCodes
+
def decoder(roundData: pd.DataFrame,
codebook: Codebook,
channelDict: dict,
@@ -514,6 +526,7 @@ def decoder(roundData: pd.DataFrame,
return roundData
+
def distanceFunc(spotsAndTargets: list,
currentRoundOmitNum: int) -> tuple:
@@ -563,12 +576,14 @@ def distanceFunc(spotsAndTargets: list,
return (bestSpotCodes, bestDistances, bestTargets)
+
def setGlobalDistance(spotCoords, spotQualDict):
global globSpotCoords
global globSpotQualDict
globSpotCoords = spotCoords
globSpotQualDict = spotQualDict
+
def distanceFilter(roundData: pd.DataFrame,
spotCoords: dict,
spotQualDict: dict,
@@ -654,6 +669,7 @@ def distanceFilter(roundData: pd.DataFrame,
return roundData
+
def cleanup(bestPerSpotTables: dict,
spotCoords: dict,
channelDict: dict,
@@ -687,6 +703,7 @@ def cleanup(bestPerSpotTables: dict,
'''
# Create merged spot results dataframe containing the passing barcodes found in all the rounds
+ mergedCodes_list = list()
mergedCodes = pd.DataFrame()
roundNum = len(bestPerSpotTables)
for r in range(roundNum):
@@ -698,12 +715,13 @@ def cleanup(bestPerSpotTables: dict,
# keys
bestPerSpotTables[r]['spot_codes'] = [tuple(spotCode[0]) for spotCode in spotCodes]
bestPerSpotTables[r]['targets'] = [target[0] for target in targets]
- mergedCodes = mergedCodes.append(bestPerSpotTables[r])
- mergedCodes = mergedCodes.reset_index(drop=True)
+ mergedCodes_list.append(bestPerSpotTables[r])
# If no codes return empty dataframe
- if len(mergedCodes) == 0:
+ if len(mergedCodes_list) == 0:
return pd.DataFrame()
+ else:
+ mergedCodes = pd.concat(mergedCodes_list, ignore_index=True)
# Only pass codes that are chosen as best for at least 2 of the spots that make it up
spotCodes = mergedCodes['spot_codes']
@@ -820,6 +838,7 @@ def cleanup(bestPerSpotTables: dict,
return finalCodes
+
def removeUsedSpots(finalCodes: pd.DataFrame, spotTables: dict) -> dict:
'''
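
Related to the list-then-concat rewrite above: `pd.concat` raises `ValueError` on an empty list (unlike appending onto an empty frame), which is why the empty case returns an empty `DataFrame` before concatenating. A standalone sketch of that guard:

    import pandas as pd

    def merge_codes(frames: list) -> pd.DataFrame:
        # pd.concat([]) raises ValueError("No objects to concatenate"),
        # so return an empty frame explicitly when nothing was found.
        if len(frames) == 0:
            return pd.DataFrame()
        return pd.concat(frames, ignore_index=True)

    print(merge_codes([]).empty)                                        # True
    print(len(merge_codes([pd.DataFrame({"spot_codes": [(1, 2)]})])))   # 1
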
diff --git a/starfish/core/test/factories.py b/starfish/core/test/factories.py
index 4f811f9b8..12203e65a 100644
--- a/starfish/core/test/factories.py
+++ b/starfish/core/test/factories.py
@@ -228,7 +228,7 @@ def select_uint_dtype(array):
image = image * camera_detection_efficiency
- image += np.random.normal(scale=background_electrons, size=image.shape)
+ image += np.random.normal(scale=background_electrons, size=image.shape) # type: ignore
# mimic analog to digital conversion
image = (image / graylevel).astype(int).clip(0, 2 ** ad_conversion_bits)
diff --git a/starfish/core/util/config.py b/starfish/core/util/config.py
index 701995d89..1d183deb8 100644
--- a/starfish/core/util/config.py
+++ b/starfish/core/util/config.py
@@ -23,7 +23,7 @@ class Config(object):
__NO_VALUE_PASSED = object()
- def __init__(self, value: Union[str, Dict]=None) -> None:
+ def __init__(self, value: Union[str, Dict]) -> None:
"""
Parse user arguments, environment variables, and external files to
generate a nested configuration object.
diff --git a/starfish/core/util/exec.py b/starfish/core/util/exec.py
index df9a96dc1..19adb16e8 100644
--- a/starfish/core/util/exec.py
+++ b/starfish/core/util/exec.py
@@ -7,7 +7,7 @@
def stages(commands: Sequence[Sequence[Union[str, Callable]]],
- subdirs: Sequence[str]=None,
+ subdirs: Sequence[str] = '',
keep_data: bool=False) -> str:
"""
diff --git a/starfish/spacetx_format/schema/field_of_view_0.0.0/tiles/tiles.json b/starfish/spacetx_format/schema/field_of_view_0.0.0/tiles/tiles.json
index a255a5cbd..a01105ded 100644
--- a/starfish/spacetx_format/schema/field_of_view_0.0.0/tiles/tiles.json
+++ b/starfish/spacetx_format/schema/field_of_view_0.0.0/tiles/tiles.json
@@ -1,6 +1,6 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "schema/field_of_view/tiles/tiles.json",
+ "$id": "https://github.com/spacetx/starfish/spacetx_format/schema/field_of_view_0.0.0/tiles/tiles.json",
"description": "Specification of a 2-d image tile.",
"type": "object",
"required": [
diff --git a/starfish/spacetx_format/schema/field_of_view_0.1.0/field_of_view.json b/starfish/spacetx_format/schema/field_of_view_0.1.0/field_of_view.json
index f47a709df..30e2668ea 100644
--- a/starfish/spacetx_format/schema/field_of_view_0.1.0/field_of_view.json
+++ b/starfish/spacetx_format/schema/field_of_view_0.1.0/field_of_view.json
@@ -1,5 +1,5 @@
{
- "$id": "https://github.com/spacetx/starfish/sptx-format/schema/field_of_view/field_of_view.json",
+ "$id": "https://github.com/spacetx/starfish/spacetx_format/schema/field_of_view_0.1.0/field_of_view.json",
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
@@ -36,7 +36,7 @@
"shape": {
"type": "object",
"description": "The shape of the categorical dimensions (channel, round, z-plane) of the field of view.",
- "required": "c",
+ "required": ["c", "r"],
"properties": {
"r": {
"description": "Number of imaging rounds.",
diff --git a/starfish/spacetx_format/schema/field_of_view_0.1.0/tiles/tiles.json b/starfish/spacetx_format/schema/field_of_view_0.1.0/tiles/tiles.json
index a255a5cbd..227d1b468 100644
--- a/starfish/spacetx_format/schema/field_of_view_0.1.0/tiles/tiles.json
+++ b/starfish/spacetx_format/schema/field_of_view_0.1.0/tiles/tiles.json
@@ -1,6 +1,6 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "schema/field_of_view/tiles/tiles.json",
+ "$id": "https://github.com/spacetx/starfish/spacetx_format/schema/field_of_view_0.1.0/tiles/tiles.json",
"description": "Specification of a 2-d image tile.",
"type": "object",
"required": [
diff --git a/starfish/test/full_pipelines/api/test_dartfish.py b/starfish/test/full_pipelines/api/test_dartfish.py
index 3284735be..aa4cf0a03 100644
--- a/starfish/test/full_pipelines/api/test_dartfish.py
+++ b/starfish/test/full_pipelines/api/test_dartfish.py
@@ -163,7 +163,7 @@ def test_dartfish_pipeline_cropped_data(tmpdir):
# verify number of spots detected
spots_passing_filters = spot_intensities[Features.PASSES_THRESHOLDS].sum()
- assert spots_passing_filters == 53
+ assert spots_passing_filters == 54
# compare to benchmark data -- note that this particular part of the dataset appears completely
# uncorrelated
diff --git a/starfish/test/full_pipelines/api/test_iss_api.py b/starfish/test/full_pipelines/api/test_iss_api.py
index 601652246..c2c1ed01d 100644
--- a/starfish/test/full_pipelines/api/test_iss_api.py
+++ b/starfish/test/full_pipelines/api/test_iss_api.py
@@ -58,26 +58,26 @@ def test_iss_pipeline_cropped_data(tmpdir):
registered_image = iss.registered_imgs
expected_registered_values = np.array(
- [[9.972601e-03, 4.410370e-03, 3.392192e-03, 1.687834e-03, 1.880155e-04,
- 0.000000e+00, 1.047019e-04, 1.578360e-05, 1.069453e-03, 6.543968e-03],
- [1.456979e-02, 9.646147e-03, 8.203185e-03, 5.936079e-03, 1.839891e-03,
- 3.569032e-04, 5.237808e-04, 3.792955e-04, 4.592746e-05, 1.088151e-03],
- [2.313178e-02, 1.586836e-02, 1.240375e-02, 9.513815e-03, 3.563545e-03,
- 1.488329e-03, 1.326624e-03, 2.939297e-04, 5.607218e-04, 3.690171e-03],
- [3.531289e-02, 2.446796e-02, 1.964004e-02, 1.258251e-02, 7.771713e-03,
- 4.918387e-03, 2.766922e-03, 3.267574e-04, 4.892451e-04, 5.261183e-03],
- [5.146676e-02, 3.794888e-02, 3.141785e-02, 2.312119e-02, 1.555709e-02,
- 9.402979e-03, 6.135746e-03, 7.547007e-04, 1.231891e-03, 2.656648e-03],
- [5.952225e-02, 5.170041e-02, 4.459279e-02, 3.416265e-02, 2.403326e-02,
- 1.659481e-02, 1.189285e-02, 4.377660e-03, 1.810592e-03, 1.729033e-03],
- [5.872828e-02, 5.881007e-02, 5.405803e-02, 4.143796e-02, 3.181438e-02,
- 2.468321e-02, 1.451422e-02, 6.834699e-03, 6.021897e-03, 2.588449e-03],
- [4.815195e-02, 5.578594e-02, 5.535153e-02, 4.701486e-02, 3.499170e-02,
- 2.584777e-02, 1.871042e-02, 1.036013e-02, 8.698075e-03, 2.945077e-03],
- [4.108098e-02, 4.543370e-02, 4.911040e-02, 4.965232e-02, 4.022935e-02,
- 2.973786e-02, 1.956365e-02, 1.386791e-02, 8.811617e-03, 6.941982e-03],
- [3.560406e-02, 3.779930e-02, 4.068928e-02, 4.668610e-02, 4.536487e-02,
- 3.364870e-02, 2.244582e-02, 1.683235e-02, 1.113740e-02, 1.012298e-02]],
+ [[8.182720e-03, 2.094890e-03, 1.921310e-03, 8.036800e-04, 0.000000e+00,
+ 1.113000e-05, 2.362500e-04, 1.137100e-04, 2.881270e-03, 1.061417e-02],
+ [1.225884e-02, 7.359780e-03, 6.554780e-03, 3.967060e-03, 7.174400e-04,
+ 1.377300e-04, 3.282700e-04, 1.963200e-04, 1.156900e-04, 2.786610e-03],
+ [1.817651e-02, 1.313646e-02, 1.080875e-02, 8.000580e-03, 2.264170e-03,
+ 8.996000e-04, 1.071100e-03, 2.864700e-04, 4.592600e-04, 2.591370e-03],
+ [2.938030e-02, 2.027904e-02, 1.634731e-02, 1.067738e-02, 5.404920e-03,
+ 3.325540e-03, 2.005550e-03, 3.105000e-05, 9.240000e-04, 5.751660e-03],
+ [4.422882e-02, 3.153970e-02, 2.650198e-02, 1.768988e-02, 1.198124e-02,
+ 7.287380e-03, 4.359240e-03, 4.564000e-05, 1.156120e-03, 3.979250e-03],
+ [5.676676e-02, 4.604779e-02, 3.936250e-02, 2.943260e-02, 1.997995e-02,
+ 1.306023e-02, 9.153120e-03, 1.940280e-03, 1.672590e-03, 1.607550e-03],
+ [5.948846e-02, 5.680262e-02, 5.028814e-02, 3.747543e-02, 2.800228e-02,
+ 2.101545e-02, 1.274632e-02, 5.316650e-03, 4.062480e-03, 1.875220e-03],
+ [5.272433e-02, 5.822361e-02, 5.484088e-02, 4.344586e-02, 3.279146e-02,
+ 2.484935e-02, 1.552278e-02, 8.065560e-03, 8.012830e-03, 2.026330e-03],
+ [4.264575e-02, 5.009904e-02, 5.163100e-02, 4.824880e-02, 3.619050e-02,
+ 2.628749e-02, 1.851076e-02, 1.187091e-02, 8.305970e-03, 4.661620e-03],
+ [3.705096e-02, 4.012012e-02, 4.393976e-02, 4.851252e-02, 4.276346e-02,
+ 3.062118e-02, 1.944861e-02, 1.515226e-02, 9.333680e-03, 9.347120e-03]],
dtype=np.float32
)
@@ -100,7 +100,7 @@ def test_iss_pipeline_cropped_data(tmpdir):
'ETV4', 'GAPDH', 'GUS', 'HER2', 'RAC1',
'TFRC', 'TP53', 'VEGF']))
- assert np.array_equal(gene_counts, [19, 1, 5, 2, 1, 11, 1, 3, 2, 1, 1, 2])
+ assert np.array_equal(gene_counts, [19, 1, 5, 2, 1, 9, 1, 3, 2, 1, 1, 2])
assert decoded.sizes[Features.AXIS] == 99
masks = iss.masks
@@ -143,4 +143,4 @@ def test_iss_pipeline_cropped_data(tmpdir):
# test that nans were properly removed from the expression matrix
assert 'nan' not in expression_matrix.genes.data
# test the number of spots that did not decode per cell
- assert np.array_equal(expression_matrix.number_of_undecoded_spots.data, [13, 1, 36])
+ assert np.array_equal(expression_matrix.number_of_undecoded_spots.data, [14, 1, 37])
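
The pinned arrays and counts in this test shifted with the newer scikit-image registration output. Where exact pins prove brittle across dependency upgrades, a tolerance-based assertion is a common alternative; a sketch, not a change to this test:

    import numpy as np

    expected = np.array([[8.18272e-03, 2.09489e-03],
                         [1.225884e-02, 7.35978e-03]], dtype=np.float32)
    observed = expected * (1.0 + 1e-6)  # stand-in for a freshly registered image crop

    # An explicit tolerance documents how much numerical drift is acceptable.
    np.testing.assert_allclose(observed, expected, rtol=1e-4, atol=1e-6)
    print("registered values within tolerance")
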