Add demo entrypoint
manzt committed Oct 17, 2024
1 parent e49bf62 commit b1750d3
Showing 4 changed files with 158 additions and 16 deletions.
18 changes: 13 additions & 5 deletions notebooks/getting-started.ipynb
@@ -80,8 +80,12 @@
},
"outputs": [],
"source": [
"tissue_umap_embedding = Embedding.from_ozette(df=pd.read_parquet(\"./data/mair-2022-tissue-138-umap.pq\"))\n",
"tissue_ozette_embedding = Embedding.from_ozette(df=pd.read_parquet(\"./data/mair-2022-tissue-138-ozette.pq\"))"
"tissue_umap_embedding = Embedding.from_ozette(\n",
" df=pd.read_parquet(\"./data/mair-2022-tissue-138-umap.pq\")\n",
")\n",
"tissue_ozette_embedding = Embedding.from_ozette(\n",
" df=pd.read_parquet(\"./data/mair-2022-tissue-138-ozette.pq\")\n",
")"
]
},
{
@@ -126,7 +130,7 @@
},
"outputs": [],
"source": [
"umap_vs_ozette.select(['CD3+', 'CD4+', 'CD8-'])"
"umap_vs_ozette.select([\"CD3+\", \"CD4+\", \"CD8-\"])"
]
},
{
@@ -148,7 +152,9 @@
},
"outputs": [],
"source": [
"tumor_ozette_embedding = Embedding.from_ozette(df=pd.read_parquet(\"./data/mair-2022-tumor-006-ozette.pq\"))"
"tumor_ozette_embedding = Embedding.from_ozette(\n",
" df=pd.read_parquet(\"./data/mair-2022-tumor-006-ozette.pq\")\n",
")"
]
},
{
@@ -192,7 +198,9 @@
},
"outputs": [],
"source": [
"tissue_vs_tumor.select(\"CD4-CD8+CD3+CD45RA+CD27+CD19-CD103-CD28-CD69+PD1+HLADR-GranzymeB-CD25-ICOS-TCRgd-CD38-CD127-Tim3-\")"
"tissue_vs_tumor.select(\n",
" \"CD4-CD8+CD3+CD45RA+CD27+CD19-CD103-CD28-CD69+PD1+HLADR-GranzymeB-CD25-ICOS-TCRgd-CD38-CD127-Tim3-\"\n",
")"
]
},
{
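
For orientation, the embeddings loaded above feed a comparison widget; a minimal sketch of that flow, mirroring the demo notebook generated by the new src/cev/_cli.py below and assuming umap_vs_ozette is the EmbeddingComparisonWidget created earlier in the notebook (that cell is not part of this diff):

import pandas as pd
from cev.widgets import Embedding, EmbeddingComparisonWidget

tissue_umap_embedding = Embedding.from_ozette(
    df=pd.read_parquet("./data/mair-2022-tissue-138-umap.pq")
)
tissue_ozette_embedding = Embedding.from_ozette(
    df=pd.read_parquet("./data/mair-2022-tissue-138-ozette.pq")
)

# Compare the two embeddings side by side and highlight a phenotype subset.
umap_vs_ozette = EmbeddingComparisonWidget(
    tissue_umap_embedding,
    tissue_ozette_embedding,
    titles=("Standard UMAP", "Annotation-Transformed UMAP"),
    metric="confusion",
    selection="synced",
)
umap_vs_ozette.select(["CD3+", "CD4+", "CD8-"])
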
15 changes: 10 additions & 5 deletions pyproject.toml
@@ -19,32 +19,37 @@ classifiers = [
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
]
requires-python = ">=3.8"
requires-python = ">=3.8,<3.12"
dependencies = [
"anywidget>=0.2.3",
"cev-metrics>=0.1.2",
"ipywidgets>=8.0.0",
"jinja2>=3.0.0",
"jupyter-scatter>=0.14.0",
"pandas>=1.0",
"pandas>=1.0,<2.0",
"numpy>=1.0,<2.0",
"pyarrow",
"pooch>=1.3.0",
]
dynamic = ["version"]

# https://peps.python.org/pep-0621/#dependencies-optional-dependencies
[project.optional-dependencies]
dev = [
"black[jupyter]",
"black[jupyter]==23.1.0",
"jupyterlab",
"pytest",
"rich",
"ruff",
"ruff==0.0.246",
]
notebooks = [
"pyarrow",
"fastparquet",
"matplotlib",
]

[project.scripts]
cev = "cev._cli:main"

[project.urls]
homepage = "https://github.com/OzetteTech/comparative-embedding-visualization"

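The new [project.scripts] table registers a cev console command that dispatches to cev._cli.main. A rough Python equivalent of typing `cev demo` in a terminal, assuming the package is installed:

import sys

from cev._cli import main

# argparse in cev._cli reads the subcommand from sys.argv;
# use ["cev", "download"] to only write the demo notebook.
sys.argv = ["cev", "demo"]
main()
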
7 changes: 1 addition & 6 deletions src/cev/__init__.py
@@ -1,9 +1,4 @@
from importlib.metadata import PackageNotFoundError, version
from cev._version import __version__ # noqa

import cev.metrics as metrics # noqa
import cev.widgets as widgets # noqa

try:
__version__ = version("cev")
except PackageNotFoundError:
__version__ = "uninstalled"
134 changes: 134 additions & 0 deletions src/cev/_cli.py
@@ -0,0 +1,134 @@
import argparse
import json
import os
import shutil
import sys
import textwrap
import zipfile
from pathlib import Path

import pooch

from cev._version import __version__

_DEV = True


def download_data() -> tuple[Path, Path]:
archive = pooch.retrieve(
url="https://figshare.com/ndownloader/articles/23063615/versions/1",
path=pooch.os_cache("cev"),
fname="data.zip",
known_hash=None,
)
archive = Path(archive)
files = [
"mair-2022-tissue-138-umap.pq",
"mair-2022-tissue-138-ozette.pq",
]
with zipfile.ZipFile(archive, "r") as zip_ref:
for file in files:
zip_ref.extract(file, path=archive.parent)
return (
archive.parent / "mair-2022-tissue-138-umap.pq",
archive.parent / "mair-2022-tissue-138-ozette.pq",
)


def write_notebook(output: Path):
umap_path, ozette_path = download_data()
source = textwrap.dedent(
f"""
import pandas as pd
from cev.widgets import Embedding, EmbeddingComparisonWidget
umap_embedding = pd.read_parquet("{umap_path}").pipe(Embedding.from_ozette)
ozette_embedding = pd.read_parquet("{ozette_path}").pipe(Embedding.from_ozette)
EmbeddingComparisonWidget(
umap_embedding,
ozette_embedding,
titles=("Standard UMAP", "Annotation-Transformed UMAP"),
metric="confusion",
selection="synced",
auto_zoom=True,
row_height=320,
)
"""
).strip()

nb = {
"cells": [
{
"cell_type": "code",
"execution_count": None,
"metadata": {},
"outputs": [],
"source": source,
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3",
}
},
"nbformat": 4,
"nbformat_minor": 5,
}
with output.open("w") as f:
json.dump(nb, f, indent=2)


def check_uv_available():
if shutil.which("uv") is None:
print("Error: 'uv' command not found.", file=sys.stderr)
print("Please install 'uv' to run `cev demo` entrypoint.", file=sys.stderr)
print(
"For more information, visit: https://github.com/astral-sh/uv",
file=sys.stderr,
)
sys.exit(1)


def run_notebook(notebook_path: Path):
check_uv_available()
command = [
"uvx",
"--python",
"3.11",
"--with",
"." if _DEV else f"cev=={__version__}",
"--with",
"jupyterlab",
"jupyter",
"lab",
str(notebook_path),
]
try:
os.execvp(command[0], command)
except OSError as e:
print(f"Error executing {command[0]}: {e}", file=sys.stderr)
sys.exit(1)


def main():
parser = argparse.ArgumentParser(prog="cev")
subparsers = parser.add_subparsers(dest="command", help="Available commands")
subparsers.add_parser("download", help="Download the demo notebook (and data)")
subparsers.add_parser("demo", help="Run the demo notebook in JupyterLab")
args = parser.parse_args()

notebook_path = Path("cev-demo.ipynb")
if args.command == "download":
write_notebook(notebook_path)
elif args.command == "demo":
write_notebook(notebook_path)
run_notebook(notebook_path)
else:
parser.print_help()


if __name__ == "__main__":
main()
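
Usage note: `cev download` fetches the Figshare archive into pooch's OS cache and writes cev-demo.ipynb into the current directory, while `cev demo` additionally launches JupyterLab via uvx. A sketch of calling the helpers above directly (the cache location shown is illustrative):

from pathlib import Path

from cev._cli import download_data, write_notebook

# Download and extract the two parquet files into pooch's cache
# (for example ~/.cache/cev on Linux) and return their paths.
umap_path, ozette_path = download_data()
print(umap_path, ozette_path)

# Generate the demo notebook without launching JupyterLab.
write_notebook(Path("cev-demo.ipynb"))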
