From 442ad85e95a2953ce306c6767a23bd0fba4d3d70 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 10:53:44 +0200 Subject: [PATCH 01/30] :construction: switch mamba installation - see if snakemake envs are somehow cached --- .github/workflows/ci.yaml | 50 +++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7e16f5b6..8bbbac45 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -25,26 +25,34 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - - name: Set up Miniconda - # ! change action https://github.com/mamba-org/setup-micromamba - uses: conda-incubator/setup-miniconda@v3 - with: - miniforge-variant: Mambaforge - # miniforge-version: latest - use-mamba: true - channel-priority: disabled - python-version: ${{ matrix.python-version }} + # - name: Set up Miniconda + # # ! change action https://github.com/mamba-org/setup-micromamba + # uses: conda-incubator/setup-miniconda@v3 + # with: + # miniforge-variant: Mambaforge + # # miniforge-version: latest + # use-mamba: true + # channel-priority: disabled + # python-version: ${{ matrix.python-version }} + # environment-file: snakemake_env.yml + # activate-environment: snakemake + # auto-activate-base: true + # # auto-update-conda: true + - uses: mamba-org/setup-micromamba@v1 + with: + micromamba-version: 'latest' # any version from https://github.com/mamba-org/micromamba-releases environment-file: snakemake_env.yml - activate-environment: snakemake - auto-activate-base: true - # auto-update-conda: true - - name: inspect-conda-environment - run: | - conda info - conda list - conda env export --from-history --no-builds > environment.yml - conda env export --no-builds - conda env export --no-builds > environment_w_versions.yml + init-shell: >- + bash + cache-environment: true + post-cleanup: 'all' + # - name: inspect-conda-environment + # run: | + # conda info + # conda list + # conda env export --from-history --no-builds > environment.yml + # conda env export --no-builds + # conda env export --no-builds > environment_w_versions.yml # - name: test-r-kernel-imports # run: | # Rscript -e "library(stringi)" @@ -54,6 +62,8 @@ jobs: run: | cd project snakemake -p -c1 --configfile config/single_dev_dataset/example/config.yaml --use-conda -n + - name: Show current files in conda snakemake folder + run: ls -l /Users/runner/work/pimms/pimms/project/.snakemake/conda/ - name: Run demo workflow (integration test) continue-on-error: true run: | @@ -141,4 +151,4 @@ jobs: - uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} \ No newline at end of file + password: ${{ secrets.PYPI_API_TOKEN }} From 057647f9f147ca45264babed53bbd1663e8a8afb Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 12:51:57 +0200 Subject: [PATCH 02/30] :bug: specify python version, move ls --- .github/workflows/ci.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8bbbac45..24c0182e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -42,6 +42,8 @@ jobs: with: micromamba-version: 'latest' # any version from https://github.com/mamba-org/micromamba-releases environment-file: snakemake_env.yml + create-args: >- + python=${{ matrix.python-version }} init-shell: >- bash cache-environment: true @@ -62,13 +64,13 @@ jobs: run: | cd project snakemake -p -c1 --configfile 
config/single_dev_dataset/example/config.yaml --use-conda -n - - name: Show current files in conda snakemake folder - run: ls -l /Users/runner/work/pimms/pimms/project/.snakemake/conda/ - name: Run demo workflow (integration test) continue-on-error: true run: | cd project snakemake -p -c4 -k --configfile config/single_dev_dataset/example/config.yaml --use-conda + - name: Show current files in conda snakemake folder + run: ls -l /Users/runner/work/pimms/pimms/project/.snakemake/conda/ - name: Run demo workflow again (in case of installation issues) continue-on-error: true run: | From 66dbb880c4f2b6a6d9dfb0246fa5587d509cbdfa Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 13:10:16 +0200 Subject: [PATCH 03/30] :construction: deactivate some workflow, run relatvie ls command --- .github/workflows/ci.yaml | 12 ++++++++---- .github/workflows/test_pkg_on_colab.yaml | 4 ++-- .github/workflows/workflow_website.yaml | 6 +++--- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 24c0182e..e1c409da 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -29,11 +29,15 @@ jobs: # # ! change action https://github.com/mamba-org/setup-micromamba # uses: conda-incubator/setup-miniconda@v3 # with: - # miniforge-variant: Mambaforge + # # miniforge-variant: Mambaforge # # miniforge-version: latest + # # conda-solver: libmamba # use-mamba: true - # channel-priority: disabled + # channels: conda-forge,defaults # python-version: ${{ matrix.python-version }} + # mamba-version: "*" + # channels: conda-forge,defaults + # channel-priority: true # environment-file: snakemake_env.yml # activate-environment: snakemake # auto-activate-base: true @@ -47,7 +51,7 @@ jobs: init-shell: >- bash cache-environment: true - post-cleanup: 'all' + post-cleanup: 'none' # - name: inspect-conda-environment # run: | # conda info @@ -70,7 +74,7 @@ jobs: cd project snakemake -p -c4 -k --configfile config/single_dev_dataset/example/config.yaml --use-conda - name: Show current files in conda snakemake folder - run: ls -l /Users/runner/work/pimms/pimms/project/.snakemake/conda/ + run: ls -l project/.snakemake/conda/ - name: Run demo workflow again (in case of installation issues) continue-on-error: true run: | diff --git a/.github/workflows/test_pkg_on_colab.yaml b/.github/workflows/test_pkg_on_colab.yaml index 9fae4d14..b6260e34 100644 --- a/.github/workflows/test_pkg_on_colab.yaml +++ b/.github/workflows/test_pkg_on_colab.yaml @@ -3,8 +3,8 @@ name: Test that tutorial runs on latest colab image on: # push: # branches: [main] - pull_request: - branches: [main] + # pull_request: + # branches: [main] schedule: - cron: '0 2 3 * *' diff --git a/.github/workflows/workflow_website.yaml b/.github/workflows/workflow_website.yaml index 1d794bdd..743757bb 100644 --- a/.github/workflows/workflow_website.yaml +++ b/.github/workflows/workflow_website.yaml @@ -1,7 +1,7 @@ name: Build workflow website on public Alzheimer dataset (for protein groups) on: - pull_request: - branches: [main, dev] + # pull_request: + # branches: [main, dev] release: types: [published] schedule: @@ -73,4 +73,4 @@ jobs: uses: peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: project/runs/alzheimer_study/_build/ \ No newline at end of file + publish_dir: project/runs/alzheimer_study/_build/ From 9de1e37032baea093d823dff8f8d4f76ba442050 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 13:21:50 +0200 Subject: [PATCH 
04/30] try not to cache --- .github/workflows/ci.yaml | 2 +- .github/workflows/ci_workflow.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e1c409da..7fcc09d2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -50,7 +50,7 @@ jobs: python=${{ matrix.python-version }} init-shell: >- bash - cache-environment: true + cache-environment: false post-cleanup: 'none' # - name: inspect-conda-environment # run: | diff --git a/.github/workflows/ci_workflow.yaml b/.github/workflows/ci_workflow.yaml index 20a64f2d..927e6735 100644 --- a/.github/workflows/ci_workflow.yaml +++ b/.github/workflows/ci_workflow.yaml @@ -2,8 +2,8 @@ name: run workflow with conda envs on: push: branches: [main, dev] - pull_request: - branches: [main, dev] + # pull_request: + # branches: [main, dev] release: types: [published] schedule: From 4e4965c5cbea25e9f8959c339b18e6361063c28f Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 11:43:47 +0000 Subject: [PATCH 05/30] :construction: test using venv created by codespace with python 3.12 - might be that I need to create (not sure what change in runner configurations) --- .github/workflows/ci.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7fcc09d2..506affd5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -110,16 +110,16 @@ jobs: python-version: ${{ matrix.python-version }} - name: install pimms - run: python -m pip install . + run: pip install . - name: Install pytest - run: python -m pip install pytest pytest-cov + run: pip install pytest pytest-cov - name: Run pytest run: pytest . - name: Install papermill - run: python -m pip install papermill ipykernel + run: pip install papermill ipykernel - name: View papermill help message for notebooks (as scripts) run: | From 7dc116892bc80a57746b752a1cdbcadeb6b8ef0a Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 11:51:24 +0000 Subject: [PATCH 06/30] try to use full snakemake installation --- snakemake_env.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snakemake_env.yml b/snakemake_env.yml index 7713b7b1..067ede4c 100644 --- a/snakemake_env.yml +++ b/snakemake_env.yml @@ -4,5 +4,5 @@ channels: - bioconda - defaults dependencies: - - snakemake-minimal + - snakemake #-minimal - mamba From be2699e99e299dc50cf4a028a4e01e95d9ecdc26 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 13:56:20 +0200 Subject: [PATCH 07/30] :construction: use miniconda for pypi installation test --- .github/workflows/ci.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 506affd5..0733cbdb 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -105,8 +105,13 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + # - uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python-version }} + + - uses: conda-incubator/setup-miniconda@v3 with: + auto-update-conda: true python-version: ${{ matrix.python-version }} - name: install pimms From 0f011a192275bd95de7d08c357c975cd239ebc04 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 14:06:22 +0200 Subject: [PATCH 08/30] try miniconda again - snakemake environment has it's own mamba installation - auto-activate environment "test" --- .github/workflows/ci.yaml | 54 
+++++++++++++++++++-------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0733cbdb..7c115bcc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -25,33 +25,32 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - # - name: Set up Miniconda - # # ! change action https://github.com/mamba-org/setup-micromamba - # uses: conda-incubator/setup-miniconda@v3 - # with: - # # miniforge-variant: Mambaforge - # # miniforge-version: latest - # # conda-solver: libmamba - # use-mamba: true - # channels: conda-forge,defaults - # python-version: ${{ matrix.python-version }} - # mamba-version: "*" - # channels: conda-forge,defaults - # channel-priority: true - # environment-file: snakemake_env.yml - # activate-environment: snakemake - # auto-activate-base: true - # # auto-update-conda: true - - uses: mamba-org/setup-micromamba@v1 - with: - micromamba-version: 'latest' # any version from https://github.com/mamba-org/micromamba-releases + - name: Set up Miniconda + uses: conda-incubator/setup-miniconda@v3 + with: + # miniforge-variant: Mambaforge + # miniforge-version: latest + # conda-solver: libmamba + # use-mamba: true + # channels: conda-forge,defaults + python-version: ${{ matrix.python-version }} + # mamba-version: "*" + # channels: conda-forge,defaults + channel-priority: true environment-file: snakemake_env.yml - create-args: >- - python=${{ matrix.python-version }} - init-shell: >- - bash - cache-environment: false - post-cleanup: 'none' + activate-environment: snakemake + auto-activate-base: true + # auto-update-conda: true + # - uses: mamba-org/setup-micromamba@v1 + # with: + # micromamba-version: 'latest' # any version from https://github.com/mamba-org/micromamba-releases + # environment-file: snakemake_env.yml + # create-args: >- + # python=${{ matrix.python-version }} + # init-shell: >- + # bash + # cache-environment: false + # post-cleanup: 'none' # - name: inspect-conda-environment # run: | # conda info @@ -108,11 +107,12 @@ jobs: # - uses: actions/setup-python@v5 # with: # python-version: ${{ matrix.python-version }} - + - uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true python-version: ${{ matrix.python-version }} + auto-activate-base: true - name: install pimms run: pip install . 
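A side note before the next patches: the activation problem that patches 12-15 below chase down comes from the fact that conda-incubator/setup-miniconda only exposes the activated environment to run: steps executed in a login shell, which is why patch 15 ends up adding shell: bash -el {0} as the job default. A minimal sketch of how the pieces fit together is shown here (job and step names are illustrative; the environment file and Python version are the ones used elsewhere in this series):

    jobs:
      conda-example:                      # illustrative job name
        runs-on: ubuntu-latest
        defaults:
          run:
            shell: bash -el {0}           # login shell so the env activated by setup-miniconda is visible in run steps
        steps:
          - uses: actions/checkout@v4
          - uses: conda-incubator/setup-miniconda@v3
            with:
              python-version: "3.10"              # one of the versions from the matrix above
              environment-file: snakemake_env.yml
              activate-environment: snakemake
          - name: check active environment        # illustrative step
            run: |
              conda info
              which python                        # should resolve inside the "snakemake" environment

Without the job-level shell default, each run: step starts a fresh non-login bash in which the conda initialization is skipped, so pip and python fall back to the system interpreter, which matches the symptoms debugged in patches 12-14.
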
From 1b35c286aac00637395b44dd15df39b108e4c4ef Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 14:40:49 +0200 Subject: [PATCH 09/30] install build dependencies, fix ubuntu first --- .github/workflows/ci.yaml | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7c115bcc..b8d7b22f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -18,7 +18,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", - "macos-13", + #"macos-13", # "windows-latest" # rrcovNA cannot be build from source on windows-server ] python-version: ["3.8", "3.9", "3.10"] @@ -30,17 +30,17 @@ jobs: with: # miniforge-variant: Mambaforge # miniforge-version: latest - # conda-solver: libmamba + conda-solver: classic #libmamba # use-mamba: true # channels: conda-forge,defaults python-version: ${{ matrix.python-version }} # mamba-version: "*" # channels: conda-forge,defaults - channel-priority: true + channel-priority: strict environment-file: snakemake_env.yml activate-environment: snakemake auto-activate-base: true - # auto-update-conda: true + auto-update-conda: true # - uses: mamba-org/setup-micromamba@v1 # with: # micromamba-version: 'latest' # any version from https://github.com/mamba-org/micromamba-releases @@ -99,7 +99,8 @@ jobs: strategy: fail-fast: false matrix: - os: ["ubuntu-latest", "macos-latest", "windows-latest"] + os: ["ubuntu-latest",] + # "macos-latest", "windows-latest"] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 @@ -113,6 +114,11 @@ jobs: auto-update-conda: true python-version: ${{ matrix.python-version }} auto-activate-base: true + + - name: Install build dependencies explicitly + run: | + pip install setuptools_scm + - name: install pimms run: pip install . From e732f8c724f8ff7f8d03d956e57d77eec384ec58 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 15:34:53 +0200 Subject: [PATCH 10/30] :bug: try to put mamba below 2.0 https://github.com/snakemake/snakemake/issues/3108 --- snakemake_env.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/snakemake_env.yml b/snakemake_env.yml index 067ede4c..fc0ab023 100644 --- a/snakemake_env.yml +++ b/snakemake_env.yml @@ -4,5 +4,5 @@ channels: - bioconda - defaults dependencies: - - snakemake #-minimal - - mamba + - snakemake-minimal + - mamba<2.0 From 98b72790976b5c662c29b0903745737ee36a4f27 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 7 Oct 2024 15:46:35 +0200 Subject: [PATCH 11/30] test should be activate per default --- .github/workflows/ci.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b8d7b22f..0e9986fd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -30,7 +30,7 @@ jobs: with: # miniforge-variant: Mambaforge # miniforge-version: latest - conda-solver: classic #libmamba + # conda-solver: libmamba # use-mamba: true # channels: conda-forge,defaults python-version: ${{ matrix.python-version }} @@ -111,9 +111,8 @@ jobs: - uses: conda-incubator/setup-miniconda@v3 with: - auto-update-conda: true python-version: ${{ matrix.python-version }} - auto-activate-base: true + - name: Install build dependencies explicitly run: | From 180141c87c2394e4f2574f9d0bdbc55bf0582c78 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 09:50:16 +0200 Subject: [PATCH 12/30] :construction: conda env not activated... 
--- .github/workflows/ci.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0e9986fd..51ca9921 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -112,11 +112,14 @@ jobs: - uses: conda-incubator/setup-miniconda@v3 with: python-version: ${{ matrix.python-version }} + auto-activate-base: true + auto-update-conda: true - name: Install build dependencies explicitly run: | pip install setuptools_scm + conda list - name: install pimms From 9d2c4a41ee4ce0dc46b66c00070db9357f451764 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 10:02:11 +0200 Subject: [PATCH 13/30] :bug: pip does not install in environment --- .github/workflows/ci.yaml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 51ca9921..7a81c924 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -112,16 +112,21 @@ jobs: - uses: conda-incubator/setup-miniconda@v3 with: python-version: ${{ matrix.python-version }} - auto-activate-base: true auto-update-conda: true + activate-environment: test + - name: check conda + run: | + conda info + conda list + conda info -e + which pip - name: Install build dependencies explicitly run: | pip install setuptools_scm conda list - - name: install pimms run: pip install . From 2b90eddd3b856acd96e43f932d84af95a23cc931 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 10:05:50 +0200 Subject: [PATCH 14/30] :construction: experiment --- .github/workflows/ci.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7a81c924..dc693b74 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -114,17 +114,20 @@ jobs: python-version: ${{ matrix.python-version }} auto-update-conda: true activate-environment: test + auto-activate-base: true - name: check conda run: | conda info conda list conda info -e + which python + conda activate test which pip - name: Install build dependencies explicitly run: | - pip install setuptools_scm + python -m pip install setuptools_scm conda list - name: install pimms From 1b9f2acb8ff66c0cb327e59181630b94e585e843 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 10:23:43 +0200 Subject: [PATCH 15/30] :bug: shell was not iniated --- .github/workflows/ci.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index dc693b74..d4c5e6fe 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -96,6 +96,9 @@ jobs: run-unit-local-pip-installation: runs-on: ${{ matrix.os }} name: test-pip-installation + defaults: + run: + shell: bash -el {0} strategy: fail-fast: false matrix: From 86f57b34cea5c9223c5a8e07cf93acf084c2e5fe Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 11:15:34 +0200 Subject: [PATCH 16/30] :bug: test installing njab separately --- .github/workflows/ci.yaml | 5 ++++- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d4c5e6fe..a3bff618 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -134,7 +134,10 @@ jobs: conda list - name: install pimms - run: pip install . + run: python -m pip install . 
+ + - name: install njab + run: python -m pip install njab - name: Install pytest run: pip install pytest pytest-cov diff --git a/pyproject.toml b/pyproject.toml index 571d9cb6..78d258ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ classifiers = [ "Topic :: Scientific/Engineering :: Bio-Informatics", ] dependencies = [ - "njab>=0.0.8", + # "njab>=0.0.8", "numpy", "matplotlib", "pandas", From a97968d7a40a8df89833e486f90feaf3500d1b51 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 12:21:29 +0200 Subject: [PATCH 17/30] :bug: order matters! --- pyproject.toml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 78d258ad..1c5f0b63 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,20 +16,20 @@ classifiers = [ "Topic :: Scientific/Engineering :: Bio-Informatics", ] dependencies = [ - # "njab>=0.0.8", + "fastai", + "torch", "numpy", "matplotlib", "pandas", + "scipy", "plotly", - "torch", "scikit-learn>=1.0", - "scipy", "seaborn", - "fastai", "omegaconf", "tqdm", "mrmr-selection", "pingouin", + "njab>=0.0.8", ] [project.scripts] From 49f49026bdae16ee47aef8e79f0b21f14b74c5c2 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 12:42:46 +0200 Subject: [PATCH 18/30] try again new order, add umap-learn explicitly --- pyproject.toml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1c5f0b63..ddb14dd9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,20 +16,21 @@ classifiers = [ "Topic :: Scientific/Engineering :: Bio-Informatics", ] dependencies = [ - "fastai", - "torch", "numpy", - "matplotlib", "pandas", + "torch", + "scikit-learn>=1.0", + "njab>=0.0.8", "scipy", + "fastai", + "matplotlib", "plotly", - "scikit-learn>=1.0", "seaborn", "omegaconf", "tqdm", "mrmr-selection", "pingouin", - "njab>=0.0.8", + "umap-learn", ] [project.scripts] From 4d202cb63d59b29d418b5ad288bfb452c58b92cf Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 13:29:08 +0200 Subject: [PATCH 19/30] :bug: do not re-install njab --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a3bff618..0c26ce13 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -136,8 +136,8 @@ jobs: - name: install pimms run: python -m pip install . 
- - name: install njab - run: python -m pip install njab + # - name: install njab + # run: python -m pip install njab - name: Install pytest run: pip install pytest pytest-cov From 6a17b90623bbb662489d3e15728b0dcfccd40ff6 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 14:19:21 +0200 Subject: [PATCH 20/30] restrict scipy (trapz missing in lifelines) latest scipy not supported by lifelines --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ddb14dd9..8b2e9639 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,6 @@ dependencies = [ "torch", "scikit-learn>=1.0", "njab>=0.0.8", - "scipy", "fastai", "matplotlib", "plotly", @@ -31,6 +30,7 @@ dependencies = [ "mrmr-selection", "pingouin", "umap-learn", + "scipy>=1.7,<1.14", ] [project.scripts] From 85b386c31b9b019e9723cdc145af2800888d83fc Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 14:43:34 +0200 Subject: [PATCH 21/30] :bug: exclude numpy 2.0 for now --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8b2e9639..b9b5cf8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,11 +16,11 @@ classifiers = [ "Topic :: Scientific/Engineering :: Bio-Informatics", ] dependencies = [ - "numpy", "pandas", "torch", "scikit-learn>=1.0", "njab>=0.0.8", + "numpy>=1.0,<2.0", "fastai", "matplotlib", "plotly", From e6db81fff5bb7097fd0ca2daa202b4f4e547ae68 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 15:07:41 +0200 Subject: [PATCH 22/30] numpy try two --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b9b5cf8a..9dda7055 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,10 +17,10 @@ classifiers = [ ] dependencies = [ "pandas", + "numpy>=1.14.0,<2.0", "torch", "scikit-learn>=1.0", "njab>=0.0.8", - "numpy>=1.0,<2.0", "fastai", "matplotlib", "plotly", From 481ac9fcc92e4d7192ebbfb4ed3fca94afc6bff2 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Tue, 8 Oct 2024 15:20:53 +0200 Subject: [PATCH 23/30] swap numpy and njab, adapt other pkg to what it was before --- pyproject.toml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9dda7055..2fe1f20e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,20 +17,20 @@ classifiers = [ ] dependencies = [ "pandas", - "numpy>=1.14.0,<2.0", - "torch", - "scikit-learn>=1.0", "njab>=0.0.8", + "numpy>=1.14,<2.0", + "torch", "fastai", + "scikit-learn>=1.0", + "scipy>=1.7,<1.14", + "seaborn", "matplotlib", "plotly", - "seaborn", "omegaconf", "tqdm", "mrmr-selection", "pingouin", - "umap-learn", - "scipy>=1.7,<1.14", + # "umap-learn", ] [project.scripts] From 83262206627b01f4a6064ec0337c5f6f3ffe6ebd Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 09:14:03 +0200 Subject: [PATCH 24/30] add back umap learn, relax constraints --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2fe1f20e..7e16f0e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,11 +18,11 @@ classifiers = [ dependencies = [ "pandas", "njab>=0.0.8", - "numpy>=1.14,<2.0", + "numpy>=1.0,<2.0", "torch", "fastai", "scikit-learn>=1.0", - "scipy>=1.7,<1.14", + "scipy>=1.5,<1.14", "seaborn", "matplotlib", "plotly", @@ -30,7 +30,7 @@ dependencies = [ "tqdm", "mrmr-selection", "pingouin", - # "umap-learn", + "umap-learn", ] 
[project.scripts] From 47988cf3cb066e78829a8b5afe5516a98a0d9959 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 14:16:32 +0200 Subject: [PATCH 25/30] :construction: in package single requirement single packages cannot be specified to just ignore the dependencies. --- .github/workflows/ci.yaml | 8 ++++---- pyproject.toml | 4 +--- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0c26ce13..5fa6623d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -130,14 +130,14 @@ jobs: - name: Install build dependencies explicitly run: | - python -m pip install setuptools_scm + pip install setuptools_scm conda list - name: install pimms - run: python -m pip install . + run: pip install . - # - name: install njab - # run: python -m pip install njab + - name: install njab + run: pip install njab --no-deps - name: Install pytest run: pip install pytest pytest-cov diff --git a/pyproject.toml b/pyproject.toml index 7e16f0e4..95310b80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,8 +17,7 @@ classifiers = [ ] dependencies = [ "pandas", - "njab>=0.0.8", - "numpy>=1.0,<2.0", + "numpy", "torch", "fastai", "scikit-learn>=1.0", @@ -30,7 +29,6 @@ dependencies = [ "tqdm", "mrmr-selection", "pingouin", - "umap-learn", ] [project.scripts] From d3fa8135757aaf45cfaa19a8b136a7640a7a8db1 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 14:52:55 +0200 Subject: [PATCH 26/30] :heavy_minus_sign: remove scipy dependency - leave it to njab to install dependencies in a second step. --- .github/workflows/ci.yaml | 2 +- pimmslearn/imputation.py | 304 +++++++++++++++++----------------- pimmslearn/pandas/__init__.py | 3 +- pyproject.toml | 6 +- tests/test_imputation.py | 54 +++--- 5 files changed, 186 insertions(+), 183 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5fa6623d..4fd20725 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -137,7 +137,7 @@ jobs: run: pip install . - name: install njab - run: pip install njab --no-deps + run: pip install njab - name: Install pytest run: pip install pytest pytest-cov diff --git a/pimmslearn/imputation.py b/pimmslearn/imputation.py index 4dd553ae..6eaa77b6 100644 --- a/pimmslearn/imputation.py +++ b/pimmslearn/imputation.py @@ -5,12 +5,14 @@ """ -from typing import Tuple, Dict -from sklearn.neighbors import NearestNeighbors -import scipy +import logging +from typing import Dict, Tuple + import numpy as np import pandas as pd -import logging + +# import scipy +# from sklearn.neighbors import NearestNeighbors logger = logging.getLogger(__name__) @@ -18,150 +20,150 @@ RANDOMSEED = 123 -def impute_missing(protein_values, mean=None, std=None): - """ - Imputation is based on the mean and standard deviation - from the protein_values. - If mean and standard deviation (std) are given, - missing values are imputed and protein_values are returned imputed. - If no mean and std are given, the mean and std are computed from - the non-missing protein_values. - - Parameters - ---------- - protein_values: Iterable - mean: float - std: float - - Returns - ------ - protein_values: pandas.Series - """ - raise NotImplementedError('Will be the main function combining features') - # clip by zero? - - -def _select_data(data: pd.DataFrame, threshold: float): - """Select (protein-) columns for imputation. 
- - Based on the threshold representing the minimum proportion of available - data per protein, the columns of a `pandas.DataFrame` are selected. - - Parameters - ---------- - data: pandas.DataFrame - threshold: float - Threshold of percentage of non-missing values to select a column/feature. - """ - columns_to_impute = data.notnull().mean() >= threshold - return columns_to_impute - - -def _sparse_coo_array(data: pd.DataFrame): - """Return a sparse scipy matrix from dense `pandas.DataFrame` with many - missing values. - """ - indices = np.nonzero(~np.isnan(data.to_numpy())) - data_selected_sparse = data.to_numpy() - data_selected_sparse = scipy.sparse.coo_matrix( - (data_selected_sparse[indices], indices), - shape=data_selected_sparse.shape) - return data_selected_sparse - - -def _get_weighted_mean(distances, data): - """Compute weighted mean ignoring - identical entries""" - mask = distances > 0.0 - weights = distances[mask] / distances[mask].sum() - weighted_sum = data.loc[mask].mul(weights, axis=0) - mean_imputed = weighted_sum.sum() / sum(mask) - return mean_imputed +# def impute_missing(protein_values, mean=None, std=None): +# """ +# Imputation is based on the mean and standard deviation +# from the protein_values. +# If mean and standard deviation (std) are given, +# missing values are imputed and protein_values are returned imputed. +# If no mean and std are given, the mean and std are computed from +# the non-missing protein_values. + +# Parameters +# ---------- +# protein_values: Iterable +# mean: float +# std: float + +# Returns +# ------ +# protein_values: pandas.Series +# """ +# raise NotImplementedError('Will be the main function combining features') +# # clip by zero? + + +# def _select_data(data: pd.DataFrame, threshold: float): +# """Select (protein-) columns for imputation. + +# Based on the threshold representing the minimum proportion of available +# data per protein, the columns of a `pandas.DataFrame` are selected. + +# Parameters +# ---------- +# data: pandas.DataFrame +# threshold: float +# Threshold of percentage of non-missing values to select a column/feature. +# """ +# columns_to_impute = data.notnull().mean() >= threshold +# return columns_to_impute + + +# def _sparse_coo_array(data: pd.DataFrame): +# """Return a sparse scipy matrix from dense `pandas.DataFrame` with many +# missing values. 
+# """ +# indices = np.nonzero(~np.isnan(data.to_numpy())) +# data_selected_sparse = data.to_numpy() +# data_selected_sparse = scipy.sparse.coo_matrix( +# (data_selected_sparse[indices], indices), +# shape=data_selected_sparse.shape) +# return data_selected_sparse + + +# def _get_weighted_mean(distances, data): +# """Compute weighted mean ignoring +# identical entries""" +# mask = distances > 0.0 +# weights = distances[mask] / distances[mask].sum() +# weighted_sum = data.loc[mask].mul(weights, axis=0) +# mean_imputed = weighted_sum.sum() / sum(mask) +# return mean_imputed # define imputation methods # could be done in PCA transformed space -def imputation_KNN(data, alone=True, threshold=0.5): - """ - - - Parameters - ---------- - data: pandas.DataFrame - alone: bool # is not used - threshold: float - Threshold of missing data by column in interval (0, 1) - """ - mask_selected = _select_data(data=data, threshold=threshold) - data_selected = data.loc[:, mask_selected].copy() - data_selected_sparse = _sparse_coo_array(data_selected) - # impute - knn_fitted = NearestNeighbors(n_neighbors=3, algorithm='brute').fit( - data_selected_sparse) - fit_distances, fit_neighbors = knn_fitted.kneighbors(data_selected_sparse) - for i, (distances, ids) in enumerate(zip(fit_distances, fit_neighbors)): - mean_imputed = _get_weighted_mean(distances, data_selected.loc[ids]) - if all(distances == 0.0): - logger.warning(f"Did not find any neighbor for int-id: {i}") - else: - assert i == ids[distances == 0.0], ( - "None or more then one identical data points " - "for ids: {}".format(ids[distances == 0.0]) - ) - mask = data_selected.iloc[i].isna() - data_selected.loc[i, mask] = mean_imputed.loc[mask] # SettingWithCopyError - - data.update(data_selected) - return data - - -def imputation_normal_distribution(log_intensities: pd.Series, - mean_shift=1.8, - std_shrinkage=0.3, - copy=True): - """Impute missing log-transformed intensity values of a single feature. - Samples one value for imputation for all samples. - - Parameters - ---------- - log_intensities: pd.Series - Series of normally distributed values of a single feature (for all samples/runs). - Here usually log-transformed intensities. - mean_shift: integer, float - Shift the mean of the log_intensities by factors of their standard - deviation to the negative. - std_shrinkage: float - Value greater than zero by which to shrink (or inflate) the - standard deviation of the log_intensities. - """ - np.random.seed(RANDOMSEED) - if not isinstance(log_intensities, pd.Series): - try: - log_intensities.Series(log_intensities) - logger.warning("Series created of Iterable.") - except BaseException: - raise ValueError( - "Plese provided data which is a pandas.Series or an Iterable") - if mean_shift < 0: - raise ValueError( - "Please specify a positive float as the std.-dev. 
is non-negative.") - if std_shrinkage <= 0: - raise ValueError( - "Please specify a positive float as shrinkage factor for std.-dev.") - if std_shrinkage >= 1: - logger.warning("Standard Deviation will increase for imputed values.") - - mean = log_intensities.mean() - std = log_intensities.std() - - mean_shifted = mean - (std * mean_shift) - std_shrinked = std * std_shrinkage - - if copy: - log_intensities = log_intensities.copy(deep=True) - - return log_intensities.where(log_intensities.notna(), - np.random.normal(mean_shifted, std_shrinked)) +# def imputation_KNN(data, alone=True, threshold=0.5): +# """ + + +# Parameters +# ---------- +# data: pandas.DataFrame +# alone: bool # is not used +# threshold: float +# Threshold of missing data by column in interval (0, 1) +# """ +# mask_selected = _select_data(data=data, threshold=threshold) +# data_selected = data.loc[:, mask_selected].copy() +# data_selected_sparse = _sparse_coo_array(data_selected) +# # impute +# knn_fitted = NearestNeighbors(n_neighbors=3, algorithm='brute').fit( +# data_selected_sparse) +# fit_distances, fit_neighbors = knn_fitted.kneighbors(data_selected_sparse) +# for i, (distances, ids) in enumerate(zip(fit_distances, fit_neighbors)): +# mean_imputed = _get_weighted_mean(distances, data_selected.loc[ids]) +# if all(distances == 0.0): +# logger.warning(f"Did not find any neighbor for int-id: {i}") +# else: +# assert i == ids[distances == 0.0], ( +# "None or more then one identical data points " +# "for ids: {}".format(ids[distances == 0.0]) +# ) +# mask = data_selected.iloc[i].isna() +# data_selected.loc[i, mask] = mean_imputed.loc[mask] # SettingWithCopyError + +# data.update(data_selected) +# return data + + +# def imputation_normal_distribution(log_intensities: pd.Series, +# mean_shift=1.8, +# std_shrinkage=0.3, +# copy=True): +# """Impute missing log-transformed intensity values of a single feature. +# Samples one value for imputation for all samples. + +# Parameters +# ---------- +# log_intensities: pd.Series +# Series of normally distributed values of a single feature (for all samples/runs). +# Here usually log-transformed intensities. +# mean_shift: integer, float +# Shift the mean of the log_intensities by factors of their standard +# deviation to the negative. +# std_shrinkage: float +# Value greater than zero by which to shrink (or inflate) the +# standard deviation of the log_intensities. +# """ +# np.random.seed(RANDOMSEED) +# if not isinstance(log_intensities, pd.Series): +# try: +# log_intensities.Series(log_intensities) +# logger.warning("Series created of Iterable.") +# except BaseException: +# raise ValueError( +# "Plese provided data which is a pandas.Series or an Iterable") +# if mean_shift < 0: +# raise ValueError( +# "Please specify a positive float as the std.-dev. 
is non-negative.") +# if std_shrinkage <= 0: +# raise ValueError( +# "Please specify a positive float as shrinkage factor for std.-dev.") +# if std_shrinkage >= 1: +# logger.warning("Standard Deviation will increase for imputed values.") + +# mean = log_intensities.mean() +# std = log_intensities.std() + +# mean_shifted = mean - (std * mean_shift) +# std_shrinked = std * std_shrinkage + +# if copy: +# log_intensities = log_intensities.copy(deep=True) + +# return log_intensities.where(log_intensities.notna(), +# np.random.normal(mean_shifted, std_shrinked)) def impute_shifted_normal(df_wide: pd.DataFrame, @@ -224,13 +226,13 @@ def impute_shifted_normal(df_wide: pd.DataFrame, return imputed_shifted_normal -def imputation_mixed_norm_KNN(data): - # impute columns with less than 50% missing values with KNN - data = imputation_KNN(data, alone=False) # ToDo: Alone is not used. - # impute remaining columns based on the distribution of the protein - data = imputation_normal_distribution( - data, mean_shift=1.8, std_shrinkage=0.3) - return data +# def imputation_mixed_norm_KNN(data): +# # impute columns with less than 50% missing values with KNN +# data = imputation_KNN(data, alone=False) # ToDo: Alone is not used. +# # impute remaining columns based on the distribution of the protein +# data = imputation_normal_distribution( +# data, mean_shift=1.8, std_shrinkage=0.3) +# return data def compute_moments_shift(observed: pd.Series, imputed: pd.Series, diff --git a/pimmslearn/pandas/__init__.py b/pimmslearn/pandas/__init__.py index 4be42b68..fa69cd7a 100644 --- a/pimmslearn/pandas/__init__.py +++ b/pimmslearn/pandas/__init__.py @@ -7,7 +7,8 @@ import omegaconf import pandas as pd -from pimmslearn.pandas.calc_errors import calc_errors_per_feat, get_absolute_error +from pimmslearn.pandas.calc_errors import (calc_errors_per_feat, + get_absolute_error) __all__ = [ 'calc_errors_per_feat', diff --git a/pyproject.toml b/pyproject.toml index 95310b80..00a133e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,13 +21,11 @@ dependencies = [ "torch", "fastai", "scikit-learn>=1.0", - "scipy>=1.5,<1.14", + # "scipy>=1.5,<1.14", "seaborn", "matplotlib", - "plotly", + "plotly", # not used in library, but workflow "omegaconf", - "tqdm", - "mrmr-selection", "pingouin", ] diff --git a/tests/test_imputation.py b/tests/test_imputation.py index 0c98f77b..f5544f3e 100644 --- a/tests/test_imputation.py +++ b/tests/test_imputation.py @@ -1,9 +1,3 @@ -from pathlib import Path -import numpy as np -import pandas as pd -import pytest - -from pimmslearn.imputation import imputation_KNN, imputation_normal_distribution, impute_shifted_normal """ # Test Data set was created from a sample by shuffling: @@ -19,6 +13,14 @@ data.apply(numpy.random.shuffle, axis=1) data.to_csv('test_data.csv') """ +from pathlib import Path + +import numpy as np +import pandas as pd +import pytest + +from pimmslearn.imputation import \ + impute_shifted_normal # imputation_KNN,; imputation_normal_distribution, @pytest.fixture @@ -33,26 +35,26 @@ def example_data(): # pass -def test_imputation_KNN(example_data): - threshold = 0.55 - data = example_data.copy() - data_transformed = imputation_KNN(data, threshold=threshold) - columns_to_impute = data.notnull().mean() >= threshold - columns_to_impute = columns_to_impute[columns_to_impute].index - assert all(data_transformed.loc[:, columns_to_impute].isna().sum() < 15) - n_not_to_impute = data.loc[:, - data.notnull().mean() < threshold].isna().sum() - assert all(data_transformed.loc[:, 
n_not_to_impute.index].isna().sum() - == n_not_to_impute) - - -def test_imputation_normal_dist(): - log_intensities = pd.Series([26.0, np.nan, 24.0, 25.0, np.nan]) - imputed = imputation_normal_distribution(log_intensities) - imputed = round(imputed, ndigits=5) - assert imputed.equals( - pd.Series([26.0, 22.87431, 24.0, 25.0, 22.87431]) - ) +# def test_imputation_KNN(example_data): +# threshold = 0.55 +# data = example_data.copy() +# data_transformed = imputation_KNN(data, threshold=threshold) +# columns_to_impute = data.notnull().mean() >= threshold +# columns_to_impute = columns_to_impute[columns_to_impute].index +# assert all(data_transformed.loc[:, columns_to_impute].isna().sum() < 15) +# n_not_to_impute = data.loc[:, +# data.notnull().mean() < threshold].isna().sum() +# assert all(data_transformed.loc[:, n_not_to_impute.index].isna().sum() +# == n_not_to_impute) + + +# def test_imputation_normal_dist(): +# log_intensities = pd.Series([26.0, np.nan, 24.0, 25.0, np.nan]) +# imputed = imputation_normal_distribution(log_intensities) +# imputed = round(imputed, ndigits=5) +# assert imputed.equals( +# pd.Series([26.0, 22.87431, 24.0, 25.0, 22.87431]) +# ) # def test_imputation_mixed_norm_KNN(): # pass From f0ea20505ff9a265f6a432ae5816d0a84e40e59c Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 15:46:45 +0200 Subject: [PATCH 27/30] :arrow_up: remove support for python 3.8 (end-of-life) --- .github/workflows/ci.yaml | 4 ++-- .readthedocs.yaml | 4 ++-- environment.yml | 2 +- project/workflow/envs/pimms.yaml | 2 +- pyproject.toml | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4fd20725..502e4d18 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,7 @@ jobs: #"macos-13", # "windows-latest" # rrcovNA cannot be build from source on windows-server ] - python-version: ["3.8", "3.9", "3.10"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Checkout uses: actions/checkout@v4 @@ -104,7 +104,7 @@ jobs: matrix: os: ["ubuntu-latest",] # "macos-latest", "windows-latest"] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 6e817d6b..3199f225 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -9,7 +9,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3.8" + python: "3.10" # You can also specify other tool versions: # nodejs: "19" # rust: "1.64" @@ -32,4 +32,4 @@ python: - method: pip path: . 
extra_requirements: - - docs \ No newline at end of file + - docs diff --git a/environment.yml b/environment.yml index aad91462..7aa4526e 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ channels: - plotly # - defaults dependencies: - - python>=3.8,<=3.12 + - python>=3.9,<=3.12 - numpy - pandas>=1 - scipy>=1.6 diff --git a/project/workflow/envs/pimms.yaml b/project/workflow/envs/pimms.yaml index 9d1c927f..a2ab6f0a 100644 --- a/project/workflow/envs/pimms.yaml +++ b/project/workflow/envs/pimms.yaml @@ -9,7 +9,7 @@ channels: - plotly # - defaults dependencies: - - python>=3.8,<=3.12 + - python>=3.9,<=3.12 - numpy - pandas>=1 - scipy>=1.6 diff --git a/pyproject.toml b/pyproject.toml index 00a133e4..2c48cffd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ name = "pimms-learn" # See the section below: [tools.setuptools.dynamic] dynamic = ["version"] readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.9" # These are keywords classifiers = [ "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", From f4f06ab6f53dc7b18d7766a19a6796fdf8609a70 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 16:12:51 +0200 Subject: [PATCH 28/30] :art: setuptools_scm uses tags to determine version, add tags --- .github/workflows/ci.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 502e4d18..69b320f0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -107,6 +107,8 @@ jobs: python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 + with: + fetch-tags: true # - uses: actions/setup-python@v5 # with: From b57415ad6da30e34321d0a7e8ddf5d4cd04251a6 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 16:20:57 +0200 Subject: [PATCH 29/30] :bug: tags not fetched without entire history see https://github.com/actions/checkout/issues/1471 --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 69b320f0..dd5dc9ec 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -109,6 +109,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-tags: true + fetch-depth: 0 # - uses: actions/setup-python@v5 # with: From 62e30ea32cf4c67b0174508bb8e3c35590e18660 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Wed, 9 Oct 2024 17:01:58 +0200 Subject: [PATCH 30/30] :art: clean-up workflow file --- .github/workflows/ci.yaml | 44 ++++++++------------------------------- 1 file changed, 9 insertions(+), 35 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index dd5dc9ec..ff35c1ae 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,41 +28,19 @@ jobs: - name: Set up Miniconda uses: conda-incubator/setup-miniconda@v3 with: - # miniforge-variant: Mambaforge - # miniforge-version: latest - # conda-solver: libmamba - # use-mamba: true - # channels: conda-forge,defaults python-version: ${{ matrix.python-version }} - # mamba-version: "*" - # channels: conda-forge,defaults channel-priority: strict environment-file: snakemake_env.yml activate-environment: snakemake auto-activate-base: true auto-update-conda: true - # - uses: mamba-org/setup-micromamba@v1 - # with: - # micromamba-version: 'latest' # any version from https://github.com/mamba-org/micromamba-releases - # environment-file: snakemake_env.yml - # create-args: >- - # python=${{ matrix.python-version }} - # init-shell: >- - # bash - # 
cache-environment: false - # post-cleanup: 'none' - # - name: inspect-conda-environment - # run: | - # conda info - # conda list - # conda env export --from-history --no-builds > environment.yml - # conda env export --no-builds - # conda env export --no-builds > environment_w_versions.yml - # - name: test-r-kernel-imports - # run: | - # Rscript -e "library(stringi)" - # Rscript -e "library(stringr)" - # Rscript -e "library(reshape2)" + - name: inspect-conda-environment + run: | + conda info + conda list + conda env export --from-history --no-builds > environment.yml + conda env export --no-builds + conda env export --no-builds > environment_w_versions.yml - name: Dry-Run demo workflow (integration test) run: | cd project @@ -90,8 +68,8 @@ jobs: name: ${{ matrix.os }}-${{ matrix.python-version }}-example-workflow-results path: | project/runs/example/ - environment.yml - environment_w_versions.yml + snakemake_env + project/.snakemake/conda/ run-unit-local-pip-installation: runs-on: ${{ matrix.os }} @@ -126,10 +104,6 @@ jobs: run: | conda info conda list - conda info -e - which python - conda activate test - which pip - name: Install build dependencies explicitly run: |