From 06b56af40666d419fc57f58e0fceaff12525b139 Mon Sep 17 00:00:00 2001 From: runame Date: Sat, 24 Jul 2021 12:05:14 +0200 Subject: [PATCH 1/3] Restructure setup for PyPI release --- pyproject.toml | 6 +++++ requirements.txt | 5 ---- setup.cfg | 62 ++++++++++++++++++++++++++++++++++++++++++++++++ setup.py | 45 +++-------------------------------- 4 files changed, 71 insertions(+), 47 deletions(-) create mode 100644 pyproject.toml delete mode 100644 requirements.txt create mode 100644 setup.cfg diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..374b58cb --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,6 @@ +[build-system] +requires = [ + "setuptools>=42", + "wheel" +] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 38b22452..00000000 --- a/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -torch -torchvision -torchaudio -backpack-for-pytorch -asdfghjkl @ git+https://github.com/kazukiosawa/asdfghjkl.git diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..34058551 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,62 @@ +############################################################################### +# Main library # +############################################################################### + +[metadata] +name = laplace-torch +version = 0.1a1 +author = Alex Immer +url = https://github.com/AlexImmer/Laplace +project_urls = + Bug Tracker = https://github.com/AlexImmer/Laplace/issues +description = laplace - Laplace approximations for deep learning +long_description = file: README.md +long_description_content_type = text/markdown +license = MIT +# Change if running only on Windows, Mac or Linux (comma-separated) +platforms = any +# Add all kinds of additional classifiers as defined under +# https://pypi.python.org/pypi?%3Aaction=list_classifiers +classifiers = + Development Status :: 3 - Alpha + License :: OSI Approved :: MIT License + Operating System :: OS Independent + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + +[options] +zip_safe = False +packages = find: +include_package_data = True +setup_requires = + setuptools_scm +# Dependencies of the project (semicolon/line-separated): +install_requires = + torch + torchvision + torchaudio + backpack-for-pytorch + asdfghjkl +# Require a specific Python version, e.g. 
Python 2.7 or >= 3.4 +python_requires = >=3.8 + +[options.packages.find] +exclude = tests* + +############################################################################### +# Development dependencies # +############################################################################### + +[options.extras_require] +# Dependencies needed to run the tests (semicolon/line-separated) +tests = + pytest + pytest-cov + coveralls + scipy + +# Dependencies needed to build/view the documentation (semicolon/line-separated) +docs = + matplotlib + pdoc3 + diff --git a/setup.py b/setup.py index d4e4488a..1abbd068 100644 --- a/setup.py +++ b/setup.py @@ -1,43 +1,4 @@ -from os import path -from setuptools import find_packages, setup +import setuptools - -CURRENT_DIR = path.abspath(path.dirname(__file__)) - - -def read_me(filename): - with open(path.join(CURRENT_DIR, filename), encoding='utf-8') as f: - return f.read() - - -def requirements(filename): - with open(path.join(CURRENT_DIR, filename)) as f: - return f.read().splitlines() - - -AUTHORS = "" -NAME = "laplace" -PACKAGES = find_packages() -DESCR = "" -LONG_DESCR = "" -LONG_DESCR_TYPE = 'text/markdown' -REQUIREMENTS = requirements('requirements.txt') -VERSION = "0.1" -URL = "" -LICENSE = "" - - -setup( - author=AUTHORS, - name=NAME, - version=VERSION, - description=DESCR, - long_description=LONG_DESCR, - long_description_content_type=LONG_DESCR_TYPE, - install_requires=REQUIREMENTS, - url=URL, - license=LICENSE, - packages=PACKAGES, - zip_safe=False, - python_requires=">=3.7", -) +if __name__ == "__main__": + setuptools.setup() From ff14f0210876723b1623bcee32b0b9f8922bdaef Mon Sep 17 00:00:00 2001 From: runame Date: Sat, 24 Jul 2021 12:06:11 +0200 Subject: [PATCH 2/3] Adjust README for PyPI --- README.md | 46 +++++++++++++++++------------------- docs/feature_extractor.html | 2 +- docs/index.html | 36 +++++++++++++--------------- docs/regression_example.png | Bin 108466 -> 108423 bytes 4 files changed, 40 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 9168949f..079d1229 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ There is also a corresponding paper, [*Laplace Redux — Effortless Bayesian Dee ```bibtex @article{daxberger2021laplace, title={Laplace Redux--Effortless Bayesian Deep Learning}, - author={Daxberger, Erik and Kristiadi, Agustinus and Immer, Alexander + author={Daxberger, Erik and Kristiadi, Agustinus and Immer, Alexander and Eschenhagen, Runa and Bauer, Matthias and Hennig, Philipp}, journal={arXiv preprint arXiv:2106.14806}, year={2021} @@ -22,54 +22,51 @@ There is also a corresponding paper, [*Laplace Redux — Effortless Bayesian Dee ## Setup We assume `python3.8` since the package was developed with that version. -To install `laplace` with `pip`, run the following: +To install laplace with `pip`, run the following: ```bash -# directly install from git -pip install laplace@git+https://github.com/AlexImmer/Laplace.git +pip install laplace-torch ``` For development purposes, clone the repository and then install: ```bash # or after cloning the repository for development -pip install -r requirements.txt -# for development pip install -e . # run tests -pip install -r tests/requirements.txt +pip install -e .[tests] pytest tests/ ``` -## Structure -The laplace package consists of two main components: +## Structure +The laplace package consists of two main components: -1. 
The subclasses of [`laplace.BaseLaplace`](laplace/baselaplace.py) that implement different sparsity structures: different subsets of weights (`'all'` and `'last_layer'`) and different structures of the Hessian approximation (`'full'`, `'kron'`, and `'diag'`). This results in six currently available options: `laplace.FullLaplace`, `laplace.KronLaplace`, `laplace.DiagLaplace`, and the corresponding last-layer variations `laplace.FullLLLaplace`, `laplace.KronLLLaplace`, and `laplace.DiagLLLaplace`, which are all subclasses of [`laplace.LLLaplace`](laplace/lllaplace.py). All of these can be conveniently accessed via the [`laplace.Laplace`](laplace/laplace.py) function. -2. The backends in [`laplace.curvature`](laplace/curvature/) which provide access to Hessian approximations of +1. The subclasses of [`laplace.BaseLaplace`](https://github.com/AlexImmer/Laplace/blob/main/laplace/baselaplace.py) that implement different sparsity structures: different subsets of weights (`'all'` and `'last_layer'`) and different structures of the Hessian approximation (`'full'`, `'kron'`, and `'diag'`). This results in six currently available options: `laplace.FullLaplace`, `laplace.KronLaplace`, `laplace.DiagLaplace`, and the corresponding last-layer variations `laplace.FullLLLaplace`, `laplace.KronLLLaplace`, and `laplace.DiagLLLaplace`, which are all subclasses of [`laplace.LLLaplace`](https://github.com/AlexImmer/Laplace/blob/main/laplace/lllaplace.py). All of these can be conveniently accessed via the [`laplace.Laplace`](https://github.com/AlexImmer/Laplace/blob/main/laplace/laplace.py) function. +2. The backends in [`laplace.curvature`](https://github.com/AlexImmer/Laplace/blob/main/laplace/curvature/) which provide access to Hessian approximations of the corresponding sparsity structures, for example, the diagonal GGN. Additionally, the package provides utilities for -decomposing a neural network into feature extractor and last layer for `LLLaplace` subclasses ([`laplace.feature_extractor`](laplace/feature_extractor.py)) +decomposing a neural network into feature extractor and last layer for `LLLaplace` subclasses ([`laplace.feature_extractor`](https://github.com/AlexImmer/Laplace/blob/main/laplace/feature_extractor.py)) and -effectively dealing with Kronecker factors ([`laplace.matrix`](laplace/matrix.py)). +effectively dealing with Kronecker factors ([`laplace.matrix`](https://github.com/AlexImmer/Laplace/blob/main/laplace/matrix.py)). ## Extendability To extend the laplace package, new `BaseLaplace` subclasses can be designed, for example, a block-diagonal structure or subset-of-weights Laplace. -Alternatively, extending or integrating backends (subclasses of [`curvature.curvature`](laplace/curvature/curvature.py)) allows to provide different Hessian +Alternatively, extending or integrating backends (subclasses of [`curvature.curvature`](https://github.com/AlexImmer/Laplace/blob/main/laplace/curvature/curvature.py)) allows to provide different Hessian approximations to the Laplace approximations. -For example, currently the [`curvature.BackPackInterface`](laplace/curvature/backpack.py) based on [BackPACK](https://github.com/f-dangel/backpack/) and [`curvature.AsdlInterface`](laplace/curvature/asdl.py) based on [ASDL](https://github.com/kazukiosawa/asdfghjkl) are available. 
+For example, currently the [`curvature.BackPackInterface`](https://github.com/AlexImmer/Laplace/blob/main/laplace/curvature/backpack.py) based on [BackPACK](https://github.com/f-dangel/backpack/) and [`curvature.AsdlInterface`](https://github.com/AlexImmer/Laplace/blob/main/laplace/curvature/asdl.py) based on [ASDL](https://github.com/kazukiosawa/asdfghjkl) are available. The `curvature.AsdlInterface` provides a Kronecker factored empirical Fisher while the `curvature.BackPackInterface` does not, and only the `curvature.BackPackInterface` provides access to Hessian approximations for a regression (MSELoss) loss function. ## Example usage -### *Post-hoc* prior precision tuning of last-layer LA +### *Post-hoc* prior precision tuning of last-layer LA In the following example, a pre-trained model is loaded, then the Laplace approximation is fit to the training data, and the prior precision is optimized with cross-validation `'CV'`. -After that, the resulting LA is used for prediction with -the `'probit'` predictive for classification. +After that, the resulting LA is used for prediction with +the `'probit'` predictive for classification. ```python from laplace import Laplace @@ -79,7 +76,7 @@ model = load_map_model() # User-specified LA flavor la = Laplace(model, 'classification', - subset_of_weights='all', + subset_of_weights='all', hessian_structure='diag') la.fit(train_loader) la.optimize_prior_precision(method='CV', val_loader=val_loader) @@ -97,14 +94,14 @@ the log marginal likelihood. ```python from laplace import Laplace - + # Un- or pre-trained model model = load_model() - + # Default to recommended last-layer KFAC LA: la = Laplace(model, likelihood='regression') la.fit(train_loader) - + # ML w.r.t. prior precision and observation noise ml = la.log_marginal_likelihood(prior_prec, obs_noise) ml.backward() @@ -115,7 +112,8 @@ ml.backward() The documentation is available [here](https://aleximmer.github.io/Laplace) or can be generated and/or viewed locally: ```bash -pip install pdoc3 matplotlib +# assuming the repository was cloned +pip install -e .[docs] # create docs and write to html bash update_docs.sh # .. or serve the docs directly @@ -127,7 +125,7 @@ pdoc --http 0.0.0.0:8080 laplace --template-dir template This package relies on various improvements to the Laplace approximation for neural networks, which was originally due to MacKay [1]. - [1] MacKay, DJC. [*A Practical Bayesian Framework for Backpropagation Networks*](https://authors.library.caltech.edu/13793/). Neural Computation 1992. -- [2] Gibbs, M. N. [*Bayesian Gaussian Processes for Regression and Classification*](https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.147.1130&rep=rep1&type=pdf). PhD Thesis 1997. +- [2] Gibbs, M. N. [*Bayesian Gaussian Processes for Regression and Classification*](https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.147.1130&rep=rep1&type=pdf). PhD Thesis 1997. - [3] Snoek, J., Rippel, O., Swersky, K., Kiros, R., Satish, N., Sundaram, N., Patwary, M., Prabhat, M., Adams, R. [*Scalable Bayesian Optimization Using Deep Neural Networks*](https://arxiv.org/abs/1502.05700). ICML 2015. - [4] Ritter, H., Botev, A., Barber, D. [*A Scalable Laplace Approximation for Neural Networks*](https://openreview.net/forum?id=Skdvd2xAZ). ICLR 2018. - [5] Foong, A. Y., Li, Y., Hernández-Lobato, J. M., Turner, R. E. [*'In-Between' Uncertainty in Bayesian Neural Networks*](https://arxiv.org/abs/1906.11537). ICML UDL Workshop 2019. 
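With PATCH 1/3 the project metadata and dependencies live in `setup.cfg`, the build backend is declared in `pyproject.toml`, and `setup.py` only delegates to `setuptools.setup()`. Below is a minimal sketch of how such a layout is typically built and checked before a PyPI release, assuming the standard PyPA `build` and `twine` tools (neither is added or pinned by this patch):

```bash
# tooling for building and uploading; not a dependency introduced by this patch
python -m pip install --upgrade build twine

# build sdist and wheel through the setuptools.build_meta backend declared in pyproject.toml
python -m build

# verify that the metadata and the README long_description from setup.cfg render for PyPI
twine check dist/*

# upload to PyPI (requires credentials); users then install with `pip install laplace-torch`
twine upload dist/*
```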
diff --git a/docs/feature_extractor.html b/docs/feature_extractor.html index 44f50ac7..db8a7509 100644 --- a/docs/feature_extractor.html +++ b/docs/feature_extractor.html @@ -35,7 +35,7 @@

Classes

class FeatureExtractor -(model: torch.nn.modules.module.Module, last_layer_name: Union[str, NoneType] = None) +(model: torch.nn.modules.module.Module, last_layer_name: Optional[str] = None)

Feature extractor for a PyTorch neural network. diff --git a/docs/index.html b/docs/index.html index 8c792092..be863d91 100644 --- a/docs/index.html +++ b/docs/index.html @@ -35,7 +35,7 @@

Package laplace

There is also a corresponding paper, Laplace Redux — Effortless Bayesian Deep Learning, which introduces the library, provides an introduction to the Laplace approximation, reviews its use in deep learning, and empirically demonstrates its versatility and competitiveness. Please consider referring to the paper when using our library:

@article{daxberger2021laplace,
   title={Laplace Redux--Effortless Bayesian Deep Learning},
-  author={Daxberger, Erik and Kristiadi, Agustinus and Immer, Alexander 
+  author={Daxberger, Erik and Kristiadi, Agustinus and Immer, Alexander
           and Eschenhagen, Runa and Bauer, Matthias and Hennig, Philipp},
   journal={arXiv preprint arXiv:2106.14806},
   year={2021}
@@ -43,37 +43,34 @@ 

Package laplace

Setup

We assume python3.8 since the package was developed with that version. -To install laplace.laplace with pip, run the following:

-# directly install from git
-pip install laplace@git+https://github.com/AlexImmer/Laplace.git
+To install laplace with pip, run the following:

+pip install laplace-torch

For development purposes, clone the repository and then install:

# or after cloning the repository for development
-pip install -r requirements.txt
-# for development
 pip install -e .
 # run tests
-pip install -r tests/requirements.txt
+pip install -e .[tests]
 pytest tests/
 

Structure

-The laplace package consists of two main components: 
+The laplace package consists of two main components:

-  1. The subclasses of laplace.BaseLaplace that implement different sparsity structures: different subsets of weights ('all' and 'last_layer') and different structures of the Hessian approximation ('full', 'kron', and 'diag'). This results in six currently available options: FullLaplace, KronLaplace, DiagLaplace, and the corresponding last-layer variations FullLLLaplace, KronLLLaplace, and DiagLLLaplace, which are all subclasses of laplace.LLLaplace. All of these can be conveniently accessed via the laplace.Laplace function.
-  2. The backends in laplace.curvature which provide access to Hessian approximations of
+  1. The subclasses of laplace.BaseLaplace that implement different sparsity structures: different subsets of weights ('all' and 'last_layer') and different structures of the Hessian approximation ('full', 'kron', and 'diag'). This results in six currently available options: FullLaplace, KronLaplace, DiagLaplace, and the corresponding last-layer variations FullLLLaplace, KronLLLaplace, and DiagLLLaplace, which are all subclasses of laplace.LLLaplace. All of these can be conveniently accessed via the laplace.Laplace function.
+  2. The backends in laplace.curvature which provide access to Hessian approximations of
   the corresponding sparsity structures, for example, the diagonal GGN.

 Additionally, the package provides utilities for
-decomposing a neural network into feature extractor and last layer for LLLaplace subclasses (laplace.feature_extractor)
+decomposing a neural network into feature extractor and last layer for LLLaplace subclasses (laplace.feature_extractor)
 and
-effectively dealing with Kronecker factors (laplace.matrix).
+effectively dealing with Kronecker factors (laplace.matrix).

Extendability

To extend the laplace package, new BaseLaplace subclasses can be designed, for example, a block-diagonal structure or subset-of-weights Laplace. -Alternatively, extending or integrating backends (subclasses of curvature.curvature) allows to provide different Hessian +Alternatively, extending or integrating backends (subclasses of curvature.curvature) allows to provide different Hessian approximations to the Laplace approximations. -For example, currently the curvature.BackPackInterface based on BackPACK and curvature.AsdlInterface based on ASDL are available. +For example, currently the curvature.BackPackInterface based on BackPACK and curvature.AsdlInterface based on ASDL are available. The AsdlInterface provides a Kronecker factored empirical Fisher while the BackPackInterface does not, and only the BackPackInterface provides access to Hessian approximations for a regression (MSELoss) loss function.

@@ -83,7 +80,7 @@

Post-hoc prio then the Laplace approximation is fit to the training data, and the prior precision is optimized with cross-validation 'CV'. After that, the resulting LA is used for prediction with -the 'probit' predictive for classification.

+the 'probit' predictive for classification.

from laplace import Laplace
 
 # pre-trained model
@@ -91,7 +88,7 @@ 

Post-hoc prio # User-specified LA flavor la = Laplace(model, 'classification', - subset_of_weights='all', + subset_of_weights='all', hessian_structure='diag') la.fit(train_loader) la.optimize_prior_precision(method='CV', val_loader=val_loader) @@ -119,7 +116,8 @@

Differe

Documentation

The documentation is available here or can be generated and/or viewed locally:

-pip install pdoc3 matplotlib
+# assuming the repository was cloned
+pip install -e .[docs]
 # create docs and write to html
 bash update_docs.sh
 # .. or serve the docs directly
@@ -129,7 +127,7 @@ 

References

This package relies on various improvements to the Laplace approximation for neural networks, which was originally due to MacKay [1].