From 3e2e9f6000d9c4e5e5269c0a3031e032389fc63f Mon Sep 17 00:00:00 2001 From: Aedial Date: Sat, 22 Apr 2023 17:12:02 +0200 Subject: [PATCH] [MISC] Add docs to Github Pages --- .github/workflows/sphinx.yml | 32 +++++++++++ TODO.md | 7 ++- docs/Makefile | 20 +++++++ docs/requirements.txt | 5 ++ docs/source/conf.py | 64 +++++++++++++++++++++ docs/source/index.rst | 20 +++++++ docs/source/novelai_api.BanList.rst | 7 +++ docs/source/novelai_api.BiasGroup.rst | 7 +++ docs/source/novelai_api.GlobalSettings.rst | 7 +++ docs/source/novelai_api.Idstore.rst | 7 +++ docs/source/novelai_api.ImagePreset.rst | 7 +++ docs/source/novelai_api.Keystore.rst | 7 +++ docs/source/novelai_api.NovelAIError.rst | 7 +++ docs/source/novelai_api.NovelAI_API.rst | 7 +++ docs/source/novelai_api.Preset.rst | 7 +++ docs/source/novelai_api.SchemaValidator.rst | 7 +++ docs/source/novelai_api.StoryHandler.rst | 7 +++ docs/source/novelai_api.Tokenizer.rst | 7 +++ docs/source/novelai_api.python_utils.rst | 7 +++ docs/source/novelai_api.rst | 20 +++++++ docs/source/novelai_api.utils.rst | 7 +++ novelai_api/GlobalSettings.py | 12 ++-- novelai_api/ImagePreset.py | 42 ++++++++------ novelai_api/Preset.py | 46 +++++++-------- novelai_api/Tokenizer.py | 10 +--- novelai_api/_high_level.py | 3 +- novelai_api/_low_level.py | 3 +- novelai_api/python_utils.py | 49 ++++++++++++++++ novelai_api/utils.py | 26 +-------- noxfile.py | 31 ++++++++++ 30 files changed, 404 insertions(+), 84 deletions(-) create mode 100644 .github/workflows/sphinx.yml create mode 100644 docs/Makefile create mode 100644 docs/requirements.txt create mode 100644 docs/source/conf.py create mode 100644 docs/source/index.rst create mode 100644 docs/source/novelai_api.BanList.rst create mode 100644 docs/source/novelai_api.BiasGroup.rst create mode 100644 docs/source/novelai_api.GlobalSettings.rst create mode 100644 docs/source/novelai_api.Idstore.rst create mode 100644 docs/source/novelai_api.ImagePreset.rst create mode 100644 
docs/source/novelai_api.Keystore.rst create mode 100644 docs/source/novelai_api.NovelAIError.rst create mode 100644 docs/source/novelai_api.NovelAI_API.rst create mode 100644 docs/source/novelai_api.Preset.rst create mode 100644 docs/source/novelai_api.SchemaValidator.rst create mode 100644 docs/source/novelai_api.StoryHandler.rst create mode 100644 docs/source/novelai_api.Tokenizer.rst create mode 100644 docs/source/novelai_api.python_utils.rst create mode 100644 docs/source/novelai_api.rst create mode 100644 docs/source/novelai_api.utils.rst create mode 100644 novelai_api/python_utils.py diff --git a/.github/workflows/sphinx.yml b/.github/workflows/sphinx.yml new file mode 100644 index 0000000..f3f97c1 --- /dev/null +++ b/.github/workflows/sphinx.yml @@ -0,0 +1,32 @@ +name: Sphinx build + +on: push + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: 'pip' + + - name: Build HTML + uses: ammaraskar/sphinx-action@master + with: + docs-folder: "docs/" + pre-build-command: pip install nox + build-command: nox -s build-docs -- novelai_api + + - name: Deploy on Github Pages + uses: peaceiris/actions-gh-pages@v3 + if: github.ref == 'refs/heads/main' + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs/build/html + + # Add a deploy on RTD ? 
diff --git a/TODO.md b/TODO.md index 2843da3..1ebe766 100644 --- a/TODO.md +++ b/TODO.md @@ -1,7 +1,12 @@ ### FIXME: + - Controlnet mask + +### Reviewing/Fixing: + - Backend mocking ### Work in Progress: + - https://pre-commit.ci/ + - Story handling - Context building - Contribute to flake2pylint - - Backend mocking diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..8b6275a --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= -W --keep-going +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..516006e --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,5 @@ +sphinx==6.1.3 +myst-parser +sphinx-copybutton +sphinx_last_updated_by_git +sphinx-hoverxref diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..874771b --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,64 @@ +# Configuration file for the Sphinx documentation builder. 
+# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +import os +import sys + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "NovelAI API" +# pylint: disable=W0622 +copyright = "2023, Aedial" # noqa (built-in) +author = "Aedial" +release = "0.11.6" + +sys.path.insert(0, os.path.abspath("../..")) +sys.path.insert(0, os.path.abspath("..")) +sys.path.insert(0, os.path.abspath(".")) + + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.extlinks", + "sphinx.ext.viewcode", + "myst_parser", + "sphinx_copybutton", + "sphinx_last_updated_by_git", + "hoverxref.extension", +] + +autodoc_member_order = "bysource" + +extlinks = {"issue": ("https://github.com/sphinx-doc/sphinx/issues/%s", "[issue %s]")} + +suppress_warnings = ["myst.header"] + +copybutton_exclude = ".linenos, .gp, .go" + +hoverxref_auto_ref = True +hoverxref_domains = ["py"] +hoverxref_role_types = { + "hoverxref": "modal", + "ref": "modal", # for hoverxref_auto_ref config + "confval": "modal", # for custom object + "mod": "modal", # for Python Sphinx Domain + "class": "modal", # for Python Sphinx Domain +} + + +templates_path = ["_templates"] +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "classic" +# no asset yet +# html_static_path = ['_static'] diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..7ac9422 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,20 @@ +.. 
NovelAI API documentation master file, created by + sphinx-quickstart on Wed Apr 19 16:51:01 2023. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + + +.. include:: ../../README.md + :parser: myst_parser.sphinx_ + +TODO +==== +.. include:: ../../TODO.md + :parser: myst_parser.sphinx_ + +Reference +========= +.. toctree:: + :maxdepth: 2 + + novelai_api diff --git a/docs/source/novelai_api.BanList.rst b/docs/source/novelai_api.BanList.rst new file mode 100644 index 0000000..84f16bd --- /dev/null +++ b/docs/source/novelai_api.BanList.rst @@ -0,0 +1,7 @@ +novelai\_api.BanList module +=========================== + +.. automodule:: novelai_api.BanList + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.BiasGroup.rst b/docs/source/novelai_api.BiasGroup.rst new file mode 100644 index 0000000..add1570 --- /dev/null +++ b/docs/source/novelai_api.BiasGroup.rst @@ -0,0 +1,7 @@ +novelai\_api.BiasGroup module +============================= + +.. automodule:: novelai_api.BiasGroup + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.GlobalSettings.rst b/docs/source/novelai_api.GlobalSettings.rst new file mode 100644 index 0000000..b6d1512 --- /dev/null +++ b/docs/source/novelai_api.GlobalSettings.rst @@ -0,0 +1,7 @@ +novelai\_api.GlobalSettings module +================================== + +.. automodule:: novelai_api.GlobalSettings + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.Idstore.rst b/docs/source/novelai_api.Idstore.rst new file mode 100644 index 0000000..d93c4ca --- /dev/null +++ b/docs/source/novelai_api.Idstore.rst @@ -0,0 +1,7 @@ +novelai\_api.Idstore module +=========================== + +.. 
automodule:: novelai_api.Idstore + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.ImagePreset.rst b/docs/source/novelai_api.ImagePreset.rst new file mode 100644 index 0000000..544da85 --- /dev/null +++ b/docs/source/novelai_api.ImagePreset.rst @@ -0,0 +1,7 @@ +novelai\_api.ImagePreset module +=============================== + +.. automodule:: novelai_api.ImagePreset + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.Keystore.rst b/docs/source/novelai_api.Keystore.rst new file mode 100644 index 0000000..2c0a814 --- /dev/null +++ b/docs/source/novelai_api.Keystore.rst @@ -0,0 +1,7 @@ +novelai\_api.Keystore module +============================ + +.. automodule:: novelai_api.Keystore + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.NovelAIError.rst b/docs/source/novelai_api.NovelAIError.rst new file mode 100644 index 0000000..5a70b41 --- /dev/null +++ b/docs/source/novelai_api.NovelAIError.rst @@ -0,0 +1,7 @@ +novelai\_api.NovelAIError module +================================ + +.. automodule:: novelai_api.NovelAIError + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.NovelAI_API.rst b/docs/source/novelai_api.NovelAI_API.rst new file mode 100644 index 0000000..05de576 --- /dev/null +++ b/docs/source/novelai_api.NovelAI_API.rst @@ -0,0 +1,7 @@ +novelai\_api.NovelAI\_API module +================================ + +.. automodule:: novelai_api.NovelAI_API + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.Preset.rst b/docs/source/novelai_api.Preset.rst new file mode 100644 index 0000000..610cfce --- /dev/null +++ b/docs/source/novelai_api.Preset.rst @@ -0,0 +1,7 @@ +novelai\_api.Preset module +========================== + +.. 
automodule:: novelai_api.Preset + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.SchemaValidator.rst b/docs/source/novelai_api.SchemaValidator.rst new file mode 100644 index 0000000..1fa1e47 --- /dev/null +++ b/docs/source/novelai_api.SchemaValidator.rst @@ -0,0 +1,7 @@ +novelai\_api.SchemaValidator module +=================================== + +.. automodule:: novelai_api.SchemaValidator + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.StoryHandler.rst b/docs/source/novelai_api.StoryHandler.rst new file mode 100644 index 0000000..a59d145 --- /dev/null +++ b/docs/source/novelai_api.StoryHandler.rst @@ -0,0 +1,7 @@ +novelai\_api.StoryHandler module +================================ + +.. automodule:: novelai_api.StoryHandler + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.Tokenizer.rst b/docs/source/novelai_api.Tokenizer.rst new file mode 100644 index 0000000..f5de5ca --- /dev/null +++ b/docs/source/novelai_api.Tokenizer.rst @@ -0,0 +1,7 @@ +novelai\_api.Tokenizer module +============================= + +.. automodule:: novelai_api.Tokenizer + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.python_utils.rst b/docs/source/novelai_api.python_utils.rst new file mode 100644 index 0000000..2a7b803 --- /dev/null +++ b/docs/source/novelai_api.python_utils.rst @@ -0,0 +1,7 @@ +novelai\_api.python\_utils module +================================= + +.. automodule:: novelai_api.python_utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/novelai_api.rst b/docs/source/novelai_api.rst new file mode 100644 index 0000000..fc3b5a7 --- /dev/null +++ b/docs/source/novelai_api.rst @@ -0,0 +1,20 @@ +novelai\_api package +==================== + +.. 
toctree:: + :maxdepth: 2 + + novelai_api.BanList + novelai_api.BiasGroup + novelai_api.GlobalSettings + novelai_api.Idstore + novelai_api.ImagePreset + novelai_api.Keystore + novelai_api.NovelAIError + novelai_api.NovelAI_API + novelai_api.Preset + novelai_api.SchemaValidator + novelai_api.StoryHandler + novelai_api.Tokenizer + novelai_api.utils + novelai_api.python_utils diff --git a/docs/source/novelai_api.utils.rst b/docs/source/novelai_api.utils.rst new file mode 100644 index 0000000..4be55a4 --- /dev/null +++ b/docs/source/novelai_api.utils.rst @@ -0,0 +1,7 @@ +novelai\_api.utils module +========================= + +.. automodule:: novelai_api.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/novelai_api/GlobalSettings.py b/novelai_api/GlobalSettings.py index 537cfe6..085d7fa 100644 --- a/novelai_api/GlobalSettings.py +++ b/novelai_api/GlobalSettings.py @@ -2,6 +2,7 @@ from novelai_api.BiasGroup import BiasGroup from novelai_api.Preset import Model +from novelai_api.python_utils import expand_kwargs from novelai_api.Tokenizer import Tokenizer @@ -597,21 +598,22 @@ class GlobalSettings: # type completion for __setitem__ and __getitem__ if TYPE_CHECKING: - # generate up to 20 tokens after max_length if an end of sentence if found within these 20 tokens + #: Generate up to 20 tokens after max_length if an end of sentence is found within these 20 tokens generate_until_sentence: bool - # number of logprobs to return for each token. Set to NO_LOGPROBS to disable + #: Number of logprobs to return for each token.
Set to NO_LOGPROBS to disable num_logprobs: int - # apply the BRACKET biases + #: Apply the BRACKET biases ban_brackets: bool - # apply the DINKUS_ASTERISM biases + #: Apply the DINKUS_ASTERISM biases bias_dinkus_asterism: bool - # apply the GENJI_AMBIGUOUS_TOKENS if model is Genji + #: Apply the GENJI_AMBIGUOUS_TOKENS if model is Genji ban_ambiguous_genji_tokens: bool NO_LOGPROBS = -1 _settings: Dict[str, Any] + @expand_kwargs(_DEFAULT_SETTINGS.keys(), (type(e) for e in _DEFAULT_SETTINGS.values())) def __init__(self, **kwargs): object.__setattr__(self, "_settings", {}) diff --git a/novelai_api/ImagePreset.py b/novelai_api/ImagePreset.py index 7e6867b..4d7c54f 100644 --- a/novelai_api/ImagePreset.py +++ b/novelai_api/ImagePreset.py @@ -5,6 +5,8 @@ import random from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union +from novelai_api.python_utils import NoneType, expand_kwargs + class ImageModel(enum.Enum): Anime_Curated = "safe-diffusion" @@ -109,7 +111,7 @@ class ImagePreset: _TYPE_MAPPING = { "quality_toggle": bool, "resolution": (ImageResolution, tuple), - "uc_preset": UCPreset, + "uc_preset": (UCPreset, NoneType), "n_samples": int, "seed": int, "sampler": ImageSampler, @@ -132,42 +134,43 @@ class ImagePreset: # type completion for __setitem__ and __getitem__ if TYPE_CHECKING: - # https://docs.novelai.net/image/qualitytags.html + #: https://docs.novelai.net/image/qualitytags.html quality_toggle: bool - # resolution of the image to generate as ImageResolution or a (width, height) tuple + #: Resolution of the image to generate as ImageResolution or a (width, height) tuple resolution: Union[ImageResolution, Tuple[int, int]] - # default UC to prepend to the UC + #: Default UC to prepend to the UC uc_preset: Union[UCPreset, None] - # number of images to return + #: Number of images to return n_samples: int - # random seed to use for the image. The ith image has seed + i for seed + #: Random seed to use for the image. 
The ith image has seed + i for seed seed: int - # https://docs.novelai.net/image/sampling.html + #: https://docs.novelai.net/image/sampling.html sampler: ImageSampler - # https://docs.novelai.net/image/strengthnoise.html + #: https://docs.novelai.net/image/strengthnoise.html noise: float - # https://docs.novelai.net/image/strengthnoise.html + #: https://docs.novelai.net/image/strengthnoise.html strength: float - # https://docs.novelai.net/image/stepsguidance.html (scale is called Prompt Guidance) + #: https://docs.novelai.net/image/stepsguidance.html (scale is called Prompt Guidance) scale: float - # https://docs.novelai.net/image/stepsguidance.html + #: https://docs.novelai.net/image/stepsguidance.html steps: int - # https://docs.novelai.net/image/undesiredcontent.html + #: https://docs.novelai.net/image/undesiredcontent.html uc: str - # enable SMEA for any sampler (makes Large+ generations + #: Enable SMEA for any sampler (makes Large+ generations manageable) smea: bool - # enable SMEA DYN for any sampler if SMEA is enabled (best for Large+, but not Wallpaper resolutions) + #: Enable SMEA DYN for any sampler if SMEA is enabled (best for Large+, but not Wallpaper resolutions) smea_dyn: bool - # b64-encoded png image for img2img + #: b64-encoded png image for img2img image: str - # controlnet mask gotten by the generate_controlnet_mask method + #: Controlnet mask gotten by the generate_controlnet_mask method controlnet_condition: str - # model to use for the controlnet + #: Model to use for the controlnet controlnet_model: ControlNetModel - # Influence of the chosen controlnet on the image + #: Influence of the chosen controlnet on the image controlnet_strength: float - # reduce the deepfrying effects of high scale (https://twitter.com/Birchlabs/status/1582165379832348672) + #: Reduce the deepfrying effects of high scale (https://twitter.com/Birchlabs/status/1582165379832348672) decrisper: bool + # TODO # dynamic_thresholding_mimic_scale: float # 
dynamic_thresholding_percentile: float @@ -194,6 +197,7 @@ class ImagePreset: # Seed provided when generating an image with seed 0 (default). Seed is also in metadata, but might be a hassle last_seed: int + @expand_kwargs(_TYPE_MAPPING.keys(), _TYPE_MAPPING.values()) def __init__(self, **kwargs): object.__setattr__(self, "_settings", self._DEFAULT.copy()) self.update(kwargs) diff --git a/novelai_api/Preset.py b/novelai_api/Preset.py index 66c84ba..1dfd7cd 100644 --- a/novelai_api/Preset.py +++ b/novelai_api/Preset.py @@ -121,52 +121,52 @@ class Preset(metaclass=_PresetMetaclass): # type completion for __setitem__ and __getitem__ if TYPE_CHECKING: - # preset version, only relevant for .preset files + #: Preset version, only relevant for .preset files textGenerationSettingsVersion: int - # list of tokenized strings that should stop the generation early + #: List of tokenized strings that should stop the generation early # TODO: add possibility for late tokenization stop_sequences: List[List[int]] - # https://naidb.miraheze.org/wiki/Generation_Settings#Randomness_(Temperature) + #: https://naidb.miraheze.org/wiki/Generation_Settings#Randomness_(Temperature) temperature: float - # response length, if no interrupted by a Stop Sequence + #: Response length, if not interrupted by a Stop Sequence max_length: int - # minimum number of token, if interrupted by a Stop Sequence + #: Minimum number of tokens, if interrupted by a Stop Sequence min_length: int - # https://naidb.miraheze.org/wiki/Generation_Settings#Top-K_Sampling + #: https://naidb.miraheze.org/wiki/Generation_Settings#Top-K_Sampling top_k: int - # https://naidb.miraheze.org/wiki/Generation_Settings#Top-A_Sampling + #: https://naidb.miraheze.org/wiki/Generation_Settings#Top-A_Sampling top_a: float - # https://naidb.miraheze.org/wiki/Generation_Settings#Nucleus_Sampling + #: https://naidb.miraheze.org/wiki/Generation_Settings#Nucleus_Sampling top_p: float - #
https://naidb.miraheze.org/wiki/Generation_Settings#Typical_Sampling (https://arxiv.org/pdf/2202.00666.pdf + #: https://naidb.miraheze.org/wiki/Generation_Settings#Typical_Sampling (https://arxiv.org/pdf/2202.00666.pdf) typical_p: float - # https://naidb.miraheze.org/wiki/Generation_Settings#Tail-Free_Sampling + #: https://naidb.miraheze.org/wiki/Generation_Settings#Tail-Free_Sampling tail_free_sampling: float - # https://arxiv.org/pdf/1909.05858.pdf + #: https://arxiv.org/pdf/1909.05858.pdf repetition_penalty: float - # range (in tokens) the repetition penalty covers (https://arxiv.org/pdf/1909.05858.pdf) + #: Range (in tokens) the repetition penalty covers (https://arxiv.org/pdf/1909.05858.pdf) repetition_penalty_range: int - # https://arxiv.org/pdf/1909.05858.pdf + #: https://arxiv.org/pdf/1909.05858.pdf repetition_penalty_slope: float - # https://platform.openai.com/docs/api-reference/parameter-details + #: https://platform.openai.com/docs/api-reference/parameter-details repetition_penalty_frequency: float - # https://platform.openai.com/docs/api-reference/parameter-details + #: https://platform.openai.com/docs/api-reference/parameter-details repetition_penalty_presence: float - # list of tokens that are excluded from the repetition penalty (useful for colors and the likes) + #: List of tokens that are excluded from the repetition penalty (useful for colors and the likes) repetition_penalty_whitelist: list - # https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig.length_penalty + #: https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig length_penalty: float - # https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig.diversity_penalty + #: https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig diversity_penalty: float - # list of Order to set the sampling order + #: list of Order to 
set the sampling order order: List[Union[Order, int]] - # https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig.pad_token_id + #: https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig pad_token_id: int - # https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig.bos_token_id + #: https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig bos_token_id: int - # https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig.eos_token_id + #: https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig eos_token_id: int - # https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig.max_time(float, + #: https://huggingface.co/docs/transformers/main_classes/text_generation#transformers.GenerationConfig max_time: int _officials: Dict[str, Dict[str, "Preset"]] diff --git a/novelai_api/Tokenizer.py b/novelai_api/Tokenizer.py index 7905a33..f153e21 100644 --- a/novelai_api/Tokenizer.py +++ b/novelai_api/Tokenizer.py @@ -1,7 +1,6 @@ from pathlib import Path from typing import List, Union -import sentencepiece import tokenizers from novelai_api.ImagePreset import ImageModel @@ -48,18 +47,11 @@ def get_tokenizer_name(cls, model: Model) -> str: # TODO: check differences from NAI tokenizer (from my limited testing, there is None) _CLIP_TOKENIZER = SimpleTokenizer() - _NERDSTASH_PATH = tokenizers_path / "nerdstash_tokenizer.model" - _NERDSTASH_TOKENIZER = sentencepiece.SentencePieceProcessor() - _NERDSTASH_TOKENIZER.LoadFromFile(str(_NERDSTASH_PATH)) - _NERDSTASH_TOKENIZER.encode = _NERDSTASH_TOKENIZER.EncodeAsIds - _NERDSTASH_TOKENIZER.decode = _NERDSTASH_TOKENIZER.DecodeIds - _tokenizers = { "gpt2": _GPT2_TOKENIZER, "gpt2-genji": _GENJI_TOKENIZER, "pile": _PILE_TOKENIZER, "clip": 
_CLIP_TOKENIZER, - "nerdstash": _NERDSTASH_TOKENIZER, } @classmethod @@ -77,7 +69,7 @@ def encode(cls, model: AnyModel, o: str) -> List[int]: if isinstance(tokenizer, tokenizers.Tokenizer): return tokenizer.encode(o).ids - if isinstance(tokenizer, (sentencepiece.SentencePieceProcessor, SimpleTokenizer)): + if isinstance(tokenizer, SimpleTokenizer): return tokenizer.encode(o) raise ValueError(f"Tokenizer {tokenizer} ({tokenizer_name}) not recognized") diff --git a/novelai_api/_high_level.py b/novelai_api/_high_level.py index a48e76c..de9b5d6 100644 --- a/novelai_api/_high_level.py +++ b/novelai_api/_high_level.py @@ -9,7 +9,8 @@ from novelai_api.Keystore import Keystore from novelai_api.NovelAIError import NovelAIError from novelai_api.Preset import Model, Preset -from novelai_api.utils import assert_type, compress_user_data, encrypt_user_data, get_access_key +from novelai_api.python_utils import assert_type +from novelai_api.utils import compress_user_data, encrypt_user_data, get_access_key class HighLevel: diff --git a/novelai_api/_low_level.py b/novelai_api/_low_level.py index e852daa..e59d5e2 100644 --- a/novelai_api/_low_level.py +++ b/novelai_api/_low_level.py @@ -11,9 +11,10 @@ from novelai_api.ImagePreset import ControlNetModel, ImageGenerationType, ImageModel from novelai_api.NovelAIError import NovelAIError from novelai_api.Preset import Model +from novelai_api.python_utils import NoneType, assert_len, assert_type from novelai_api.SchemaValidator import SchemaValidator from novelai_api.Tokenizer import Tokenizer -from novelai_api.utils import NoneType, assert_len, assert_type, tokens_to_b64 +from novelai_api.utils import tokens_to_b64 # === INTERNALS === # SSE_FIELDS = ["event", "data", "id", "retry"] diff --git a/novelai_api/python_utils.py b/novelai_api/python_utils.py new file mode 100644 index 0000000..a043c6b --- /dev/null +++ b/novelai_api/python_utils.py @@ -0,0 +1,49 @@ +import inspect +import operator +from typing import Callable, Iterable, Union 
+ +NoneType: type = type(None) + + +def assert_type(expected, **types): + for k, v in types.items(): + assert isinstance(v, expected), f"Expected type '{expected}' for {k}, but got type '{type(v)}'" + + +operator_to_str = { + operator.eq: "exactly {} characters", + operator.lt: "less than {} characters", + operator.le: "{} characters or less", + operator.gt: "more than {} characters", + operator.ge: "{} characters or more", +} + + +def assert_len(expected, op: operator = operator.eq, **values): + op_str = operator_to_str[op].format(expected) + + for k, v in values.items(): + assert v is None or op(len(v), expected), f"'{k}' should be {op_str}, got length of {len(v)}'" + + +def expand_kwargs(keys: Iterable[str], types: Union[Iterable[type], Iterable[type]]): + types = [set(t) - {int} if isinstance(t, (tuple, list, set)) and int in t and float in t else t for t in types] + types = [Union[tuple(t)] if isinstance(t, (tuple, list, set)) else t for t in types] + + kwargs_params = [inspect.Parameter(k, inspect.Parameter.KEYWORD_ONLY, annotation=v) for k, v in zip(keys, types)] + + def wrapper(func: Callable): + sig = inspect.signature(func) + params = list(sig.parameters.values()) + + # find the **kwargs + kwargs_pos = [i for i, param in enumerate(params) if param.kind is param.VAR_KEYWORD] + if not kwargs_pos: + raise ValueError(f"Couldn't find **kwargs for function {func}") + pos = kwargs_pos[0] + + func.__signature__ = sig.replace(parameters=[*params[:pos], *kwargs_params, *params[pos + 1 :]]) + + return func + + return wrapper diff --git a/novelai_api/utils.py b/novelai_api/utils.py index 11266bc..906f436 100644 --- a/novelai_api/utils.py +++ b/novelai_api/utils.py @@ -1,5 +1,4 @@ import json -import operator from base64 import b64decode, b64encode, urlsafe_b64encode from hashlib import blake2b from typing import Any, Dict, Iterable, List, Optional, Tuple, Union @@ -14,32 +13,9 @@ from novelai_api.Keystore import Keystore from novelai_api.NovelAIError import 
NovelAIError from novelai_api.Preset import Model, Preset +from novelai_api.python_utils import assert_type from novelai_api.Tokenizer import Tokenizer -# boilerplate -NoneType: type = type(None) - - -def assert_type(expected, **types): - for k, v in types.items(): - assert isinstance(v, expected), f"Expected type '{expected}' for {k}, but got type '{type(v)}'" - - -operator_to_str = { - operator.eq: "exactly {} characters", - operator.lt: "less than {} characters", - operator.le: "{} characters or less", - operator.gt: "more than {} characters", - operator.ge: "{} characters or more", -} - - -def assert_len(expected, op: operator = operator.eq, **values): - op_str = operator_to_str[op].format(expected) - - for k, v in values.items(): - assert v is None or op(len(v), expected), f"'{k}' should be {op_str}, got length of {len(v)}'" - # API utils def argon_hash(email: str, password: str, size: int, domain: str) -> str: diff --git a/noxfile.py b/noxfile.py index 9eb17fe..6147f2a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,4 +1,5 @@ import json +import pathlib import shutil import nox @@ -91,3 +92,33 @@ def run(session: nox.Session): for file in files: session.run("python", file) + + +@nox.session(name="build-docs") +def build_docs(session: nox.Session): + docs_path = pathlib.Path(__file__).parent / "docs" + source_path = docs_path / "source" + + install_package(session) + session.install("-r", str(docs_path / "requirements.txt")) + + paths = [pathlib.Path(path) for path in session.posargs] + if not paths: + raise ValueError("No path provided (put the path(s) after the --)") + + for path in paths: + if not path.exists(): + raise ValueError(f"Path {path.resolve()} does not exist") + + old_files_in_source = set(sorted(source_path.iterdir())) + for path in paths: + session.run("sphinx-apidoc", "-o", str(source_path.resolve()), "-Te", "-d", "2", str(path.resolve())) + new_files_in_source = set(sorted(source_path.iterdir())) + + source_diff = new_files_in_source - 
old_files_in_source + if source_diff: + print("New files generated:", ", ".join(f"'{f}'" for f in source_diff)) + print("Update the docs accordingly") + + with session.chdir(docs_path): + session.run("make", "html", external=True)