diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ba9267d8..8af069ae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: - id: generate_requirements.txt name: Generate requirements.txt entry: python -m scripts.export_requirements --docs - files: '(pyproject.toml|poetry.lock|requirements.txt|scripts\/export\_requirements\.py|docs\/.requirements.txt)$' + files: '(pyproject.toml|poetry.lock|requirements.txt|constraints.txt|scripts\/export\_requirements\.py|docs\/.requirements.txt)$' language: python pass_filenames: false require_serial: true diff --git a/Dockerfile b/Dockerfile index 6083d266..2f822cad 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,6 +3,9 @@ FROM python:3.9-slim # Set pip to have cleaner logs and no saved cache ENV PIP_NO_CACHE_DIR=false +# Update pip +RUN pip install -U pip + # Create the working directory WORKDIR /modmail diff --git a/docs/addons/README.md b/docs/addons/README.md new file mode 100644 index 00000000..3cfbd208 --- /dev/null +++ b/docs/addons/README.md @@ -0,0 +1,48 @@ +# Addons + +Addons are our built-in system to extend the features of the bot in an officially supported manner. + +Modmail, in its most basic form, is simple: relay messages to and from users to staff members. +However, we acknowledge that its not a one-size-fits-all solution. +Some communities need a few more features than others. +That's where the addon system fills the void. + +The addon system currently supports only one kind of addon, plugins. +This guide will help you set up a respository to create your own addons. +Once its set up, please refer to the [plugin creation guide][making-plugins] for more details. + +!!!note + This guide is for those who want to **write** addons. If you are looking to use an addon, please view our guide [on installing them][installation]. 
+ +## Guides + +- [Installation] +- [Repo Setup](#repo-setup) +- [Creating Plugins][making-plugins] + +## Repo Setup + +In order to be able to install addons, a few things are required. +Each addon type will have its own requirements in addition to the following. + +### Overall File Structure + +At the base of the addon system is the source. Sources have a folder structure like the following: + +```sh +. +├── Plugins/ +└── README.md +``` + +In this structure, this repository is holding addons of a plugin type. The structure of the Plugins folder itself is detailed in the [creating plugins guide][making-plugins]. + +### Hosting + +All addons must be hosted on either github or gitlab as of now. + +!!!note + Addons currently do not automatically update, and need to be re-installed on each run. This will be fixed once the database client exists. + +[installation]: ./installation.md +[making-plugins]: ./plugins.md diff --git a/docs/addons/installation.md b/docs/addons/installation.md new file mode 100644 index 00000000..3056174c --- /dev/null +++ b/docs/addons/installation.md @@ -0,0 +1,65 @@ +# Installation + +!!!note + If you are looking to write addons, check out our [writing addons][addon-guide] guide. + +## Plugins + +Plugins are discord.py extensions which expand the functionality of the bot beyond its core feature: relaying messages back and forth between staff and members. + +We've done our best to make this system easy to use for both novice and experienced developers--installing plugins should require no programming knowledge at all. + +By default, modmail will install plugins hosted on [github.com](https://github.com), but also supports installing from [gitlab](https://gitlab.com). 
+ +This may look complex, but it supports a wide variety of options, as demonstrated below + +```fix +?plugin install [git-host] / [@ref] +?plugin install [@ref] +``` + +### Git-host style + +> `[git-host] / [@ref]` + +#### Git-host (Optional) + +Valid options are: + +- `github` +- `gitlab` + +Default: + +- `github` + +#### User/Repo + +This is the user and the respository hosted on a valid git-host. + +In the link , the user and repo are `discord-modmail/addons`. + +#### Name + +This is the addon name, it is not allowed to contain `@`. +By default, this is the plugin folder name, unless it is defined in the plugin.toml file. +A repository should provide a list of their plugins either in a plugin readme, or the full repository readme. + +#### Ref + +This is the git reference, leave blank to use the repository default. +If you would like to use a specific commit, branch, or tag, then provide it preceeded by a `@`. +For example, to use tagged version 1.2, `@v1.2` would install from that tag. + +### Link + +> ` [@ref]` + +If the above githost format seems too complicated, its possible to just copy the url to the repo +(ex. https://github.com/discord-modmail/addons) and use that for the link. + +The name of the plugin still must be provided, however. +The @ref can also be provided, if installating a specific version is desired. + + +[addon-guide]: ./README.md diff --git a/docs/addons/plugins.md b/docs/addons/plugins.md new file mode 100644 index 00000000..46fe7fbc --- /dev/null +++ b/docs/addons/plugins.md @@ -0,0 +1,134 @@ +# Creating Plugins + +If you are looking to write a feature to extend the functionality of your modmail bot, plugins are *the* +supported way to add additional code to modmail. + +In short, plugins are discord.py extensions which expand the functionality of the bot beyond its built-in duties. + + +!!!Tip + This builds on the [addon structure documentation][addon-guide]. 
Please ensure you have a solid understanding of the basic repository structure beforehand. + +!!!note + This guide is **not** how to install plugins, please view our [installation guide][installation] for that. + +## File Structure Overview + +This details the structure of a plugin addon. + +```sh +Plugins/ +├── react_to_contact +│ ├── listener.py +│ └── react_to_contact.py +├── verify_contact +│ └── verify_contact.py +└── plugin.toml +``` + +Even though there are three `.py` files, this repository contains two plugins. Each top level folder in the Plugins folder contains one plugin. +The number of py files in each plugins folder does not matter, there are still two plugins here. + +One plugin here is named `react_to_contact`, the other is `verify_contact` + +However, those are not user friendly names. It would be a lot easier for the end user to reference with `React to Contact`, and for the user interface to refer to it as such. + +To do so, a name can be provided in the plugin.toml file. + +## plugin.toml + +There are several variables which can be configured by providing a plugin.toml file. + +If you don't already know what toml is, [check out their docs](https://toml.io/) + +!!!tip + `plugin.toml` is supplemental to the list of folders. This means that all plugins in the repository are installable at any time. Providing a plugin.toml does not mean that any plugins *not* in the toml are not included anymore. + + This has the advantage of being able to use `plugin.toml` to change the name of one plugin, without having to add all other plugins to the toml. + + +### Options + +A full `plugin.toml` for the above repository may look like this: + +```toml +[[plugins]] +name = 'React to Contact' +description = 'Provides a permanent message where the user can react to open a thread' +directory = 'react_to_contact' + +[[plugins]] +name = 'Verify Contact' +description = 'Prevents the user from accidently opening a thread by asking if they are sure.' 
+directory = 'verify_contact' +``` + +The name and directory are the only keys in use today, +the description is not yet used. + +The `directory` key is required, if wanting to set any other settings for a plugin. + +!!!tip + `directory` is aliased to `folder`. Both keys are valid, but if the `directory` key exists it will be used and `folder` will be ignored. + +Name is optional, and defaults to the directory if not provided. + +!!!warning + Capitals matter. Both the `plugin.toml` file and `[[plugins]]` table ***must*** be lowercase. + This also goes for all keys and directory arguments--they must match the capitials of the existing directory. + +### Dependencies + +If the dependencies that the bot is installed with, it is possible to declare a dependency and it will be installed when installing the plugin. + +!!! Waring + For the most part, you won't need to use this. But if you absolutely must use an additional dependency which isn't part of the bot, put it in this array. + +This is an array of arguments which should be just like they are being passed to pip. + +```toml +[[plugins]] +directory = 'solar_system' +dependencies = ['earthlib==0.2.2'] +``` + +This will install earthlib 0.2.2. + +## Code + +Now that we have an understanding of where the plugin files go, and how to configure them, its time to write their code. + +### `PluginCog` + +All plugin cogs ***must*** inherit from `PluginCog`. + +If plugin cogs do not inherit from this class, they will fail to load. + +A majority of the needed modmail classes have been imported into helpers for your convinence. + +```python +from modmail.addons import helpers + +# Cog +helpers.PluginCog + +# Extension Metadata +helpers.ExtMetadata + +### For Typehints +# bot class +helpers.ModmailBot + +# logger +helpers.ModmailLogger +``` + +### `ExtMetadata` + +There is a system where extensions can declare load modes. + +There is a longer write up on it [here][ext_metadata]. 
+ +[addon-guide]: ./README.md +[ext_metadata]: /contributing/creating_an_extension/#bot_mode-and-extmetadata +[installation]: ./installation.md#plugins diff --git a/docs/changelog.md b/docs/changelog.md index 663988f7..136f0fb8 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -14,6 +14,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Threads system (#53) +- Plugin installation and uninstall system (#69) - Messages can now be relayed between a user and a server. - NOTE: There is not a database yet, so none of these messages are stored. - Added Dispatcher system, although it is not hooked into important features like thread creation yet. (#71) @@ -43,6 +44,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Running the bot is still the same method, but it loads extensions and plugins now. - `bot.start()` can also be used if already in a running event loop. Keep in mind using it will require handling loop errors, as run() does this automatically. +- Disabled some plugin management commands if PLUGIN_DEV mode is not set (#69) ### Internal diff --git a/docs/contributing/creating_an_extension.md b/docs/contributing/creating_an_extension.md new file mode 100644 index 00000000..411728f1 --- /dev/null +++ b/docs/contributing/creating_an_extension.md @@ -0,0 +1,83 @@ +# Creating an Extension + +Welcome! + +Please note that extensions are cogs are different things. Extensions are files which add features to the bot, +and a cog is way to group commands and features within a file. + +This is an addendum to the [discord.py guide](https://discordpy.readthedocs.io/en/master/ext/commands/extensions.html) on how to write extensions. +This guide below details additional information which is not part of discord.py. + +**There is one major change from discord.py**: +Cogs **must** inherit from `ModmailCog`. +If this does not happen, the bot will let you know. 
+ +ModmailCog is defined in `modmail/utils/cogs.py`. + +## BOT_MODE and `ExtMetadata` + +In general, an extension does not need to use the feature of an extension metadata. + +On every extension of the bot, an `EXT_METADATA` constant should exist, and it should be an instance of `ExtMetadata`. +The `ExtMetadata` class is defined in `modmail/utils/cogs.py`, along with `BotModeEnum`. + +It should be sufficent to have an EXT_METADATA variable declared at the top of the file as an instance of ExtMetadata. + +```python +from modmail.utils.cogs import ExtMetadata + +EXT_METADATA = ExtMetadata() +``` + +### `ExtMetadata` + +The purpose of ExtMetadata is to define metadata about an extension. Currently, it supports two items of metadata. + +- `load_if_mode` + - used to determine if this extension should be loaded at runtime. +- `no_unload` (Not supported by plugins) + - prevents an extension from being unloaded by the `?ext unload` command. This is mainly used to keep the extension manager from unloading itself. + +`no_unload` is pretty self explanatory, pass either True or False and the extension will either be blocked from being unloaded, or allowed to unload. +This only has an impact if the current bot mode is DEVELOP. Note that this does prevent the developer from *reloading* the extension. + +### `load_if_mode` + +`load_if_mode` currently has three modes, which each have their own uses.: + +- `PRODUCTION` + - The default mode, the bot is always in this mode. +- `DEVELOP` + - Bot developer. Enables the extension management commands. +- `PLUGIN_DEV` + - Plugin developer. Enables lower-level plugin commands. + +!!!tip + To enable these modes, set the corresponding environment variable to a truthy value. eg `DEVELOP=1` in your project `.env` file will enable the bot developer mode. + +To set an extension to only load on one cog, set the load_if_mode param when initialising a ExtMetadata object. 
+ +```python +from modmail.utils.cogs import BotModeEnum, ExtMetadata + +EXT_METADATA = ExtMetadata(load_if_mode=BotModeEnum.DEVELOP) +``` + +*This is not a complete extension and will not run if tested!* + +This `EXT_METADATA` variable above declares the extension will only run if a bot developer is running the bot. + +However, we may want to load our extension normally but have a command or two which only load in specific modes. + +### `BOT_MODE` + +The bot exposes a BOT_MODE variable which contains a bitmask of the current mode. This is created with the BotModeEnum. +This allows code like this to determine if the bot mode is a specific mode. + +```python +from modmail.utils.cogs import BOT_MODE, BotModeEnum + +is_plugin_dev_enabled = BOT_MODE & BotModeEnum.PLUGIN_DEV +``` + +This is used in the plugin_manager extension to determine if the lower-level commands which manage plugin extensions directly should be enabled. diff --git a/mkdocs.yml b/mkdocs.yml index 1712cdc8..c08b80a7 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -56,10 +56,18 @@ plugins: # Page tree nav: -- Home: README.md -- Contributing: contributing.md -- Security: security.md -- Changelog: changelog.md + - Home: README.md + - Changelog: changelog.md + - Security: security.md + - Addons: + - Overview: addons/README.md + - Installation: addons/installation.md + - Creating Plugins: addons/plugins.md + - Contributing: + - Guidelines: contributing.md + - Creating an Extension: contributing/creating_an_extension.md + + # Extensions markdown_extensions: diff --git a/modmail/addons/__init__.py b/modmail/addons/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modmail/addons/converters.py b/modmail/addons/converters.py new file mode 100644 index 00000000..6e9edd2e --- /dev/null +++ b/modmail/addons/converters.py @@ -0,0 +1,61 @@ +import logging +import re +from typing import TYPE_CHECKING, Tuple + +from discord.ext import commands + +from modmail.addons.models import AddonSource, Plugin, 
SourceTypeEnum + + +if TYPE_CHECKING: + from modmail.log import ModmailLogger + +LOCAL_REGEX = re.compile(r"^\@local (?P[^@\s]+)$") +ZIP_REGEX = re.compile( + r"^(?:https?:\/\/)?(?P(?P.*\..+?)\/(?P.*\.zip)) (?P[^@\s]+)$" +) +REPO_REGEX = re.compile( + r"^(?:(?:https?:\/\/)?(?Pgithub|gitlab)(?:\.com\/| )?)?" + # github allows usernames from 1 to 39 characters, and projects of 1 to 100 characters + # gitlab allows 255 characters in the username, and 255 max in a project path + # see https://gitlab.com/gitlab-org/gitlab/-/issues/197976 for more details + r"(?P[a-zA-Z0-9][a-zA-Z0-9\-]{0,254})\/(?P[\w\-\.]{1,100}) " + r"(?P[^@]+[^\s@])(?: \@(?P[\w\.\-\S]*))?" +) + +logger: "ModmailLogger" = logging.getLogger(__name__) + + +class AddonConverter(commands.Converter): + """A converter that takes an addon source, and gets a Addon object from it.""" + + async def convert(self, ctx: commands.Context, argument: str) -> None: + """Convert an argument into an Addon.""" + raise NotImplementedError("Inheriting classes must overwrite this method.") + + +class SourceAndPluginConverter(AddonConverter): + """A plugin converter that takes a source, addon name, and returns a Plugin.""" + + async def convert(self, _: commands.Context, argument: str) -> Tuple[Plugin, AddonSource]: + """Convert a provided plugin and source to a Plugin.""" + if match := LOCAL_REGEX.match(argument): + logger.debug("Matched as a local file, creating a Plugin without a source url.") + addon = match.group("addon") + source = AddonSource(None, SourceTypeEnum.LOCAL) + elif match := ZIP_REGEX.fullmatch(argument): + logger.debug("Matched as a zip, creating a Plugin from zip.") + addon = match.group("addon") + source = AddonSource.from_zip(match.group("url")) + elif match := REPO_REGEX.fullmatch(argument): + addon = match.group("addon") + source = AddonSource.from_repo( + match.group("user"), + match.group("repo"), + match.group("reflike"), + match.group("githost") or "github", + ) + else: + raise 
commands.BadArgument(f"{argument} is not a valid source and plugin.") + + return Plugin(addon), source diff --git a/modmail/addons/errors.py b/modmail/addons/errors.py new file mode 100644 index 00000000..d4dc319a --- /dev/null +++ b/modmail/addons/errors.py @@ -0,0 +1,28 @@ +class AddonError(Exception): + """Base Addon utils and extension exception.""" + + pass + + +class PluginError(AddonError): + """General Plugin error.""" + + pass + + +class NoPluginDirectoryError(PluginError): + """No plugin directory exists.""" + + pass + + +class PluginNotFoundError(PluginError): + """Plugins are not found and can therefore not be actioned on.""" + + pass + + +class NoPluginTomlFoundError(PluginError): + """Raised when a plugin.toml file is expected to exist but does not exist.""" + + pass diff --git a/modmail/plugin_helpers.py b/modmail/addons/helpers.py similarity index 52% rename from modmail/plugin_helpers.py rename to modmail/addons/helpers.py index 4770584d..e8b2f1a3 100644 --- a/modmail/plugin_helpers.py +++ b/modmail/addons/helpers.py @@ -1,14 +1,24 @@ +from __future__ import annotations + from modmail.bot import ModmailBot from modmail.log import ModmailLogger -from modmail.utils.cogs import BotModes, ExtMetadata, ModmailCog +from modmail.utils.cogs import BOT_MODE, BotModeEnum, ExtMetadata +from modmail.utils.cogs import ModmailCog as _ModmailCog -__all__ = ["PluginCog", ModmailBot, ModmailLogger, BotModes, ExtMetadata] +__all__ = ( + "PluginCog", + BOT_MODE, + BotModeEnum, + ExtMetadata, + ModmailBot, + ModmailLogger, +) -class PluginCog(ModmailCog): +class PluginCog(_ModmailCog): """ - The base class that all cogs must inherit from. + The base class that all Plugin cogs must inherit from. A cog is a collection of commands, listeners, and optional state to help group commands together. 
More information on them can be found on diff --git a/modmail/addons/models.py b/modmail/addons/models.py new file mode 100644 index 00000000..809d1eec --- /dev/null +++ b/modmail/addons/models.py @@ -0,0 +1,262 @@ +from __future__ import annotations + +import logging +import urllib.parse +from enum import Enum +from typing import TYPE_CHECKING, Any, Dict, List, Literal, NoReturn, Optional, Set, Union + +from discord.ext import commands +from rapidfuzz import fuzz, process + +from modmail.utils.extensions import ModuleDict + + +if TYPE_CHECKING: + import pathlib + import zipfile + + from modmail.log import ModmailLogger + +logger: ModmailLogger = logging.getLogger(__name__) + +PLUGINS = None + +SCORE_CUTOFF = 69 + + +class SourceTypeEnum(Enum): + """Which source an addon is from.""" + + ZIP = 0 + REPO = 1 + LOCAL = 2 + + +class GitHost: + """Base class for git hosts.""" + + headers = {} + base_api_url: str + repo_api_url: str + zip_archive_api_url: str + zip_archive_api_url_with_ref: str + + +class Github(GitHost): + """Github's api.""" + + headers = {"Accept": "application/vnd.github.v3+json"} + base_api_url = "https://api.github.com" + repo_api_url = f"{base_api_url}/repos/{{user}}/{{repo}}" + zip_archive_api_url = f"{repo_api_url}/zipball" + zip_archive_api_url_with_ref = f"{zip_archive_api_url}/{{ref}}" + + +class Gitlab(GitHost): + """Gitlab's api.""" + + base_api_url = "https://gitlab.com/api/v4" + repo_api_url = f"{base_api_url}/projects/{{user}}%2F{{repo}}" + zip_archive_api_url = f"{repo_api_url}/repository/archive.zip" + zip_archive_api_url_with_ref = f"{zip_archive_api_url}?sha={{ref}}" + + +Host = Literal["github", "gitlab"] + + +class AddonSource: + """ + Represents an AddonSource. + + These could be from github, gitlab, or hosted zip file. 
+ """ + + if TYPE_CHECKING: + repo: Optional[str] + user: Optional[str] + reflike: Optional[str] + githost: Optional[Host] + githost_api: Optional[GitHost] + + domain: Optional[str] + path: Optional[str] + + cache_file: Optional[Union[zipfile.Path, pathlib.Path]] + + def __init__(self, zip_url: str, type: SourceTypeEnum): + """Initialize the AddonSource.""" + self.zip_url = zip_url + if self.zip_url is not None: + parsed_url = urllib.parse.urlparse(self.zip_url) + self.zip_url = urllib.parse.urlunparse(parsed_url) + self.domain = parsed_url.netloc + self.path = parsed_url.path + else: + self.domain = None + self.path = None + + self.source_type = type + + @classmethod + def from_repo(cls, user: str, repo: str, reflike: str = None, githost: Host = "github") -> AddonSource: + """Create an AddonSource from a repo.""" + if githost == "github": + Host = Github() # noqa: N806 + elif githost == "gitlab": + Host = Gitlab() # noqa: N806 + else: + raise TypeError(f"{githost} is not a valid host.") + if reflike is not None: + zip_url = Host.zip_archive_api_url_with_ref.format(user=user, repo=repo, ref=reflike) + else: + zip_url = Host.zip_archive_api_url.format(user=user, repo=repo) + + source = cls(zip_url, SourceTypeEnum.REPO) + source.repo = repo + source.user = user + source.reflike = reflike + source.githost = githost + source.githost_api = Host + return source + + @classmethod + def from_zip(cls, url: str) -> AddonSource: + """Create an AddonSource from a zip file.""" + source = cls(url, SourceTypeEnum.ZIP) + return source + + def __repr__(self) -> str: # pragma: no cover + return f"" + + +class Addon: + """Base class of an addon which make the bot extendable.""" + + if TYPE_CHECKING: + name: str + description: Optional[str] + min_bot_version: str + + def __init__(self) -> NoReturn: + raise NotImplementedError("Inheriting classes need to implement their own init") + + def __hash__(self): + return hash(self.name) + + +class Plugin(Addon): + """An addon which is a 
plugin.""" + + if TYPE_CHECKING: + folder_name: str + folder_path: Optional[pathlib.Path] + extra_kwargs: Dict[str, Any] + installed_path: Optional[pathlib.Path] + extension_files: List[pathlib.Path] + modules: ModuleDict + + def __init__( + self, + folder: str, + description: Optional[str] = None, + *, + dependencies: Optional[List[str]] = None, + enabled: bool = True, + folder_path: Optional[pathlib.Path] = None, + local: bool = False, + min_bot_version: Optional[str] = None, + name: Optional[str] = None, + **kw, + ): + self.folder_name = folder + self.description = description + self.name = self.folder_name if name is None else name + self.folder_path = folder_path + self.min_bot_version = min_bot_version + self.local = local + self.enabled = enabled + + self.dependencies = dependencies or [] + + self.modules = {} + + # store any extra kwargs here + # this is to ensure backwards compatiablilty with plugins that support older versions, + # but want to use newer toml options + self.extra_kwargs = kw + + def __str__(self): + return self.name + + def __repr__(self): # pragma: no cover + return ( + f"" + ) + + def __hash__(self): + return hash(self.folder_name) + + def __eq__(self, other: Any): + return hash(self) == hash(other) + + @classmethod + async def convert(cls, ctx: commands.Context, argument: str) -> Plugin: + """Converts a plugin into a full plugin with a path and all other attributes.""" + # have to do this here to prevent a recursive import + global PLUGINS + if PLUGINS is None: + logger.debug("Lazy import of global PLUGINS from modmail.addons.plugins") + from modmail.addons.plugins import PLUGINS + + loaded_plugs: Set[Plugin] = PLUGINS + + # its possible to have a plugin with the same name as a folder of a plugin + # folder names are the priority + secondary_names = {} + for plug in loaded_plugs: + if argument == plug.name: + return plug + secondary_names[plug.folder_name] = plug + + if argument in secondary_names: + return secondary_names[argument] + + 
# Determine close plugins + # Using a dict to prevent duplicates + arg_mapping: Dict[str, Plugin] = {} + for plug in loaded_plugs: + for name in plug.name, plug.folder_name: + arg_mapping[name] = plug + + result = process.extract( + argument, + arg_mapping.keys(), + scorer=fuzz.ratio, + score_cutoff=SCORE_CUTOFF, + ) + logger.debug(f"{result = }") + + if not len(result): + raise commands.BadArgument(f"`{argument}` is not in list of installed plugins.") + + all_fully_matched_plugins: Set[Plugin] = set() + all_partially_matched_plugins: Dict[Plugin, float] = dict() + for res in result: + all_partially_matched_plugins[arg_mapping[res[0]]] = res[1] + + if res[1] == 100: + all_fully_matched_plugins.add(arg_mapping[res[0]]) + + if len(all_fully_matched_plugins) != 1: + suggested = "" + for plug, percent in all_partially_matched_plugins.items(): + suggested += f"`{plug.name}` ({round(percent)}%)\n" + raise commands.BadArgument( + f"`{argument}` is not in list of installed plugins." + f"\n\n**Suggested plugins**:\n{suggested}" + if len(suggested) + else "" + ) + + return await cls.convert(ctx, all_fully_matched_plugins.pop().name) diff --git a/modmail/addons/plugins.py b/modmail/addons/plugins.py new file mode 100644 index 00000000..84289598 --- /dev/null +++ b/modmail/addons/plugins.py @@ -0,0 +1,366 @@ +# original source: +# https://github.com/python-discord/bot/blob/a8869b4d60512b173871c886321b261cbc4acca9/bot/utils/extensions.py +# MIT License 2021 Python Discord +""" +Helper utilities for managing plugins. + +TODO: Expand file to download plugins from github and gitlab from a list that is passed. 
+""" +from __future__ import annotations + +import asyncio +import glob +import importlib +import importlib.util +import inspect +import logging +import os +import pathlib +import sys +from asyncio import subprocess +from collections.abc import Generator +from typing import List, Optional, Set, Tuple + +import atoml + +from modmail import plugins +from modmail.addons.errors import NoPluginDirectoryError, NoPluginTomlFoundError +from modmail.addons.models import Plugin +from modmail.log import ModmailLogger +from modmail.utils.cogs import ExtMetadata +from modmail.utils.extensions import ModuleName, unqualify + + +__all__ = ( + "VALID_ZIP_PLUGIN_DIRECTORIES", + "BASE_PLUGIN_PATH", + "PLUGINS", + "PLUGIN_TOML", + "LOCAL_PLUGIN_TOML", + "parse_plugin_toml_from_string", + "update_local_toml_enable_or_disable", + "find_partial_plugins_from_dir", + "find_plugins", + "install_dependencies", + "walk_plugin_files", +) + + +logger: ModmailLogger = logging.getLogger(__name__) + +VALID_ZIP_PLUGIN_DIRECTORIES = ["plugins", "Plugins"] + +BASE_PLUGIN_PATH = pathlib.Path(plugins.__file__).parent.resolve() + +PLUGINS: Set[Plugin] = set() + +PLUGIN_TOML = "plugin.toml" + +LOCAL_PLUGIN_TOML = BASE_PLUGIN_PATH / "local.toml" + +PYTHON_INTERPRETER: Optional[str] = sys.executable + +PIP_NO_ROOT_WARNING = ( + "WARNING: Running pip as the 'root' user can result in broken permissions and " + "conflicting behaviour with the system package manager. 
" + "It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv" +).encode() + + +async def install_dependencies(plugin: Plugin) -> Optional[str]: + """Installs provided dependencies from a plugin.""" + # check if there are any plugins to install + if not len(plugin.dependencies): + return None + + if PYTHON_INTERPRETER is None: + raise FileNotFoundError("Could not locate python interpreter.") + + # This uses the check argument with our exported requirements.txt + # to make pip promise that anything it installs won't change + # the packages that the bot requires to have installed. + pip_install_args = [ + "-m", + "pip", + "--no-input", + "--no-color", + "install", + "--constraint", + str(BASE_PLUGIN_PATH.parent / "constraints.txt"), + ] + proc = await asyncio.create_subprocess_exec( + PYTHON_INTERPRETER, + *pip_install_args, + *plugin.dependencies, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = await proc.communicate() + stderr = stderr.replace(PIP_NO_ROOT_WARNING, b"").strip() + if len(decoded_stderr := stderr.decode()) > 0: + logger.error(f"Received stderr: '{decoded_stderr}'") + raise Exception("Something went wrong when installing.") + return stdout.decode() + + +def parse_plugin_toml_from_string(unparsed_plugin_toml_str: str, /, local: bool = False) -> List[Plugin]: + """Parses a plugin toml, given the string loaded in.""" + doc = atoml.parse(unparsed_plugin_toml_str) + found_plugins: List[Plugin] = [] + for plug_entry in doc["plugins"]: + if local: + enabled = plug_entry.get("enabled", True) + else: + enabled = None + found_plugins.append( + Plugin( + plug_entry.get("directory") or plug_entry["folder"], + name=plug_entry.get("name"), + description=plug_entry.get("description"), + min_bot_version=plug_entry.get("min_bot_version"), + enabled=enabled, + dependencies=plug_entry.get("dependencies"), + ) + ) + return found_plugins + + +def update_local_toml_enable_or_disable(plugin: Plugin, /) -> None: + 
""" + Updates the local toml so local plugins stay disabled or enabled. + + This is the local implementation for disabling and enabling to actually disable and enable plugins. + Non local plugins are saved in the database. + """ + if not LOCAL_PLUGIN_TOML.exists(): + raise NoPluginTomlFoundError( + f"The required file at {LOCAL_PLUGIN_TOML!s} does not exist to deal with local plugins.\n" + "You may need to create it." + ) + + with LOCAL_PLUGIN_TOML.open("r") as f: + doc = atoml.loads(f.read()) + plugs = doc["plugins"] + + plug_found = False + for plug_entry in plugs: + folder_name = plug_entry.get("directory") or plug_entry["folder"] + if folder_name == plugin.folder_name: + plug_entry["enabled"] = plugin.enabled + plug_found = True + break + + if not plug_found: + # need to write a new entry + logger.trace(f"Local plugin toml does not contain an entry for {plugin}") + + plugin_table = atoml.table() + if plugin.name != plugin.folder_name: + plugin_table.add("name", atoml.item(plugin.name)) + + plugin_table.add("directory", atoml.item(plugin.folder_name)) + plug_entry["enabled"] = plugin.enabled + plugs.append(plugin_table) + print(plugs) + + with open(LOCAL_PLUGIN_TOML, "w") as f: + f.write(doc.as_string()) + + +def find_partial_plugins_from_dir( + addon_repo_path: pathlib.Path, + *, + parse_toml: bool = True, + no_toml_exist_ok: bool = True, +) -> Generator[Plugin, None, None]: + """ + Find the plugins that are in a directory. + + All plugins in a zip folder will be located at either `Plugins/` or `plugins/` + + If parse_toml is true, if the plugin.toml file is found, it will be parsed. + + Returns a dict containing all of the plugin folders as keys + and the values as lists of the files within those folders. 
+ """ + temp_direct_children = [p for p in addon_repo_path.iterdir()] + if len(temp_direct_children) == 1: + folder = temp_direct_children[0] + if folder.is_dir(): + addon_repo_path = addon_repo_path / folder + del temp_direct_children + # figure out which directory plugins are in. Both Plugins and plugins are supported. + # default is plugins. + plugin_directory = None + direct_children = [p for p in addon_repo_path.iterdir()] + + for path_ in direct_children: + if path_.name.rsplit("/", 1)[-1] in VALID_ZIP_PLUGIN_DIRECTORIES: + plugin_directory = path_ + break + + if plugin_directory is None: + raise NoPluginDirectoryError(f"No {' or '.join(VALID_ZIP_PLUGIN_DIRECTORIES)} directory exists.") + + plugin_directory = addon_repo_path / plugin_directory + + all_plugins: Set[Plugin] = set() + + if parse_toml: + toml_path = plugin_directory / PLUGIN_TOML + if toml_path.exists(): + # parse the toml + with open(toml_path) as toml_file: + all_plugins.update(parse_plugin_toml_from_string(toml_file.read())) + + elif no_toml_exist_ok: + # toml does not exist but the caller does not care + pass + else: + raise NoPluginTomlFoundError(toml_path, "does not exist") + + for path in plugin_directory.iterdir(): + if path.is_dir(): + # use an existing toml plugin object + if path.name in all_plugins: + for p in all_plugins: + if p.folder_name == path.name: + p.folder_path = path + yield p + break + else: + logger.debug( + f"Plugin in {addon_repo_path!s} is not provided in toml. Creating new plugin object." + ) + yield Plugin(path.name, folder_path=path) + + +def find_plugins( + detection_path: pathlib.Path = None, /, *, local: Optional[bool] = True +) -> Generator[Plugin, None, None]: + """ + Walks the local path, and determines which files are local plugins. 
+ + Yields a list of plugins, + """ + if detection_path is None: + detection_path = BASE_PLUGIN_PATH + all_plugins: Set[Plugin] = set() + + toml_plugins: List[Plugin] = [] + toml_path = LOCAL_PLUGIN_TOML + + if not toml_path.exists(): + raise NoPluginTomlFoundError(toml_path, "does not exist") + + # parse the toml + with open(toml_path) as toml_file: + toml_plugins = parse_plugin_toml_from_string(toml_file.read(), local=True) + + toml_plugin_names = [p.folder_name for p in toml_plugins] + for path in detection_path.iterdir(): + if path.is_dir() and path.name in toml_plugin_names: + # use an existing toml plugin object + for p in toml_plugins: + if p.folder_name == path.name: + p.folder_path = path + all_plugins.add(p) + + logger.debug(f"Local plugins detected: {[p.name for p in all_plugins]}") + + for plugin_ in all_plugins: + logger.trace(f"{plugin_.folder_path =}") + if local is not None: + # configure all plugins with the provided local variable + plugin_.local = local + for dirpath, dirnames, filenames in os.walk(plugin_.folder_path): + logger.trace(f"{dirpath =}, {dirnames =}, {filenames =}") + for list_ in dirnames, [dirpath]: + logger.trace(f"{list_ =}") + for dir_ in list_: + logger.trace(f"{dir_ =}") + + if "__pycache__" in dir_ or "__pycache__" in dirpath: + continue + + plugin_.modules = {} + plugin_.modules.update(walk_plugin_files(dirpath)) + yield plugin_ + + +def walk_plugin_files( + detection_path: pathlib.Path = None, +) -> Generator[Tuple[ModuleName, ExtMetadata], None, None]: + """Yield plugin names from the modmail.plugins subpackage.""" + # walk all files in the plugins folder + # this is to ensure folder symlinks are supported, + # which are important for ease of development. 
def walk_plugin_files(
    detection_path: pathlib.Path = None,
) -> Generator[Tuple[ModuleName, ExtMetadata], None, None]:
    """
    Yield (module name, metadata) pairs for plugin modules under detection_path.

    Defaults to BASE_PLUGIN_PATH when no path is given. Each candidate module
    is imported so its EXT_METADATA and setup() can be inspected; modules that
    fail to import or lack a setup function are skipped.
    """
    # walk all files in the plugins folder
    # this is to ensure folder symlinks are supported,
    # which are important for ease of development.
    # NOTE: We are not using Pathlib's glob utility as it doesn't
    # support following symlinks, see: https://bugs.python.org/issue33428
    if detection_path is None:
        detection_path = BASE_PLUGIN_PATH
    for path in glob.iglob(f"{detection_path}/**/*.py", recursive=True):
        logger.trace(f"{path =}")

        # calculate the module name, derived from the relative path
        relative_path = pathlib.Path(path).relative_to(BASE_PLUGIN_PATH)
        # joining all parts in one call (rather than parent + "." + stem) avoids
        # a doubled dot ("modmail.plugins..foo") for files that sit directly in
        # the plugins root, where parent.parts is empty.
        module_name = ".".join((plugins.__name__, *relative_path.parent.parts, relative_path.stem))
        logger.trace(f"{module_name =}")

        if unqualify(module_name.split(".")[-1]).startswith("_"):
            # Ignore module/package names starting with an underscore.
            continue

        # due to the fact that plugins are user generated and may not have gone through
        # the testing that the bot has, we want to ensure we try/except any plugins
        # that fail to import.
        try:
            # load the plugins using importlib
            # this needs to be done like this, due to the fact that
            # its possible a plugin will not have an __init__.py file
            spec = importlib.util.spec_from_file_location(module_name, path)
            imported = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(imported)
        except Exception:
            logger.error(
                f"Failed to import {module_name}. As a result, this plugin is not considered installed.",
                exc_info=True,
            )
            continue

        if not inspect.isfunction(getattr(imported, "setup", None)):
            # If it lacks a setup function, it's not a plugin. This is enforced by dpy.
            logger.trace(f"{module_name} does not have a setup function. Skipping.")
            continue

        ext_metadata: ExtMetadata = getattr(imported, "EXT_METADATA", None)
        if ext_metadata is not None:
            if not isinstance(ext_metadata, ExtMetadata):
                if ext_metadata == ExtMetadata:
                    logger.info(
                        f"{imported.__name__!r} seems to have passed the ExtMetadata class directly to "
                        "EXT_METADATA. Using defaults."
                    )
                else:
                    logger.error(
                        f"Plugin extension {imported.__name__!r} contains an invalid EXT_METADATA variable. "
                        "Loading with metadata defaults. "
                        "Please report this error to the respective plugin developers."
                    )
                yield imported.__name__, ExtMetadata()
                continue

            logger.debug(f"{imported.__name__!r} contains a EXT_METADATA variable. Loading it.")

            yield imported.__name__, ext_metadata
            continue

        logger.notice(
            f"Plugin extension {imported.__name__!r} is missing an EXT_METADATA variable. "
            "Assuming its a normal plugin extension."
        )

        # Presume Production Mode/Metadata defaults if metadata var does not exist.
        yield imported.__name__, ExtMetadata()
def unpack_zip(zip_file: zipfile.ZipFile, path: pathlib.Path = None) -> pathlib.Path:
    """
    Extract ``zip_file`` and return the directory it was unpacked into.

    An explicitly provided ``path`` wins; otherwise a per-archive folder
    inside the platform's temporary directory is chosen.
    """
    destination = TEMP_DIR / "modmail-addons" / f"zip-{hash(zip_file)}" if path is None else path
    zip_file.extractall(path=destination)
    return destination
+ """ + if source.source_type not in (SourceTypeEnum.REPO, SourceTypeEnum.ZIP): + raise TypeError("Unsupported source detected.") + + async with session.get(source.zip_url, timeout=20) as resp: + if resp.status != 200: + raise HTTPError(resp) + raw_bytes = await resp.read() + + zip_stream = io.BytesIO(raw_bytes) + zip_stream.write(raw_bytes) + + return zipfile.ZipFile(zip_stream) + + +async def download_and_unpack_source( + source: AddonSource, session: ClientSession, path: pathlib.Path = None +) -> pathlib.Path: + """Downloads and unpacks source.""" + zip_file = await download_zip_from_source(source, session) + return unpack_zip(zip_file, path) diff --git a/modmail/bot.py b/modmail/bot.py index ed582f45..7831bd03 100644 --- a/modmail/bot.py +++ b/modmail/bot.py @@ -3,6 +3,7 @@ import signal import socket import typing as t +from typing import Optional, Set import aiohttp import arrow @@ -11,11 +12,14 @@ from discord.client import _cleanup_loop from discord.ext import commands +from modmail.addons.errors import NoPluginTomlFoundError +from modmail.addons.models import Plugin +from modmail.addons.plugins import PLUGINS, find_plugins from modmail.config import config from modmail.dispatcher import Dispatcher from modmail.log import ModmailLogger -from modmail.utils.extensions import EXTENSIONS, NO_UNLOAD, walk_extensions -from modmail.utils.plugins import PLUGINS, walk_plugins +from modmail.utils.cogs import ModmailCog +from modmail.utils.extensions import BOT_MODE, EXTENSIONS, NO_UNLOAD, walk_extensions from modmail.utils.threads import Ticket @@ -37,6 +41,7 @@ class ModmailBot(commands.Bot): """ logger: ModmailLogger = logging.getLogger(__name__) + mode: int dispatcher: Dispatcher _tickets: t.Dict[int, Ticket] = dict() @@ -50,6 +55,9 @@ def __init__(self, **kwargs): self._connector = None self._resolver = None + # keys: plugins, list values: all plugin files + self.installed_plugins: Optional[Set[Plugin]] = None + status = discord.Status.online activity = 
Activity(type=discord.ActivityType.listening, name="users dming me!") # listen to messages mentioning the bot or matching the prefix @@ -178,7 +186,12 @@ def stop_loop_on_completion(f: t.Any) -> None: async def close(self) -> None: """Safely close HTTP session, unload plugins and extensions when the bot is shutting down.""" - plugins = self.extensions & PLUGINS.keys() + plugins = [] + for plug in PLUGINS: + plugins.extend([mod for mod in plug.modules]) + + plugins = self.extensions.keys() & plugins + for plug in list(plugins): try: self.unload_extension(plug) @@ -210,31 +223,48 @@ async def close(self) -> None: def load_extensions(self) -> None: """Load all enabled extensions.""" + self.mode = BOT_MODE EXTENSIONS.update(walk_extensions()) - # set up no_unload global too - for ext, value in EXTENSIONS.items(): - if value[1]: + for ext, metadata in EXTENSIONS.items(): + # set up no_unload global too + if metadata.no_unload: NO_UNLOAD.append(ext) - for extension, value in EXTENSIONS.items(): - if value[0]: - self.logger.debug(f"Loading extension {extension}") - self.load_extension(extension) + if metadata.load_if_mode & BOT_MODE: + self.logger.info(f"Loading extension {ext}") + self.load_extension(ext) + else: + self.logger.debug(f"SKIPPING load of extension {ext} due to BOT_MODE.") def load_plugins(self) -> None: """Load all enabled plugins.""" - PLUGINS.update(walk_plugins()) - - for plugin, should_load in PLUGINS.items(): - if should_load: - self.logger.debug(f"Loading plugin {plugin}") - try: - # since we're loading user generated content, - # any errors here will take down the entire bot - self.load_extension(plugin) - except Exception: - self.logger.error("Failed to load plugin {0}".format(plugin), exc_info=True) + self.installed_plugins = PLUGINS + dont_load_at_start = [] + try: + PLUGINS.update(find_plugins()) + except NoPluginTomlFoundError: + # no local plugins + pass + else: + for plug in self.installed_plugins: + if plug.enabled: + continue + 
self.logger.debug(f"Not loading {plug.__str__()} on start since it's not enabled.") + dont_load_at_start.extend(plug.modules) + + for plug in PLUGINS: + for mod, metadata in plug.modules.items(): + if metadata.load_if_mode & self.mode and mod not in dont_load_at_start: + self.logger.debug(f"Loading plugin {mod}") + try: + # since we're loading user generated content, + # any errors here will take down the entire bot + self.load_extension(mod) + except Exception: + self.logger.error(f"Failed to load plugin {mod!s}", exc_info=True) + else: + self.logger.debug(f"SKIPPED loading plugin {mod}") def add_cog(self, cog: commands.Cog, *, override: bool = False) -> None: """ @@ -243,12 +273,10 @@ def add_cog(self, cog: commands.Cog, *, override: bool = False) -> None: Utilizes the default discord.py loader beneath, but also checks so we can warn when we're loading a non-ModmailCog cog. """ - from modmail.utils.cogs import ModmailCog - if not isinstance(cog, ModmailCog): self.logger.warning( - f"Cog {cog.name} is not a ModmailCog. All loaded cogs should always be" - f" instances of ModmailCog." + f"Cog {cog.qualified_name} is not a ModmailCog. All loaded cogs should always be" + " instances of ModmailCog." ) super().add_cog(cog, override=override) self.logger.info(f"Cog loaded: {cog.qualified_name}") diff --git a/modmail/constraints.txt b/modmail/constraints.txt new file mode 100644 index 00000000..40036752 --- /dev/null +++ b/modmail/constraints.txt @@ -0,0 +1,35 @@ +# NOTICE: This file is automatically generated by scripts/export_requirements.py +# This is also automatically regenerated when an edit to pyproject.toml or poetry.lock is commited. 
+ +aiodns==3.0.0 +aiohttp==3.7.4.post0 +arrow==1.1.1 +async-timeout==3.0.1 +atoml==1.0.3 +attrs==21.2.0 +brotlipy==0.7.0 +cchardet==2.1.7 +cffi==1.15.0 +chardet==4.0.0 +colorama==0.4.4 +coloredlogs==15.0.1 +desert==2020.11.18 +discord.py @ https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip +humanfriendly==10.0 +idna==3.2 +jarowinkler==1.0.2 +marshmallow-enum==1.5.1 +marshmallow==3.13.0 +multidict==5.2.0 +mypy-extensions==0.4.3 +pycares==4.1.2 +pycparser==2.20 +pyreadline3==3.3 +python-dateutil==2.8.2 +python-dotenv==0.19.2 +pyyaml==5.4.1 +rapidfuzz==2.0.10 +six==1.16.0 +typing-extensions==3.10.0.2 +typing-inspect==0.7.1 +yarl==1.7.2 diff --git a/modmail/errors.py b/modmail/errors.py index 1f314157..1ebed6bd 100644 --- a/modmail/errors.py +++ b/modmail/errors.py @@ -1,3 +1,13 @@ +from aiohttp import ClientResponse + + +class HTTPError(Exception): + """Response from an http request was not desired.""" + + def __init__(self, response: ClientResponse): + self.response = response + + class MissingAttributeError(Exception): """Missing attribute.""" diff --git a/modmail/extensions/extension_manager.py b/modmail/extensions/extension_manager.py index 19554a78..13bc269c 100644 --- a/modmail/extensions/extension_manager.py +++ b/modmail/extensions/extension_manager.py @@ -3,28 +3,37 @@ # MIT License 2021 Python Discord import functools import logging -import typing as t from collections import defaultdict from enum import Enum +from typing import Mapping, Tuple, Union -from discord import AllowedMentions, Colour, Embed +from discord import Colour, Embed from discord.ext import commands from discord.ext.commands import Context import modmail.config from modmail.bot import ModmailBot from modmail.log import ModmailLogger -from modmail.utils.cogs import BotModes, ExtMetadata, ModmailCog -from modmail.utils.extensions import EXTENSIONS, NO_UNLOAD, unqualify, walk_extensions +from modmail.utils import responses +from modmail.utils.cogs import 
class StatusEmojis:
    """Status emojis for extension statuses, used when rendering grouped status lists."""

    # NOTE(review): the names suggest the obvious load-state meanings;
    # `partially_loaded` and `unknown` are not referenced in the code visible
    # here -- confirm they are used elsewhere before removing them.
    fully_loaded: str = ":green_circle:"
    partially_loaded: str = ":yellow_circle:"
    unloaded: str = ":red_circle:"
    disabled: str = ":brown_circle:"
    unknown: str = ":black_circle:"
unqualified names. If '\*' is given as the name, all unloaded extensions will be loaded. """ - if not extensions: - await ctx.send_help(ctx.command) - return - if "*" in extensions: extensions = sorted(ext for ext in self.all_extensions if ext not in self.bot.extensions.keys()) - msg = self.batch_manage(Action.LOAD, *extensions) - await ctx.send(msg) + msg, is_error = self.batch_manage(Action.LOAD, *extensions) + if not is_error: + await responses.send_positive_response(ctx, msg) + else: + await responses.send_negatory_response(ctx, msg) - @extensions_group.command(name="unload", aliases=("ul",)) + @extensions_group.command(name="unload", aliases=("ul",), require_var_positional=True) async def unload_extensions(self, ctx: Context, *extensions: ExtensionConverter) -> None: r""" Unload currently loaded extensions given their fully qualified or unqualified names. If '\*' is given as the name, all loaded extensions will be unloaded. """ - if not extensions: - await ctx.send_help(ctx.command) - return - blacklisted = [ext for ext in self.get_black_listed_extensions() if ext in extensions] if blacklisted: bl_msg = "\n".join(blacklisted) - await ctx.send( - f"{Emojis.failure} The following {self.type}(s) may not be unloaded:```\n{bl_msg}```" + await responses.send_negatory_response( + ctx, f"{Emojis.failure} The following {self.type}(s) may not be unloaded:```\n{bl_msg}```" ) return @@ -150,9 +160,13 @@ async def unload_extensions(self, ctx: Context, *extensions: ExtensionConverter) if ext not in (self.get_black_listed_extensions()) ) - await ctx.send(self.batch_manage(Action.UNLOAD, *extensions)) + msg, is_error = self.batch_manage(Action.UNLOAD, *extensions) + if not is_error: + await responses.send_positive_response(ctx, msg) + else: + await responses.send_negatory_response(ctx, msg) - @extensions_group.command(name="reload", aliases=("r", "rl")) + @extensions_group.command(name="reload", aliases=("r", "rl"), require_var_positional=True) async def 
reload_extensions(self, ctx: Context, *extensions: ExtensionConverter) -> None: r""" Reload extensions given their fully qualified or unqualified names. @@ -161,14 +175,14 @@ async def reload_extensions(self, ctx: Context, *extensions: ExtensionConverter) If '\*' is given as the name, all currently loaded extensions will be reloaded. """ - if not extensions: - await ctx.send_help(ctx.command) - return - if "*" in extensions: - extensions = self.bot.extensions.keys() & self.all_extensions.keys() + extensions = self.bot.extensions.keys() & self.all_extensions - await ctx.send(self.batch_manage(Action.RELOAD, *extensions)) + msg, is_error = self.batch_manage(Action.RELOAD, *extensions) + if not is_error: + await responses.send_positive_response(ctx, msg) + else: + await responses.send_negatory_response(ctx, msg) @extensions_group.command(name="list", aliases=("all", "ls")) async def list_extensions(self, ctx: Context) -> None: @@ -199,6 +213,23 @@ async def list_extensions(self, ctx: Context) -> None: lines or f"There are no {self.type}s installed.", ctx.message, embed=embed ) + def _resync_extensions(self) -> None: + """Resyncs extensions. Useful for when the files are dynamically updated.""" + log.debug(f"Refreshing list of {self.type}s.") + + # make sure the new walk contains all currently loaded extensions, so they can be unloaded + all_exts: ModuleDict = {} + for name, metadata in self.all_extensions.items(): + if name in self.bot.extensions: + all_exts[name] = metadata + + # re-walk the extensions + for name, metadata in walk_extensions(): + all_exts[name] = metadata + + self.all_extensions.clear() + self.all_extensions.update(all_exts) + @extensions_group.command(name="refresh", aliases=("rewalk", "rescan")) async def resync_extensions(self, ctx: Context) -> None: """ @@ -206,31 +237,20 @@ async def resync_extensions(self, ctx: Context) -> None: Typical use case is in the event that the existing extensions have changed while the bot is running. 
""" - log.debug(f"Refreshing list of {self.type}s.") - - # make sure the new walk contains all currently loaded extensions, so they can be unloaded - loaded_extensions = {} - for name, should_load in self.all_extensions.items(): - if name in self.bot.extensions: - loaded_extensions[name] = should_load - - # now that we know what the list was, we can clear it - self.all_extensions.clear() - # put the loaded extensions back in - self.all_extensions.update(loaded_extensions) - # now we can re-walk the extensions - self.all_extensions.update(self.refresh_method()) - await ctx.send(f":ok_hand: Refreshed list of {self.type}s.") + self._resync_extensions() + await responses.send_positive_response(ctx, f":ok_hand: Refreshed list of {self.type}s.") - def group_extension_statuses(self) -> t.Mapping[str, str]: + def group_extension_statuses(self) -> Mapping[str, str]: """Return a mapping of extension names and statuses to their categories.""" categories = defaultdict(list) - for ext in self.all_extensions: + for ext, metadata in self.all_extensions.items(): if ext in self.bot.extensions: - status = ":green_circle:" + status = StatusEmojis.fully_loaded + elif metadata.load_if_mode & BOT_MODE: + status = StatusEmojis.disabled else: - status = ":red_circle:" + status = StatusEmojis.unloaded root, name = ext.rsplit(".", 1) if root.split(".", 1)[1] == self.module_name: @@ -241,21 +261,26 @@ def group_extension_statuses(self) -> t.Mapping[str, str]: return dict(categories) - def batch_manage(self, action: Action, *extensions: str) -> str: + def batch_manage( + self, + action: Action, + *extensions: str, + **kw, + ) -> Tuple[str, bool]: """ Apply an action to multiple extensions and return a message with the results. - If only one extension is given, it is deferred to `manage()`. + Any extra kwargs are passed to `manage()` which handles all passed modules. 
""" if len(extensions) == 1: - msg, _ = self.manage(action, extensions[0]) - return msg + msg, failures = self.manage(action, extensions[0], **kw) + return msg, bool(failures) verb = action.name.lower() failures = {} for extension in sorted(extensions): - _, error = self.manage(action, extension) + _, error = self.manage(action, extension, **kw) if error: failures[extension] = error @@ -268,24 +293,41 @@ def batch_manage(self, action: Action, *extensions: str) -> str: log.debug(f"Batch {verb}ed {self.type}s.") - return msg + return msg, bool(failures) - def manage(self, action: Action, ext: str) -> t.Tuple[str, t.Optional[str]]: + def manage( + self, + action: Action, + ext: str, + *, + is_plugin: bool = False, + suppress_already_error: bool = False, + ) -> Tuple[str, Union[str, bool]]: """Apply an action to an extension and return the status message and any error message.""" verb = action.name.lower() error_msg = None - + msg = None + not_quite = False try: action.value(self.bot, ext) except (commands.ExtensionAlreadyLoaded, commands.ExtensionNotLoaded): - if action is Action.RELOAD: + if suppress_already_error: + pass + elif action is Action.RELOAD: # When reloading, have a special error. msg = ( f"{Emojis.failure} {self.type.capitalize()} " f"`{ext}` is not loaded, so it was not {verb}ed." ) + not_quite = True + elif action is Action.INSTALL: + # extension wasn't loaded, so load it + # this is used for plugins + Action.LOAD.value(self.bot, ext) + else: - msg = f"{Emojis.failure} {self.type.capitalize()} `{ext}` is already {verb}ed." + msg = f"{Emojis.failure} {self.type.capitalize()} `{ext}` is already {verb.rstrip('e')}ed." 
+ not_quite = True except Exception as e: if hasattr(e, "original"): # If original exception is present, then utilize it @@ -295,11 +337,12 @@ def manage(self, action: Action, ext: str) -> t.Tuple[str, t.Optional[str]]: error_msg = f"{e.__class__.__name__}: {e}" msg = f"{Emojis.failure} Failed to {verb} {self.type} `{ext}`:\n```\n{error_msg}```" - else: - msg = f"{Emojis.success} {self.type.capitalize()} successfully {verb}ed: `{ext}`." + + if msg is None: + msg = f"{Emojis.success} {self.type.capitalize()} successfully {verb.rstrip('e')}ed: `{ext}`." log.debug(error_msg or msg) - return msg, error_msg + return msg, error_msg or not_quite # This cannot be static (must have a __func__ attribute). async def cog_check(self, ctx: Context) -> bool: @@ -311,8 +354,12 @@ async def cog_check(self, ctx: Context) -> bool: async def cog_command_error(self, ctx: Context, error: Exception) -> None: """Handle BadArgument errors locally to prevent the help command from showing.""" if isinstance(error, commands.BadArgument): - await ctx.send(str(error), allowed_mentions=AllowedMentions.none()) + await responses.send_negatory_response(ctx, str(error)) error.handled = True + elif isinstance(error, commands.MissingRequiredArgument): + await ctx.send_help(ctx.command) + else: + raise error def setup(bot: ModmailBot) -> None: diff --git a/modmail/extensions/plugin_manager.py b/modmail/extensions/plugin_manager.py index 16138c12..30bdbe0a 100644 --- a/modmail/extensions/plugin_manager.py +++ b/modmail/extensions/plugin_manager.py @@ -1,16 +1,50 @@ +from __future__ import annotations + +import asyncio +import logging +import shutil +from collections import defaultdict +from typing import TYPE_CHECKING, Mapping + +from atoml.exceptions import ParseError +from discord import Colour, Embed +from discord.abc import Messageable from discord.ext import commands from discord.ext.commands import Context -from modmail.bot import ModmailBot -from modmail.extensions.extension_manager import 
ExtensionConverter, ExtensionManager -from modmail.utils.cogs import BotModes, ExtMetadata -from modmail.utils.plugins import PLUGINS, walk_plugins +import modmail.addons.utils as addon_utils +from modmail import errors +from modmail.addons.converters import SourceAndPluginConverter +from modmail.addons.errors import NoPluginTomlFoundError +from modmail.addons.models import AddonSource, Plugin, SourceTypeEnum +from modmail.addons.plugins import ( + BASE_PLUGIN_PATH, + PLUGINS, + find_partial_plugins_from_dir, + find_plugins, + install_dependencies, + update_local_toml_enable_or_disable, + walk_plugin_files, +) +from modmail.extensions.extension_manager import Action, ExtensionConverter, ExtensionManager, StatusEmojis +from modmail.utils import responses +from modmail.utils.cogs import BotModeEnum, ExtMetadata +from modmail.utils.extensions import BOT_MODE, ModuleDict +from modmail.utils.pagination import ButtonPaginator + + +if TYPE_CHECKING: + from modmail.bot import ModmailBot + from modmail.log import ModmailLogger + +EXT_METADATA = ExtMetadata(no_unload=True) +logger: ModmailLogger = logging.getLogger(__name__) -EXT_METADATA = ExtMetadata(load_if_mode=BotModes.PRODUCTION) +PLUGIN_DEV_ENABLED = BOT_MODE & BotModeEnum.PLUGIN_DEV -class PluginConverter(ExtensionConverter): +class PluginDevPathConverter(ExtensionConverter): """ Fully qualify the name of a plugin and ensure it exists. 
@@ -21,6 +55,15 @@ class PluginConverter(ExtensionConverter): type = "plugin" NO_UNLOAD = None + def __init__(self): + """Properly set the source_list.""" + super().__init__() + PluginDevPathConverter.source_list + modules: ModuleDict = {} + for plug in PluginDevPathConverter.source_list: + modules.update({k: v for k, v in plug.modules.items()}) + self.source_list = modules + class PluginManager(ExtensionManager, name="Plugin Manager"): """Plugin management commands.""" @@ -28,10 +71,13 @@ class PluginManager(ExtensionManager, name="Plugin Manager"): type = "plugin" module_name = "plugins" # modmail/plugins - def __init__(self, bot: ModmailBot) -> None: + def __init__(self, bot: ModmailBot): super().__init__(bot) - self.all_extensions = PLUGINS - self.refresh_method = walk_plugins + + modules: ModuleDict = {} + for plug in PLUGINS: + modules.update({k: v for k, v in plug.modules.items()}) + self.all_extensions = modules def get_black_listed_extensions(self) -> list: """ @@ -42,56 +88,332 @@ def get_black_listed_extensions(self) -> list: """ return [] - @commands.group("plugins", aliases=("plug", "plugs"), invoke_without_command=True) + @commands.group("plugins", aliases=("plug", "plugs", "plugin"), invoke_without_command=True) async def plugins_group(self, ctx: Context) -> None: """Install, uninstall, disable, update, and enable installed plugins.""" await ctx.send_help(ctx.command) - @plugins_group.command(name="load", aliases=("l",)) - async def load_plugin(self, ctx: Context, *plugins: PluginConverter) -> None: + @plugins_group.group( + "dev", aliases=("developer",), invoke_without_command=True, enabled=PLUGIN_DEV_ENABLED + ) + async def plugin_dev_group(self, ctx: Context) -> None: + """Manage plugin files directly, rather than whole plugin objects.""" + await ctx.send_help(ctx.command) + + @plugin_dev_group.command(name="load", aliases=("l",), require_var_positional=True) + async def load_plugins(self, ctx: Context, *plugins: PluginDevPathConverter) -> None: 
r""" - Load plugins given their fully qualified or unqualified names. + Load singular plugin files given their fully qualified or unqualified names. If '\*' is given as the name, all unloaded plugins will be loaded. """ await self.load_extensions.callback(self, ctx, *plugins) - @plugins_group.command(name="unload", aliases=("ul",)) - async def unload_plugins(self, ctx: Context, *plugins: PluginConverter) -> None: + @plugin_dev_group.command(name="unload", aliases=("u", "ul"), require_var_positional=True) + async def unload_plugins(self, ctx: Context, *plugins: PluginDevPathConverter) -> None: r""" - Unload currently loaded plugins given their fully qualified or unqualified names. + Unoad singular plugin files given their fully qualified or unqualified names. If '\*' is given as the name, all loaded plugins will be unloaded. """ await self.unload_extensions.callback(self, ctx, *plugins) - @plugins_group.command(name="reload", aliases=("r", "rl")) - async def reload_plugins(self, ctx: Context, *plugins: PluginConverter) -> None: + @plugin_dev_group.command(name="reload", aliases=("r", "rl"), require_var_positional=True) + async def reload_plugins(self, ctx: Context, *plugins: PluginDevPathConverter) -> None: r""" - Reload plugins given their fully qualified or unqualified names. + Reload singular plugin files given their fully qualified or unqualified names. - If an plugin fails to be reloaded, it will be rolled-back to the prior working state. + If a plugin file fails to be reloaded, it will be rolled-back to the prior working state. If '\*' is given as the name, all currently loaded plugins will be reloaded. 
""" await self.reload_extensions.callback(self, ctx, *plugins) - @plugins_group.command(name="list", aliases=("all", "ls")) + def group_extension_statuses(self) -> Mapping[str, str]: + """Return a mapping of plugin names and statuses to their categories.""" + categories = defaultdict(list) + + for plug in PLUGINS: + for mod, metadata in plug.modules.items(): + if mod in self.bot.extensions: + status = StatusEmojis.fully_loaded + elif metadata.load_if_mode & BOT_MODE: + status = StatusEmojis.disabled + else: + status = StatusEmojis.unloaded + + name = mod.split(".", 2)[-1] + categories[plug.name].append(f"{status} `{name}`") + + return dict(categories) + + def _resync_extensions(self) -> None: + """Resyncs plugin. Useful for when the files are dynamically updated.""" + logger.debug(f"Refreshing list of {self.type}s.") + + # remove all fully unloaded plugins from the list + for plug in PLUGINS.copy(): + safe_to_remove = [mod not in self.bot.extensions for mod in plug.modules] + if all(safe_to_remove): + PLUGINS.remove(plug) + + PLUGINS.update(find_plugins()) + + modules: ModuleDict = {} + for plug in PLUGINS: + modules.update({k: v for k, v in plug.modules.items()}) + self.all_extensions = modules + + @plugin_dev_group.command(name="refresh", aliases=("rewalk", "rescan", "resync")) + async def resync_plugins(self, ctx: Context) -> None: + """Refreshes the list of plugins from disk, but do not unload any currently active.""" + await self.resync_extensions.callback(self, ctx) + + @commands.max_concurrency(1, per=commands.BucketType.default, wait=True) + @plugins_group.command(name="install", aliases=("add",)) + async def install_plugins(self, ctx: Context, *, source_and_plugin: SourceAndPluginConverter) -> None: + """Install plugins from provided repo.""" + # this could take a while + # I'm aware this should be a context manager, but do not want to indent almost the entire command + await ctx.trigger_typing() + + # if we send a preliminary action message this gets set 
and is edited upon success. + message = None + + # create variables for the user input, typehint them, then assign them from the converter tuple + plugin: Plugin + source: AddonSource + plugin, source = source_and_plugin + + if source.source_type is SourceTypeEnum.LOCAL: + # TODO: check the path of a local plugin + await responses.send_negatory_response( + ctx, + "This plugin seems to be a local plugin, and therefore can probably be " + "loaded with the load command, if it isn't loaded already.", + ) + return + logger.debug(f"Received command to download plugin {plugin.name} from https://{source.zip_url}") + try: + directory = await addon_utils.download_and_unpack_source(source, self.bot.http_session) + except errors.HTTPError as e: + await responses.send_negatory_response( + ctx, f"Downloading {source.zip_url} expected 200, received {e.response.status}." + ) + return + + source.cache_file = directory + + # determine plugins in the archive + archive_plugins = {x for x in find_partial_plugins_from_dir(directory)} + + # yield to any coroutines that need to run + # afaik its not possible to do this with aiofiles, so when we export the zip, + # its important to yield right after + await asyncio.sleep(0) + + # copy the requested plugin over to the new folder + for p in archive_plugins: + # check if user-provided plugin matches either plugin name or folder name + if plugin.name in (p.name, p.folder_name): + install_path = BASE_PLUGIN_PATH / p.folder_path.name + try: + shutil.copytree(p.folder_path, install_path, dirs_exist_ok=True) + except FileExistsError: + await responses.send_negatory_response( + ctx, + "Plugin already seems to be installed. 
" + "This could be caused by the plugin already existing, " + "or a plugin of the same name existing.", + ) + return + p.installed_path = install_path + plugin = p + break + + if plugin.folder_path is None: + await responses.send_negatory_response(ctx, f"Could not find plugin {plugin}") + return + + if plugin.dependencies and len(plugin.dependencies): + # install dependencies since they exist + message = await ctx.send( + embed=Embed( + description="Installing dependencies.", + title="Pending install", + colour=Colour.yellow(), + ) + ) + try: + await install_dependencies(plugin) + except Exception as e: + logger.error(e, exc_info=True) + await responses.send_negatory_response( + ctx, "Could not successfully install plugin dependencies.", message=message + ) + return + + logger.trace(f"{BASE_PLUGIN_PATH = }") + + plugin.modules.update(walk_plugin_files(BASE_PLUGIN_PATH / plugin.folder_name)) + + PLUGINS.add(plugin) + + self.batch_manage(Action.INSTALL, *plugin.modules.keys()) + + # check if the manage was successful + failed = [] + for mod, metadata in plugin.modules.items(): + fail = not (mod in self.bot.extensions or metadata.load_if_mode & BOT_MODE) + + failed.append(fail) + + if any(failed): + await responses.send_negatory_response( + ctx, f"Failed to fully install plugin {plugin}.", message=message + ) + else: + await responses.send_positive_response( + ctx, f"Successfully installed plugin {plugin}.", message=message + ) + + @plugins_group.command(name="uninstall", aliases=("rm",)) + async def uninstall_plugin(self, ctx: Context, *, plugin: Plugin) -> None: + """Uninstall a provided plugin, given the name of the plugin.""" + plugin: Plugin = plugin + + if plugin.local: + await responses.send_negatory_response( + ctx, "You may not uninstall a local plugin.\nUse the disable command to stop using it." 
+ ) + return + + plugin = await Plugin.convert(ctx, plugin.folder_name) + _, err = self.batch_manage( + Action.UNLOAD, *plugin.modules.keys(), is_plugin=True, suppress_already_error=True + ) + if err: + await responses.send_negatory_response( + ctx, "There was a problem unloading the plugin from the bot." + ) + return + + shutil.rmtree(plugin.installed_path) + + plugin = await Plugin.convert(ctx, plugin.folder_name) + PLUGINS.remove(plugin) + + await responses.send_positive_response(ctx, f"Successfully uninstalled plugin {plugin}") + + async def _enable_or_disable_plugin( + self, + ctx: Messageable, + plugin: Plugin, + action: Action, + enable: bool, + ) -> None: + """Enables or disables a provided plugin.""" + verb = action.name.lower() + if plugin.enabled == enable: + await responses.send_negatory_response(ctx, f"Plugin {plugin!s} is already {verb}d.") + return + + plugin.enabled = enable + + if plugin.local: + try: + update_local_toml_enable_or_disable(plugin) + except (NoPluginTomlFoundError, ParseError) as e: + plugin.enabled = not plugin.enabled # reverse the state + await responses.send_negatory_response(ctx, e.args[0]) + + msg, err = self.batch_manage( + action, *plugin.modules.keys(), is_plugin=True, suppress_already_error=True + ) + if err: + await responses.send_negatory_response( + ctx, "Er, something went wrong.\n" f":x: {plugin!s} was unable to be {verb}d properly!" 
+ ) + else: + await responses.send_positive_response(ctx, f":thumbsup: Plugin {plugin!s} successfully {verb}d.") + + @plugins_group.command(name="enable") + async def enable_plugin(self, ctx: Context, *, plugin: Plugin) -> None: + """Enable a provided plugin, given the name or folder of the plugin.""" + await self._enable_or_disable_plugin(ctx, plugin, Action.ENABLE, True) + + @plugins_group.command(name="disable") + async def disable_plugin(self, ctx: Context, *, plugin: Plugin) -> None: + """Disable a provided plugin, given the name or folder of the plugin.""" + await self._enable_or_disable_plugin(ctx, plugin, Action.DISABLE, False) + + def group_plugin_statuses(self) -> Mapping[str, str]: + """Return a mapping of plugin names and statuses to their module.""" + plugins = defaultdict(str) + + for plug in self.bot.installed_plugins: + plug_status = [] + for mod, metadata in plug.modules.items(): + status = mod in self.bot.extensions + # check that the file is supposed to be loaded + if not status and not metadata.load_if_mode & self.bot.mode: + continue + plug_status.append(status) + + if not plug_status: + status = StatusEmojis.unknown + elif all(plug_status): + status = StatusEmojis.fully_loaded + elif any(plug_status): + status = StatusEmojis.partially_loaded + else: + if plug.enabled: + status = StatusEmojis.unloaded + else: + status = StatusEmojis.disabled + + plugins[plug.name] = status + + return dict(plugins) + + @plugins_group.group(name="list", aliases=("all", "ls"), invoke_without_command=True) async def list_plugins(self, ctx: Context) -> None: """ Get a list of all plugins, including their loaded status. - Red indicates that the plugin is unloaded. - Green indicates that the plugin is currently loaded. + Green indicates that the extension is fully loaded. + Yellow indicates that the plugin is partially loaded. + Red indicates that the plugin is fully unloaded. 
""" - await self.list_extensions.callback(self, ctx) + embed = Embed(colour=Colour.blurple()) + embed.set_author( + name=f"{self.type.capitalize()} List", + ) - @plugins_group.command(name="refresh", aliases=("rewalk", "rescan")) - async def resync_plugins(self, ctx: Context) -> None: - """Refreshes the list of plugins from disk, but do not unload any currently active.""" - await self.resync_extensions.callback(self, ctx) + lines = [] + plugin_statuses = self.group_plugin_statuses() + for plugin_name, status in sorted(plugin_statuses.items()): + # plugin_name = plugin_name.replace("_", " ").title() + lines.append(f"{status} **{plugin_name}**") + + logger.debug(f"{ctx.author} requested a list of all {self.type}s. " "Returning a paginated list.") + if PLUGIN_DEV_ENABLED: + kw = {"footer_text": "Tip: use the detailed command to see all plugin files"} + else: + kw = {} + await ButtonPaginator.paginate( + lines or f"There are no {self.type}s installed.", ctx.message, embed=embed, **kw + ) + + @list_plugins.command(name="detailed", aliases=("files", "-a"), hidden=not PLUGIN_DEV_ENABLED) + async def dev_list_plugins(self, ctx: Context) -> None: + """ + Get a list of all plugin files, including their loaded status. - # TODO: Implement install/enable/disable/etc + Red indicates that the plugin file is unloaded. + Green indicates that the plugin file is currently loaded. + """ + await self.list_extensions.callback(self, ctx) # This cannot be static (must have a __func__ attribute). 
async def cog_check(self, ctx: Context) -> bool: @@ -99,7 +421,7 @@ async def cog_check(self, ctx: Context) -> bool: if ctx.guild is None: return await self.bot.is_owner(ctx.author) else: - return ctx.author.guild_permissions.administrator or await self.bot.is_owner(ctx.author) + return await self.bot.is_owner(ctx.author) # HACK: Delete the commands from ExtensionManager that PluginManager has inherited diff --git a/modmail/extensions/threads.py b/modmail/extensions/threads.py index 679a3f7a..1da2f805 100644 --- a/modmail/extensions/threads.py +++ b/modmail/extensions/threads.py @@ -14,7 +14,7 @@ from discord.utils import escape_markdown from modmail.utils.cogs import ExtMetadata, ModmailCog -from modmail.utils.extensions import BOT_MODE, BotModes +from modmail.utils.extensions import BOT_MODE, BotModeEnum from modmail.utils.threads import Ticket, is_modmail_thread from modmail.utils.threads.errors import ThreadAlreadyExistsError, ThreadNotFoundError from modmail.utils.time import TimeStampEnum, get_discord_formatted_timestamp @@ -27,7 +27,7 @@ EXT_METADATA = ExtMetadata() -DEV_MODE_ENABLED = BOT_MODE & BotModes.DEVELOP +DEV_MODE_ENABLED = BOT_MODE & BotModeEnum.DEVELOP BASE_JUMP_URL = "https://discord.com/channels" DM_FAILURE_MESSAGE = ( diff --git a/modmail/extensions/utils/error_handler.py b/modmail/extensions/utils/error_handler.py index ae44116f..55be1da7 100644 --- a/modmail/extensions/utils/error_handler.py +++ b/modmail/extensions/utils/error_handler.py @@ -9,7 +9,7 @@ from modmail.bot import ModmailBot from modmail.log import ModmailLogger from modmail.utils import responses -from modmail.utils.cogs import BotModes, ExtMetadata, ModmailCog +from modmail.utils.cogs import BotModeEnum, ExtMetadata, ModmailCog from modmail.utils.extensions import BOT_MODE @@ -21,7 +21,7 @@ ERROR_TITLE_REGEX = re.compile(r"((?<=[a-z])[A-Z]|(?<=[a-zA-Z])[A-Z](?=[a-z]))") -ANY_DEV_MODE = BOT_MODE & (BotModes.DEVELOP.value + BotModes.PLUGIN_DEV.value) +ANY_DEV_MODE = BOT_MODE & 
(BotModeEnum.DEVELOP.value + BotModeEnum.PLUGIN_DEV.value) MAYBE_DM_ON_PERM_ERROR = True diff --git a/modmail/extensions/utils/paginator_manager.py b/modmail/extensions/utils/paginator_manager.py index 899c0d4f..7bd501a2 100644 --- a/modmail/extensions/utils/paginator_manager.py +++ b/modmail/extensions/utils/paginator_manager.py @@ -6,7 +6,7 @@ from discord import InteractionType -from modmail.utils.cogs import ModmailCog +from modmail.utils.cogs import ExtMetadata, ModmailCog if TYPE_CHECKING: @@ -17,6 +17,8 @@ logger: ModmailLogger = logging.getLogger(__name__) +EXT_METADATA = ExtMetadata + class PaginatorManager(ModmailCog): """Handles paginators that were still active when the bot shut down.""" diff --git a/modmail/plugins/.gitignore b/modmail/plugins/.gitignore index 4be6c40b..60a54109 100644 --- a/modmail/plugins/.gitignore +++ b/modmail/plugins/.gitignore @@ -3,10 +3,7 @@ # don't ignore this file !/.gitignore -# ignore the local folder, but not the readme -local/** -!local/ -!local/README.md - -# ensure this file is uploaded so `plugins` is considered a module +# ensure __init__.py is uploaded so `plugins` is considered a module !/__init__.py +# keep our helper file in here +!/helpers.py diff --git a/modmail/plugins/local/README.md b/modmail/plugins/local/README.md deleted file mode 100644 index 817f6aba..00000000 --- a/modmail/plugins/local/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Plugins - -This folder is where local plugins can be put for developing. 
- -Plugins should be like normal discord cogs, but should subclass `PluginCog` from `modmail.plugin_helpers` - -```py -from modmail.plugin_helpers import PluginCog - -class MyPlugin(PluginCog): - pass -``` diff --git a/modmail/utils/__init__.py b/modmail/utils/__init__.py index e69de29b..259de944 100644 --- a/modmail/utils/__init__.py +++ b/modmail/utils/__init__.py @@ -0,0 +1,15 @@ +from typing import Any + + +class _MissingSentinel: + def __eq__(self, other: Any): + return False + + def __bool__(self): + return False + + def __repr__(self): + return "..." + + +MISSING: Any = _MissingSentinel() diff --git a/modmail/utils/cogs.py b/modmail/utils/cogs.py index d8fe5e0b..f4f8b948 100644 --- a/modmail/utils/cogs.py +++ b/modmail/utils/cogs.py @@ -4,6 +4,17 @@ from discord.ext import commands +from modmail.config import config + + +__all__ = ( + "BitwiseAutoEnum", + "BotModeEnum", + "ExtMetadata", + "BOT_MODE", + "ModmailCog", +) + if TYPE_CHECKING: # pragma: nocover import modmail.bot @@ -17,7 +28,7 @@ def _generate_next_value_(name, start, count, last_values) -> int: # noqa: ANN0 return 1 << count -class BotModes(BitwiseAutoEnum): +class BotModeEnum(BitwiseAutoEnum): """ Valid modes for the bot. @@ -29,22 +40,36 @@ class BotModes(BitwiseAutoEnum): PLUGIN_DEV = auto() -BOT_MODES = BotModes - - @dataclass() class ExtMetadata: """Ext metadata class to determine if extension should load at runtime depending on bot configuration.""" - load_if_mode: int = BotModes.PRODUCTION + load_if_mode: BotModeEnum = BotModeEnum.PRODUCTION # this is to determine if the cog is allowed to be unloaded. no_unload: bool = False - def __init__(self, load_if_mode: int = BotModes.PRODUCTION, no_unload: bool = False) -> "ExtMetadata": + def __init__(self, *, load_if_mode: BotModeEnum = BotModeEnum.PRODUCTION, no_unload: bool = False): self.load_if_mode = load_if_mode self.no_unload = no_unload +def determine_bot_mode() -> int: + """ + Figure out the bot mode from the configuration system. 
+ + The configuration system uses true/false values, so we need to turn them into an integer for bitwise. + """ + bot_mode = 0 + _config = config() + for mode in BotModeEnum: + if getattr(_config.user.dev.mode, mode.name.lower(), True): + bot_mode += mode.value + return bot_mode + + +BOT_MODE = determine_bot_mode() + + class ModmailCog(commands.Cog): """ The base class that all cogs must inherit from. diff --git a/modmail/utils/extensions.py b/modmail/utils/extensions.py index 5e5d47fd..f22bc038 100644 --- a/modmail/utils/extensions.py +++ b/modmail/utils/extensions.py @@ -1,25 +1,26 @@ -# original source: +# initial source: # https://github.com/python-discord/bot/blob/a8869b4d60512b173871c886321b261cbc4acca9/bot/utils/extensions.py # MIT License 2021 Python Discord import importlib import inspect import logging import pkgutil -import typing as t +from typing import Dict, Generator, List, NewType, NoReturn, Tuple from modmail import extensions -from modmail.config import config from modmail.log import ModmailLogger -from modmail.utils.cogs import BOT_MODES, BotModes, ExtMetadata +from modmail.utils.cogs import BOT_MODE, BotModeEnum, ExtMetadata log: ModmailLogger = logging.getLogger(__name__) EXT_METADATA = ExtMetadata +ModuleName = NewType("ModuleName", str) +ModuleDict = Dict[ModuleName, ExtMetadata] -EXTENSIONS: t.Dict[str, t.Tuple[bool, bool]] = dict() -NO_UNLOAD: t.List[str] = list() +EXTENSIONS: ModuleDict = {} +NO_UNLOAD: List[ModuleName] = [] def unqualify(name: str) -> str: @@ -27,32 +28,15 @@ def unqualify(name: str) -> str: return name.rsplit(".", maxsplit=1)[-1] -def determine_bot_mode() -> int: - """ - Figure out the bot mode from the configuration system. - - The configuration system uses true/false values, so we need to turn them into an integer for bitwise. 
- """ - bot_mode = 0 - _config = config() - for mode in BotModes: - if getattr(_config.user.dev.mode, unqualify(str(mode)).lower(), True): - bot_mode += mode.value - return bot_mode - - -BOT_MODE = determine_bot_mode() - - log.trace(f"BOT_MODE value: {BOT_MODE}") -log.debug(f"Dev mode status: {bool(BOT_MODE & BOT_MODES.DEVELOP)}") -log.debug(f"Plugin dev mode status: {bool(BOT_MODE & BOT_MODES.PLUGIN_DEV)}") +log.debug(f"Dev mode status: {bool(BOT_MODE & BotModeEnum.DEVELOP)}") +log.debug(f"Plugin dev mode status: {bool(BOT_MODE & BotModeEnum.PLUGIN_DEV)}") -def walk_extensions() -> t.Iterator[t.Tuple[str, t.Tuple[bool, bool]]]: +def walk_extensions() -> Generator[Tuple[ModuleName, ExtMetadata], None, None]: """Yield extension names from the modmail.exts subpackage.""" - def on_error(name: str) -> t.NoReturn: + def on_error(name: str) -> NoReturn: raise ImportError(name=name) # pragma: no cover for module in pkgutil.walk_packages(extensions.__path__, f"{extensions.__name__}.", onerror=on_error): @@ -61,21 +45,34 @@ def on_error(name: str) -> t.NoReturn: continue imported = importlib.import_module(module.name) - if module.ispkg: - if not inspect.isfunction(getattr(imported, "setup", None)): - # If it lacks a setup function, it's not an extension. - continue + if not inspect.isfunction(getattr(imported, "setup", None)): + # If it lacks a setup function, it's not an extension. + continue ext_metadata: ExtMetadata = getattr(imported, "EXT_METADATA", None) if ext_metadata is not None: - # check if this cog is dev only or plugin dev only - load_cog = bool(int(ext_metadata.load_if_mode) & BOT_MODE) - log.trace(f"Load cog {module.name!r}?: {load_cog}") - no_unload = ext_metadata.no_unload - yield module.name, (load_cog, no_unload) + if not isinstance(ext_metadata, ExtMetadata): + if ext_metadata == ExtMetadata: + log.info( + f"{module.name!r} seems to have passed the ExtMetadata class directly to " + "EXT_METADATA. Using defaults." 
+ ) + else: + log.error( + f"Extension {module.name!r} contains an invalid EXT_METADATA variable. " + "Loading with metadata defaults. Please report this bug to the developers." + ) + yield module.name, ExtMetadata() + continue + + log.debug(f"{module.name!r} contains a EXT_METADATA variable. Loading it.") + + yield module.name, ext_metadata continue - log.notice(f"Cog {module.name!r} is missing an EXT_METADATA variable. Assuming its a normal cog.") + log.notice( + f"Extension {module.name!r} is missing an EXT_METADATA variable. Assuming its a normal extension." + ) # Presume Production Mode/Metadata defaults if metadata var does not exist. - yield module.name, (ExtMetadata.load_if_mode, ExtMetadata.no_unload) + yield module.name, ExtMetadata() diff --git a/modmail/utils/plugins.py b/modmail/utils/plugins.py deleted file mode 100644 index ced30fd0..00000000 --- a/modmail/utils/plugins.py +++ /dev/null @@ -1,89 +0,0 @@ -# original source: -# https://github.com/python-discord/bot/blob/a8869b4d60512b173871c886321b261cbc4acca9/bot/utils/extensions.py -# MIT License 2021 Python Discord -""" -Helper utililites for managing plugins. - -TODO: Expand file to download plugins from github and gitlab from a list that is passed. 
-""" - - -import glob -import importlib -import importlib.util -import inspect -import logging -import typing as t -from pathlib import Path - -from modmail import plugins -from modmail.log import ModmailLogger -from modmail.utils.cogs import ExtMetadata -from modmail.utils.extensions import BOT_MODE, unqualify - - -log: ModmailLogger = logging.getLogger(__name__) - - -BASE_PATH = Path(plugins.__file__).parent.resolve() -PLUGIN_MODULE = "modmail.plugins" -PLUGINS: t.Dict[str, t.Tuple[bool, bool]] = dict() - - -def walk_plugins() -> t.Iterator[t.Tuple[str, bool]]: - """Yield plugin names from the modmail.plugins subpackage.""" - # walk all files in the plugins folder - # this is to ensure folder symlinks are supported, - # which are important for ease of development. - # NOTE: We are not using Pathlib's glob utility as it doesn't - # support following symlinks, see: https://bugs.python.org/issue33428 - for path in glob.iglob(f"{BASE_PATH}/**/*.py", recursive=True): - - log.trace("Path: {0}".format(path)) - - # calculate the module name, dervived from the relative path - relative_path = Path(path).relative_to(BASE_PATH) - name = relative_path.__str__().rstrip(".py").replace("/", ".") - name = PLUGIN_MODULE + "." + name - log.trace("Module name: {0}".format(name)) - - if unqualify(name.split(".")[-1]).startswith("_"): - # Ignore module/package names starting with an underscore. - continue - - # due to the fact that plugins are user generated and may not have gone through - # the testing that the bot has, we want to ensure we try/except any plugins - # that fail to import. - try: - # load the plugins using importlib - # this needs to be done like this, due to the fact that - # its possible a plugin will not have an __init__.py file - spec = importlib.util.spec_from_file_location(name, path) - imported = importlib.util.module_from_spec(spec) - spec.loader.exec_module(imported) - except Exception: - log.error( - "Failed to import {0}. 
As a result, this plugin is not considered installed.".format(name), - exc_info=True, - ) - continue - - if not inspect.isfunction(getattr(imported, "setup", None)): - # If it lacks a setup function, it's not a plugin. This is enforced by dpy. - log.trace("{0} does not have a setup function. Skipping.".format(name)) - continue - - ext_metadata: ExtMetadata = getattr(imported, "EXT_METADATA", None) - if ext_metadata is not None: - # check if this plugin is dev only or plugin dev only - load_cog = bool(int(ext_metadata.load_if_mode) & BOT_MODE) - log.trace(f"Load plugin {imported.__name__!r}?: {load_cog}") - yield imported.__name__, load_cog - continue - - log.info( - f"Plugin {imported.__name__!r} is missing a EXT_METADATA variable. Assuming its a normal plugin." - ) - - # Presume Production Mode/Metadata defaults if metadata var does not exist. - yield imported.__name__, ExtMetadata.load_if_mode diff --git a/poetry.lock b/poetry.lock index 0feff50a..c9c4dbdb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -518,6 +518,14 @@ requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] plugins = ["setuptools"] +[[package]] +name = "jarowinkler" +version = "1.0.2" +description = "library for fast approximate string matching using Jaro and Jaro-Winkler similarity" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "jinja2" version = "3.0.1" @@ -932,6 +940,20 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" py = "*" pytest = ">=3.10" +[[package]] +name = "pytest-raises" +version = "0.11" +description = "An implementation of pytest.raises as a pytest.mark fixture" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pytest = ">=3.2.2" + +[package.extras] +develop = ["pylint", "pytest-cov"] + [[package]] name = "pytest-sugar" version = "0.9.4" @@ -1005,6 +1027,20 @@ python-versions = ">=3.6" [package.dependencies] pyyaml = "*" +[[package]] +name = 
"rapidfuzz" +version = "2.0.10" +description = "rapid fuzzy string matching" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +jarowinkler = ">=1.0.2,<1.1.0" + +[package.extras] +full = ["numpy"] + [[package]] name = "requests" version = "2.26.0" @@ -1183,7 +1219,7 @@ yaml = ["PyYAML"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "6b08e56ec00c0ccae344ff7844344a4bc42143f66638f071992bdafaa81852fe" +content-hash = "754f65c9960f5dcf80bcc7a48859962cb63bd6c6806bebcc80df477d27989560" [metadata.files] aiodns = [ @@ -1567,6 +1603,88 @@ isort = [ {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"}, ] +jarowinkler = [ + {file = "jarowinkler-1.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:71772fcd787e0286b779de0f1bef1e0a25deb4578328c0fc633bc345f13ffd20"}, + {file = "jarowinkler-1.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:912ee0a465822a8d659413cebc1ab9937ac5850c9cd1e80be478ba209e7c8095"}, + {file = "jarowinkler-1.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0320f7187dced1ad413bf2c3631ec47567e65dfdea92c523aafb2c085ae15035"}, + {file = "jarowinkler-1.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58bc6a8f01b0dfdf3721f9a4954060addeccf8bbe5e72a71cf23a88ce0d30440"}, + {file = "jarowinkler-1.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:679ec7a42f70baa61f3a214d1b59cec90fc036021c759722075efcc8697e7b1f"}, + {file = "jarowinkler-1.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dde57d47962d6a4436d8a3b477bcc8233c6da28e675027eb3a490b0d6dc325be"}, + {file = "jarowinkler-1.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:657f50204970fac8f120c293e52a3451b742c9b26125010405ec7365cb6e2a49"}, + {file = "jarowinkler-1.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04f18a7398766b36ffbe4bcd26d34fcd6ed01f4f2f7eea13e316e6cca0e10c98"}, + {file = "jarowinkler-1.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:33a24b380e2c076eabf2d3e12eee56b6bf10b1f326444e18c36a495387dbf0de"}, + {file = "jarowinkler-1.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e1d7d6e6c98fb785026584373240cc4076ad21033f508973faae05e846206e8c"}, + {file = "jarowinkler-1.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e50c750a45c800d91134200d8cbf746258ed357a663e97cc0348ee42a948386a"}, + {file = "jarowinkler-1.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:5b380afce6cdc25a4dafd86874f07a393800577c05335c6ad67ccda41db95c60"}, + {file = "jarowinkler-1.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e73712747ac5d2218af3ed3c1600377f18a0a45af95f22c39576165aea2908b4"}, + {file = "jarowinkler-1.0.2-cp310-cp310-win32.whl", hash = "sha256:9511f4e1f00c822e08dbffeb69e15c75eb294a5f24729815a97807ecf03d22eb"}, + {file = "jarowinkler-1.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a5c44f92e9ac6088286292ecb69e970adc2b98e139b8923bce9bbb9d484e6a0f"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:02b0bf34ffc2995b695d9b10d2f18c1c447fbbdb7c913a84a0a48c186ccca3b8"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7a8e45176298a1210c06f8b2328030cc3c93a45dab068ac1fbc9cf075cd95b"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da27a9c206249a50701bfa5cfbbb3a04236e1145b2b0967e825438acb14269bf"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43ea0155379df92021af0f4a32253be3953dfa0f050ec3515f314b8f48a96674"}, + {file = 
"jarowinkler-1.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f33b6b1687db1be1abba60850628ee71547501592fcf3504e021274bc5ccb7a"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff304de32ee6acd5387103a0ad584060d8d419aa19cbbeca95204de9c4f01171"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:662dd6f59cca536640be0cda32c901989504d95316b192e6aa41d098fa08c795"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:01f85abb75fa43e98db34853d35570d98495ee2fcbbf45a93838e0289c162f19"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5b9332dcc8130af4101c9752a03e977c54b8c12982a2a3ca4c2e4cc542accc00"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:af765b037404a536c372e33ddd4c430aea28f1d82a8ef51a2955442b8b690577"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aea2c7d66b57c56d00f9c45ae7862d86e3ae84368ecea17f3552c0052a7f3bcf"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-win32.whl", hash = "sha256:8b1288a09a8d100e9bf7cf9ce1329433db73a0d0350d74c2c6f5c31ac69096cf"}, + {file = "jarowinkler-1.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ed39199b0e806902347473c65e5c05933549cf7e55ba628c6812782f2c310b19"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:473b057d7e5a0f5e5b8c0e0f7960d3ca2f2954c3c93fd7a9fb2cc4bc3cc940fb"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdb892dbbbd77b3789a10b2ce5e8acfe5821cc6423e835bae2b489159f3c2211"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:012a8333328ce061cba1ff081843c8d80eb1afe8fa2889ad29d767ea3fdc7562"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3421120c07ee6d3f59c5adde32eb9a050cfd1b3666b0e2d8c337d934a9d091f9"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dad57327cc90f8daa3afb98e2d274d7dd1b60651f32717449be95d3b3366d61a"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4fd1757eff43df97227fd63d9c8078582267a0b25cefef6f6a64d3e46e80ba2"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:32269ebbcb860f01c055d9bb145b4cc91990f62c7644a85b21458b4868621113"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3b5a0839e84f5ff914b01b5b94d0273954affce9cc2b2ee2c31fe2fcb9c8ae76"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6c9d3a9ef008428b5dce2855eebe2b6127ea7a7e433aedf240653fad4bd4baa6"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3d7759d8a66ee05595bde012f93da8a63499f38205e2bb47022c52bd6c47108"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2ba1b1b0bf45042a9bbb95d272fd8b0c559fe8f6806f088ec0372899e1bc6224"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-win32.whl", hash = "sha256:4cb33f4343774d69abf8cf65ad57919e7a171c44ba6ad57b08147c3f0f06b073"}, + {file = "jarowinkler-1.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0392b72ddb5ab5d6c1d5df94dbdac7bf229670e5e64b2b9a382d02d6158755e5"}, + {file = "jarowinkler-1.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:94f663ad85bc7a89d7e8b6048f93a46d2848a0570ab07fc895a239b9a5d97b93"}, + {file = "jarowinkler-1.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:895a10766ff3db15e7cf2b735e4277bee051eaafb437aaaef2c5de64a5c3f05c"}, + {file = "jarowinkler-1.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0c1a84e770b3ec7385a4f40efb30bdc96f96844564f91f8d3937d54a8969d82c"}, + {file = 
"jarowinkler-1.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27defe81d76e02b3929322baea999f5232837e7f308c2dc5b37de7568c2bc583"}, + {file = "jarowinkler-1.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:158f117481388f8d23fe4bd2567f37be0ccae0f4631c34e4b0345803147da207"}, + {file = "jarowinkler-1.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:427c675b4f3e83c79a4b6af7441f29e30a173c7a0ae72a54f51090eee7a8ae02"}, + {file = "jarowinkler-1.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90a7f3fd173339bc62e52c02f43d50c947cb3af9cda41646e218aea13547e0c2"}, + {file = "jarowinkler-1.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3975cbe8b6ae13fc63d74bcbed8dac1577078d8cd8728e60621fe75885d2a8c5"}, + {file = "jarowinkler-1.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:141840f33345b00abd611839080edc99d4d31abd2dcf701a3e50c90f9bfb2383"}, + {file = "jarowinkler-1.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f592f9f6179e347a5f518ca7feb9bf3ac068f2fad60ece5a0eef5e5e580d4c8b"}, + {file = "jarowinkler-1.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:30565d70396eb9d1eb622e1e707ddc2f3b7a9692558b8bf4ea49415a5ca2f854"}, + {file = "jarowinkler-1.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:35fc430c11b80a43ed826879c78c4197ec665d5150745b3668bec961acf8a757"}, + {file = "jarowinkler-1.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf4b7090f0c4075bec1638717f54b22c3b0fe733dc87146a19574346ed3161"}, + {file = "jarowinkler-1.0.2-cp38-cp38-win32.whl", hash = "sha256:199f4f7edbc49439a97440caa1e244d2e33da3e16d7b0afce4e4dfd307e555c7"}, + {file = "jarowinkler-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:b587e8fdd96cc470d6bdf428129c65264731b09b5db442e2d092e983feec4aab"}, + {file = "jarowinkler-1.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4b233180b3e2f2d7967aa570d36984e9d2ec5a9067c0d1c44cd3b805d9da9363"}, + {file = "jarowinkler-1.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2220665a1f52262ae8b76e3baf474ebcd209bfcb6a7cada346ffd62818f5aa3e"}, + {file = "jarowinkler-1.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08c98387e04e749c84cc967db628e5047843f19f87bf515a35b72f7050bc28ad"}, + {file = "jarowinkler-1.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d710921657442ad3c942de684aba0bdf16b7de5feed3223b12f3b2517cf17f7c"}, + {file = "jarowinkler-1.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:401c02ac7245103826f54c816324274f53d50b638ab0f8b359a13055a7a6e793"}, + {file = "jarowinkler-1.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a1929a0029f208cc9244499dc93b4d52ee8e80d2849177d425cf6e0be1ea781"}, + {file = "jarowinkler-1.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab25d147be9b04e7de2d28a18e72fadc152698c3e51683c6c61f73ffbae2f9e"}, + {file = "jarowinkler-1.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:465cfdff355ec9c55f65fd1e1315260ec20c8cff0eb90d9f1a0ad8d503dc002b"}, + {file = "jarowinkler-1.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:29ef1113697cc74c2f04bc15008abbd726cb2d5b01c040ba87c6cb7abd1d0e0d"}, + {file = "jarowinkler-1.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:61b57c8b36361ec889f99f761441bb0fa21b850a5eb3305dea25fef68f6a797b"}, + {file = "jarowinkler-1.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ee9d9af1bbf194d78f4b69c2139807c23451068b27a053a1400d683d6f36c61d"}, + {file = "jarowinkler-1.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a9b33b0ceb472bbc65683467189bd032c162256b2a137586ee3448a9f8f886ec"}, + {file = "jarowinkler-1.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:582f6e213a6744883ced44482a51efcc21ae632defac27f12f6430a8e99b1070"}, + {file = "jarowinkler-1.0.2-cp39-cp39-win32.whl", hash = "sha256:4d1c8f403016d5c0262de7a8588eee370c37a609e1f529f8407e99a70d020af7"}, + {file = "jarowinkler-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:ab50ffa66aa201616871c1b90ac0790f56666118db3c8a8fcb3a7a6e03971510"}, + {file = "jarowinkler-1.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8e59a289dcf93504ab92795666c39b2dbe98ac18655201992a7e6247de676bf4"}, + {file = "jarowinkler-1.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c36eccdc866f06a7b35da701bd8f91e0dfc83b35c07aba75ce8c906cbafaf184"}, + {file = "jarowinkler-1.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123163f01a5c43f12e4294e7ce567607d859e1446b1a43bd6cd404b3403ffa07"}, + {file = "jarowinkler-1.0.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41fdecd907189e47c7d478e558ad417da38bf3eb34cc20527035cb3fca3e2b8"}, + {file = "jarowinkler-1.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7829368fc91de225f37f6325f8d8ec7ad831dc5b0e9547f1977e2fdc85eccc1"}, + {file = "jarowinkler-1.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278595417974553a8fdf3c8cce5c2b4f859335344075b870ecb55cc416eb76cf"}, + {file = "jarowinkler-1.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:208fc49741db5d3e6bbd4a2f7b32d32644b462bf205e7510eca4e2d530225f03"}, + {file = "jarowinkler-1.0.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:924afcab6739c453f1c3492701d185d71dc0e5ba15692bd0bfa6d482c7e8f79e"}, + {file = "jarowinkler-1.0.2.tar.gz", hash = "sha256:788ac33e6ffdbd78fd913b481e37cfa149288575f087a1aae1a4ce219cb1c654"}, +] jinja2 = [ {file = "Jinja2-3.0.1-py3-none-any.whl", hash = 
"sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, @@ -1891,6 +2009,10 @@ pytest-forked = [ {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, ] +pytest-raises = [ + {file = "pytest-raises-0.11.tar.gz", hash = "sha256:f64a4dbcb5f89c100670fe83d87a5cd9d956586db461c5c628f7eb94b749c90b"}, + {file = "pytest_raises-0.11-py2.py3-none-any.whl", hash = "sha256:33a1351f2debb9f74ca6ef70e374899f608a1217bf13ca4a0767f37b49e9cdda"}, +] pytest-sugar = [ {file = "pytest-sugar-0.9.4.tar.gz", hash = "sha256:b1b2186b0a72aada6859bea2a5764145e3aaa2c1cfbb23c3a19b5f7b697563d3"}, ] @@ -1941,6 +2063,54 @@ pyyaml-env-tag = [ {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] +rapidfuzz = [ + {file = "rapidfuzz-2.0.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d7297eeeb8b50aeeef81167c3cb34994adcf86f8d6bf0c9ea06fb566540c878d"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:263b7b9b7f9f68a255f2dc37c28c2213ae03903f5650dbdd4a0e1b44609ed222"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6738210e4dec381f41513e41eaf3a87124188dfab836459c6b392b121244a0f"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3421c6b1dd36a4a5f9e4c323b9e3116b178430ab769c61bce77e7aa85c53575"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bbbb8e0abd7944254f62e510ed13fa9b5189c11c8247a77d5c7dc12cd58c20f1"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8d5c2d593557da37632bc5c84b05cff69b14bb8255210fa27183e35b848542"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-win32.whl", hash = "sha256:db88723b83436b7188ad3f02f53d67ff78fbdb0e6a0b129cd7f51d18ebf52da6"}, + {file = "rapidfuzz-2.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:270194fc82f055fc4fb63ce0550d9bb384540aef699218df330a30c24ce7546f"}, + {file = "rapidfuzz-2.0.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cea0dea5e79219777347a7c83c7953bc6ed3fc73d4ede0a931ea3362e99de0bd"}, + {file = "rapidfuzz-2.0.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0457b4a652484dc8b39b52e56b0d7ada2550b262df4e52a504db3b34f060ea6"}, + {file = "rapidfuzz-2.0.10-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d9dd138c0f7edc48fd16ac8ad5b14c0c47f502e40f86979d54205b9b24e4d9f"}, + {file = "rapidfuzz-2.0.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a28b4cb76c422c30aff221a14009bbfed7134b3305966817970a0ad83ca1ca"}, + {file = "rapidfuzz-2.0.10-cp36-cp36m-win32.whl", hash = "sha256:1dc06e50fb5410d2b3f607767ab6fc1dd8b9a559d40e0099a8f8f73d9d4d3db3"}, + {file = "rapidfuzz-2.0.10-cp36-cp36m-win_amd64.whl", hash = "sha256:8d5ebda761193087d19606cd8026c7d3aa528ed13f4bc98ceecdd6da1d55fb20"}, + {file = "rapidfuzz-2.0.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ca08437f42047a3e8b1aecd39ba06debf063bc171d8375f0ddfa9b6a507853e8"}, + {file = "rapidfuzz-2.0.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fe107338825eadcda36ad3f50fe042e9e26018592af7c8ff3b4d16275f5fd01"}, + {file = "rapidfuzz-2.0.10-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86fbbabd4176efb3e289cff65192a71e74475866c5738ae2039212c3b2db25cd"}, + {file = 
"rapidfuzz-2.0.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8156f7d8d7441c2bcb84ed9b5a873f5eee044fbdb3c1f599926a36663d323447"}, + {file = "rapidfuzz-2.0.10-cp37-cp37m-win32.whl", hash = "sha256:89482d7436b3a1768e330c952c5346bb777f1969858284f2a6dcfb1c7d47f51d"}, + {file = "rapidfuzz-2.0.10-cp37-cp37m-win_amd64.whl", hash = "sha256:db21778d648fa1805cea122b156c4727d3c6d2baf6ff0794af1794d17941512b"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:48b92a056246adac535d66e34ae7f5b9ed52654962f90d39c94fcb11dbeb6f0c"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de02ce4d7e241f3fcfba3227e1f9665f82b9af38c5d36190df3a247fb2461411"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8748f89974916b15e8d70c0ff7097e2656f3aa89cbeaa810e69b07819481f84c"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1400e029195f511788675144f1aab01de43aae7d3f5ec683f263ee13b47f6b16"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd8fdd75ad347b35eef562f39f5f8ad8c9784c5d3890bf49ecc24f5c1e3d16c1"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84391cd3fa17a6655abd83961f4720806b64f090dbc46426ed4841b410dbc841"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-win32.whl", hash = "sha256:3a0dd9a837288a65a74a819b0d6f0d139daeb7f6155c3158f6eedd0af1e6d021"}, + {file = "rapidfuzz-2.0.10-cp38-cp38-win_amd64.whl", hash = "sha256:16e69fcc7009659ee8459a9ad4605651b1cc7071e64698da1a5568f473c0ee3f"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9f0daa98b6f9d69811d64cb2277209c56ba7b68e5f50d6903795a2b0a2a4d9c2"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c3e7e5489fe1915950a663c8f6c572aa390765db96a171f36215af2d4bb19a6b"}, + {file = 
"rapidfuzz-2.0.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a5d9bf9d03bc64720dc0ad4a10b8c1fb0326bc6883d3411181a519a3ccdf779"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8495ec199402ffa5b6b8c724675e1c0fb7e5a6617ea3c90323bb56449df6b36d"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13e584cd7879e8528913a30d69d83cf88198287a7557435361f31f794a349878"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7aa0510c2291751d3bd90b38cf11e9c60cda41766927a25b169698fc2c2689"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-win32.whl", hash = "sha256:a6fd46fe173a5bf7ec85819a1d2bb343303bd5b28a80671ce886b97f3c669ea9"}, + {file = "rapidfuzz-2.0.10-cp39-cp39-win_amd64.whl", hash = "sha256:be743ec34a7f88255c6735780b35578c03a6192ee2f9b325493ed736b0ab2cf3"}, + {file = "rapidfuzz-2.0.10-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:148e931c28aa09532c99db2f30f01a88eed5a065c9f9ed119c5b915994582054"}, + {file = "rapidfuzz-2.0.10-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16fbc2686eb9310ebcd77eb819743b541cd1dd2b83f555e0eaf356615452eb89"}, + {file = "rapidfuzz-2.0.10-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a49b857c1d521691d64bbe085cc31f7b397853901acf0eb06b799f570dfbd3"}, + {file = "rapidfuzz-2.0.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ac7864b52714ef183d37c9fe50da806ad81bdb47f72bbe3e7c629932af62c66"}, + {file = "rapidfuzz-2.0.10-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5d72407122e7c4131aaf8ddb37cd05496d80418796a57bf90db976d511a74c"}, + {file = "rapidfuzz-2.0.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2402b91631c5c8e48055a8551692b313f6422fece403e2a8020ecbcafef140a7"}, + {file = 
"rapidfuzz-2.0.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:628cfa67d48d2fcc9a97ed2612ae166395862fb2aea3a810d5d341c3d3490f29"}, + {file = "rapidfuzz-2.0.10-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fea35a888cdd8f2db5d7ebb02436ac4892ce1eaa33e2b090b29bdead4cc41f6"}, + {file = "rapidfuzz-2.0.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c00164fc33f2b64cb7cc33f1fb714924e1eaecd0ce92b8f68b2891072910082"}, + {file = "rapidfuzz-2.0.10.tar.gz", hash = "sha256:6c8fe3051dce837c4deb080b438b38efc8268e1c14b9e6a64b173b35f4e32773"}, +] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, diff --git a/pyproject.toml b/pyproject.toml index 681d3940..4b8fb150 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,14 +16,15 @@ packages = [{ include = "modmail" }] python = "^3.8" aiohttp = { extras = ["speedups"], version = "^3.7.4" } arrow = "^1.1.1" +atoml = "^1.0.3" colorama = "^0.4.3" coloredlogs = "^15.0" "discord.py" = { url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip" } -python-dotenv = "^0.19.2" -atoml = "^1.0.3" attrs = "^21.2.0" desert = "^2020.11.18" marshmallow = "~=3.13.0" +rapidfuzz = "^2.0.10" +python-dotenv = "^0.19.2" PyYAML = { version = "^5.4.1", optional = true } typing-extensions = "^3.10.0.2" marshmallow-enum = "^1.5.1" @@ -56,6 +57,7 @@ pytest = "^6.2.4" pytest-asyncio = "^0.15.1" pytest-cov = "^3.0.0" pytest-dependency = "^0.5.1" +pytest-raises = "^0.11" pytest-sugar = "^0.9.4" pytest-xdist = { version = "^2.3.0", extras = ["psutil"] } # Documentation @@ -74,6 +76,9 @@ branch = true source_pkgs = ['modmail', 'tests'] omit = ["modmail/plugins/**.*"] +[tool.coverage.report] +exclude_lines = ["if 
TYPE_CHECKING", "pragma: no cover"] + [tool.pytest.ini_options] addopts = "--cov --cov-report=" minversion = "6.0" diff --git a/requirements.txt b/requirements.txt index 697589ef..eb152261 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,6 +17,7 @@ desert==2020.11.18 ; python_version >= "3.6" discord.py @ https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip ; python_full_version >= "3.8.0" humanfriendly==10.0 ; python_version >= "2.7" and python_version != "3.0" and python_version != "3.1" and python_version != "3.2" and python_version != "3.3" and python_version != "3.4" idna==3.2 ; python_version >= "3.5" +jarowinkler==1.0.2 ; python_version >= "3.6" marshmallow-enum==1.5.1 marshmallow==3.13.0 ; python_version >= "3.5" multidict==5.2.0 ; python_version >= "3.6" @@ -27,6 +28,7 @@ pyreadline3==3.3 ; sys_platform == "win32" python-dateutil==2.8.2 ; python_version != "3.0" python-dotenv==0.19.2 ; python_version >= "3.5" pyyaml==5.4.1 ; python_version >= "2.7" and python_version != "3.0" and python_version != "3.1" and python_version != "3.2" and python_version != "3.3" and python_version != "3.4" and python_version != "3.5" +rapidfuzz==2.0.10 ; python_version >= "3.6" six==1.16.0 ; python_version >= "2.7" and python_version != "3.0" and python_version != "3.1" and python_version != "3.2" typing-extensions==3.10.0.2 typing-inspect==0.7.1 diff --git a/scripts/_utils.py b/scripts/_utils.py new file mode 100644 index 00000000..c305b405 --- /dev/null +++ b/scripts/_utils.py @@ -0,0 +1,85 @@ +"""Utility functions and variables which are useful for all scripts.""" +import difflib +import importlib.util +import os +import pathlib +import typing + + +MODMAIL_DIR = pathlib.Path(importlib.util.find_spec("modmail").origin).parent +PROJECT_DIR = MODMAIL_DIR.parent +try: + import pygments +except ModuleNotFoundError: + pygments = None +else: + from pygments.formatters import Terminal256Formatter + from pygments.lexers.diff import 
DiffLexer + + +class CheckFileEdit: + """Check if a file is edited within the body of this class.""" + + def __init__(self, *files: os.PathLike): + self.files: typing.List[pathlib.Path] = [] + for f in files: + self.files.append(pathlib.Path(f)) + self.return_value: typing.Optional[int] = None + self.edited_files: typing.Dict[pathlib.Path] = {} + + def __enter__(self): + self.file_contents = {} + for file in self.files: + try: + with open(file, "r") as f: + self.file_contents[file] = f.readlines() + except FileNotFoundError: + self.file_contents[file] = None + return self + + def __exit__(self, exc_type, exc_value, exc_traceback): # noqa: ANN001 + for file in self.files: + with open(file, "r") as f: + original_contents = self.file_contents[file] + new_contents = f.readlines() + if original_contents != new_contents: + # construct a diff + diff = difflib.unified_diff( + original_contents, new_contents, fromfile="before", tofile="after" + ) + try: + diff = "".join(diff) + except TypeError: + diff = None + else: + if pygments is not None: + diff = pygments.highlight(diff, DiffLexer(), Terminal256Formatter()) + self.edited_files[file] = diff + + def write(self, path: str, contents: typing.Union[str, bytes], *, force: bool = False, **kwargs) -> bool: + """ + Write to the provided path with contents. Must be within the context manager. + + Returns False if contents are not edited, True if they are. + If force is True, will modify the files even if the contents match. 
+ + Any extras kwargs are passed to open() + """ + path = pathlib.Path(path) + if path not in self.files: + raise AssertionError(f"{path} must have been passed to __init__") + + if not force: + try: + with open(path, "r") as f: + if contents == f.read(): + return False + except FileNotFoundError: + pass + if isinstance(contents, str): + contents = contents.encode() + + with open(path, "wb") as f: + f.write(contents) + + return True diff --git a/scripts/export_requirements.py b/scripts/export_requirements.py index 6439db85..a4eb1357 100644 --- a/scripts/export_requirements.py +++ b/scripts/export_requirements.py @@ -16,9 +16,12 @@ import tomli +from ._utils import PROJECT_DIR, CheckFileEdit -GENERATED_FILE = pathlib.Path("requirements.txt") -DOC_REQUIREMENTS = pathlib.Path("docs/.requirements.txt") + +GENERATED_FILE = PROJECT_DIR / "requirements.txt" +CONSTRAINTS_FILE = PROJECT_DIR / "modmail/constraints.txt" +DOC_REQUIREMENTS = PROJECT_DIR / "docs/.requirements.txt" VERSION_RESTRICTER_REGEX = re.compile(r"(?P[<>=!]{1,2})(?P\d+\.\d+?)(?P\.\d+?|\.\*)?") PLATFORM_MARKERS_REGEX = re.compile(r'sys_platform\s?==\s?"(?P\w+)"') @@ -56,6 +59,24 @@ def get_hash(content: dict) -> str: return hash == get_hash(content) +def _write_file(path: os.PathLike, contents: str, skip_if_identical: bool = True) -> bool: + """ + Write to a supplied file. + + If skip_if_equal is True, will not write if the contents will not change. 
(Default: True) + """ + path = pathlib.Path(path) + if path.exists(): + with open(path, "r") as f: + if contents == f.read(): + # nothing to edit + return False + + with open(path, "w") as f: + f.write(contents) + return True + + def _extract_packages_from_requirements(requirements: str) -> "tuple[set[str],list[str]]": """Extract a list of packages from the provided requirements str.""" req = requirements.split("\n") @@ -109,6 +130,7 @@ def _export_doc_requirements(toml: dict, file: pathlib.Path, *packages) -> int: file = pathlib.Path(file) if not file.exists(): # file does not exist + print(f"{file.relative_to(PROJECT_DIR)!s} must exist to export doc requirements") return 2 with open(file) as f: @@ -130,19 +152,25 @@ def _export_doc_requirements(toml: dict, file: pathlib.Path, *packages) -> int: except AttributeError as e: print(e) return 3 - if new_contents == contents: - # don't write anything, just return 0 - return 0 - with open(file, "w") as f: - f.write(new_contents) + with CheckFileEdit(file) as check_file: - return 1 + check_file.write(file, new_contents) + + for file, diff in check_file.edited_files.items(): + print( + f"Exported new documentation requirements to {file.relative_to(PROJECT_DIR)!s}.", + file=sys.stderr, + ) + print(diff or "No diff to show.") + print() -def main( +def export( req_path: os.PathLike, should_validate_hash: bool = True, + *, + include_markers: bool = True, export_doc_requirements: bool = True, ) -> int: """Read and export all required packages to their pinned version in requirements.txt format.""" @@ -188,54 +216,58 @@ def main( line += "==" line += dep["version"] - if (pyvers := dep["python-versions"]) != "*": - # TODO: add support for platform and python combined version markers - line += " ; " - final_version_index = pyvers.count(", ") - for count, version in enumerate(pyvers.split(", ")): - match = VERSION_RESTRICTER_REGEX.match(version) - - if (patch := match.groupdict().get("patch", None)) is not None and not 
patch.endswith("*"): - version_kind = "python_full_version" - else: - version_kind = "python_version" - - patch = patch if patch is not None else "" - patch = patch if not patch.endswith("*") else "" - line += version_kind + " " - line += match.group("sign") + " " - line += '"' + match.group("version") + patch + '"' - line += " " - if count < final_version_index: - line += "and " - - if (dep_deps := dep.get("dependencies", None)) is not None: - - for k, v in copy.copy(dep_deps).items(): - if hasattr(v, "get") and v.get("markers", None) is not None: - pass - else: - del dep_deps[k] - if len(dep_deps): - to_add_markers.update(dep_deps) + if include_markers: + if (pyvers := dep["python-versions"]) != "*": + # TODO: add support for platform and python combined version markers + line += " ; " + final_version_index = pyvers.count(", ") + for count, version in enumerate(pyvers.split(", ")): + match = VERSION_RESTRICTER_REGEX.match(version) + + if (patch := match.groupdict().get("patch", None)) is not None and not patch.endswith( + "*" + ): + version_kind = "python_full_version" + else: + version_kind = "python_version" + + patch = patch if patch is not None else "" + patch = patch if not patch.endswith("*") else "" + line += version_kind + " " + line += match.group("sign") + " " + line += '"' + match.group("version") + patch + '"' + line += " " + if count < final_version_index: + line += "and " + + if (dep_deps := dep.get("dependencies", None)) is not None: + + for k, v in copy.copy(dep_deps).items(): + if hasattr(v, "get") and v.get("markers", None) is not None: + pass + else: + del dep_deps[k] + if len(dep_deps): + to_add_markers.update(dep_deps) dependency_lines[dep["name"]] = line - # add the sys_platform lines - # platform markers only matter based on what requires the dependency - # in order to support these properly, they have to be added to an already existing line - # for example, humanfriendly requires pyreadline on windows only, - # so sys_platform == win needs 
to be added to pyreadline - for k, v in to_add_markers.items(): - line = dependency_lines[k] - markers = PLATFORM_MARKERS_REGEX.match(v["markers"]) - if markers is not None: - if ";" not in line: - line += " ; " - elif "python_" in line or "sys_platform" in line: - line += "and " - line += 'sys_platform == "' + markers.group("platform") + '"' - dependency_lines[k] = line + if include_markers: + # add the sys_platform lines + # platform markers only matter based on what requires the dependency + # in order to support these properly, they have to be added to an already existing line + # for example, humanfriendly requires pyreadline on windows only, + # so sys_platform == win needs to be added to pyreadline + for k, v in to_add_markers.items(): + line = dependency_lines[k] + markers = PLATFORM_MARKERS_REGEX.match(v["markers"]) + if markers is not None: + if ";" not in line: + line += " ; " + elif "python_" in line or "sys_platform" in line: + line += "and " + line += 'sys_platform == "' + markers.group("platform") + '"' + dependency_lines[k] = line req_txt += "\n".join(sorted(k + v.rstrip() for k, v in dependency_lines.items())) + "\n" @@ -244,18 +276,25 @@ def main( else: exit_code = 0 - if req_path.exists(): - with open(req_path, "r") as f: - if req_txt == f.read(): - # nothing to edit - # if exit_code is ever removed from here, this should return zero - return exit_code + with CheckFileEdit(req_path) as check_file: + check_file.write(req_path, req_txt) + + for file, diff in check_file.edited_files.items(): + print( + f"Exported new requirements to {file.relative_to(PROJECT_DIR)}.", + file=sys.stderr, + ) + print(diff or "No diff to show.") + print() + return bool(len(check_file.edited_files)) or exit_code - with open(req_path, "w") as f: - f.write(req_txt) - print(f"Updated {req_path} with new requirements.") - return 1 +def main(path: os.PathLike, include_markers: bool = True, **kwargs) -> int: + """Export a requirements.txt and constraints.txt file.""" + if not 
include_markers: + path = path or CONSTRAINTS_FILE + kwargs["include_markers"] = include_markers + return export(path, **kwargs) if __name__ == "__main__": @@ -287,10 +326,12 @@ def main( ) args = parser.parse_args() + # I am aware that the second method will only run if the first method returns 0. This is intended. sys.exit( main( args.output_file, should_validate_hash=not args.skip_hash_check, export_doc_requirements=args.export_doc_requirements, ) + or main(CONSTRAINTS_FILE, include_markers=False, should_validate_hash=not args.skip_hash_check) ) diff --git a/tests/modmail/addons/__init__.py b/tests/modmail/addons/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/modmail/addons/test_converters.py b/tests/modmail/addons/test_converters.py new file mode 100644 index 00000000..75803e54 --- /dev/null +++ b/tests/modmail/addons/test_converters.py @@ -0,0 +1,224 @@ +from __future__ import annotations + +from typing import Optional + +import pytest +from discord.ext.commands.errors import BadArgument + +from modmail.addons.converters import ( + REPO_REGEX, + ZIP_REGEX, + AddonConverter, + SourceAndPluginConverter, + SourceTypeEnum, +) + + +@pytest.mark.asyncio +async def test_converter() -> None: + """Convert a user input into a Source.""" + with pytest.raises(NotImplementedError): + addon = await AddonConverter().convert(None, "github") # noqa: F841 + + +# fmt: off +@pytest.mark.parametrize( + "entry, user, repo, addon, reflike, githost", + [ + ( + "onerandomusername/addons planet", + "onerandomusername", "addons", "planet", None, None, + ), + ( + "github onerandomusername/addons planet @master", + "onerandomusername", "addons", "planet", "master", "github", + ), + ( + "gitlab onerandomusername/repo planet @v1.0.2", + "onerandomusername", "repo", "planet", "v1.0.2", "gitlab", + ), + ( + "github onerandomusername/repo planet @master", + "onerandomusername", "repo", "planet", "master", "github", + ), + ( + "github onerandomusername/repo planet 
@bad-toml", + "onerandomusername", "repo", "planet", "bad-toml", "github", + ), + ( + "gitlab onerandomusername/repo planet @main", + "onerandomusername", "repo", "planet", "main", "gitlab", + ), + ( + "https://github.com/onerandomusername/repo planet", + "onerandomusername", "repo", "planet", None, "github", + ), + ( + "https://gitlab.com/onerandomusername/repo planet", + "onerandomusername", "repo", "planet", None, "gitlab", + ), + ( + "https://github.com/psf/black black @21.70b", + "psf", "black", "black", "21.70b", "github", + ) + ], +) +@pytest.mark.dependency(name="repo_regex") +# fmt: on +def test_repo_regex( + entry: str, user: str, repo: str, addon: str, reflike: Optional[str], githost: Optional[str] +) -> None: + """Test the repo regex to ensure that it matches what it should.""" + match = REPO_REGEX.fullmatch(entry) + assert match is not None + assert match.group("user") == user + assert match.group("repo") == repo + assert match.group("addon") == addon + assert match.group("reflike") or None == reflike # noqa: E711 + assert match.group("githost") == githost + + +# fmt: off +@pytest.mark.parametrize( + "entry, url, domain, path, addon", + [ + ( + "https://github.com/onerandomusername/modmail-addons/archive/main.zip planet", + "github.com/onerandomusername/modmail-addons/archive/main.zip", + "github.com", + "onerandomusername/modmail-addons/archive/main.zip", + "planet", + ), + ( + "https://gitlab.com/onerandomusername/modmail-addons/-/archive/main/modmail-addons-main.zip earth", # noqa: E501 + "gitlab.com/onerandomusername/modmail-addons/-/archive/main/modmail-addons-main.zip", + "gitlab.com", + "onerandomusername/modmail-addons/-/archive/main/modmail-addons-main.zip", + "earth", + ), + ( + "https://example.com/bleeeep.zip myanmar", + "example.com/bleeeep.zip", + "example.com", + "bleeeep.zip", + "myanmar", + + ), + ( + "http://github.com/discord-modmail/addons/archive/bast.zip thebot", + "github.com/discord-modmail/addons/archive/bast.zip", + 
"github.com", + "discord-modmail/addons/archive/bast.zip", + "thebot", + ), + ( + "rtfd.io/plugs.zip documentation", + "rtfd.io/plugs.zip", + "rtfd.io", + "plugs.zip", + "documentation", + ), + ( + "pages.dev/hiy.zip black", + "pages.dev/hiy.zip", + "pages.dev", + "hiy.zip", + "black", + ), + ] +) +# fmt: on +@pytest.mark.dependency(name="zip_regex") +def test_zip_regex(entry: str, url: str, domain: str, path: str, addon: str) -> None: + """Test the repo regex to ensure that it matches what it should.""" + match = ZIP_REGEX.fullmatch(entry) + assert match is not None + assert match.group("url") == url + assert match.group("domain") == domain + assert match.group("path") == path + assert match.group("addon") == addon + + +# fmt: off +@pytest.mark.parametrize( + "entry, name, source_type", + [ + ( + "onerandomusername/addons planet", + "planet", SourceTypeEnum.REPO + ), + ( + "github onerandomusername/addons planet @master", + "planet", SourceTypeEnum.REPO + ), + ( + "gitlab onerandomusername/repo planet @v1.0.2", + "planet", SourceTypeEnum.REPO + ), + ( + "github onerandomusername/repo planet @master", + "planet", SourceTypeEnum.REPO + ), + ( + "gitlab onerandomusername/repo planet @main", + "planet", SourceTypeEnum.REPO + ), + ( + "https://github.com/onerandomusername/repo planet", + "planet", SourceTypeEnum.REPO + ), + ( + "https://gitlab.com/onerandomusername/repo planet", + "planet", SourceTypeEnum.REPO + ), + ( + "https://github.com/psf/black black @21.70b", + "black", SourceTypeEnum.REPO + ), + ( + "github.com/onerandomusername/modmail-addons/archive/main.zip earth", + "earth", SourceTypeEnum.ZIP + ), + ( + "https://github.com/onerandomusername/modmail-addons/archive/main.zip planet", + "planet", SourceTypeEnum.ZIP + ), + ( + "https://gitlab.com/onerandomusername/modmail-addons/-/archive/main/modmail-addons-main.zip earth", # noqa: E501 + "earth", SourceTypeEnum.ZIP + ), + ( + "https://example.com/bleeeep.zip myanmar", + "myanmar", SourceTypeEnum.ZIP + ), + ( 
+ "http://github.com/discord-modmail/addons/archive/bast.zip thebot", + "thebot", SourceTypeEnum.ZIP + ), + ( + "rtfd.io/plugs.zip documentation", + "documentation", SourceTypeEnum.ZIP + ), + ( + "pages.dev/hiy.zip black", + "black", SourceTypeEnum.ZIP + ), + ( + "@local earth", + "earth", SourceTypeEnum.LOCAL + ), + pytest.param( + "the world exists.", + None, None, + marks=pytest.mark.raises(exception=BadArgument) + ), + ], +) +@pytest.mark.dependency(depends_on=["repo_regex", "zip_regex"]) +@pytest.mark.asyncio +# fmt: on +async def test_plugin_with_source_converter(entry: str, name: str, source_type: SourceTypeEnum) -> None: + """Test the Plugin converter works, and successfully converts a plugin with its source.""" + plugin, source = await SourceAndPluginConverter().convert(None, entry) + assert plugin.name == name + assert source.source_type == source_type diff --git a/tests/modmail/addons/test_models.py b/tests/modmail/addons/test_models.py new file mode 100644 index 00000000..e1814cac --- /dev/null +++ b/tests/modmail/addons/test_models.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from typing import Optional + +import pytest + +from modmail.addons.models import Addon, AddonSource, Plugin, SourceTypeEnum + + +def test_addon_model() -> None: + """All addons will be of a specific type, so we should not be able to create a generic addon.""" + with pytest.raises(NotImplementedError, match="Inheriting classes need to implement their own init"): + Addon() + + +@pytest.mark.parametrize( + "zip_url, source_type", + [ + ("github.com/bast0006.zip", SourceTypeEnum.ZIP), + ("gitlab.com/onerandomusername.zip", SourceTypeEnum.REPO), + (None, SourceTypeEnum.LOCAL), + ], +) +def test_addonsource_init(zip_url: str, source_type: SourceTypeEnum) -> None: + """Test the AddonSource init sets class vars appropiately.""" + addonsrc = AddonSource(zip_url, source_type) + assert addonsrc.zip_url == zip_url + assert addonsrc.source_type == source_type + + 
+@pytest.mark.parametrize( + "user, repo, reflike, githost", + [ + ("onerandomusername", "addons", None, "github"), + ("onerandomusername", "addons", "master", "github"), + ("onerandomusername", "repo", "v1.0.2", "gitlab"), + ("onerandomusername", "repo", "master", "github"), + ("onerandomusername", "repo", "main", "gitlab"), + ("onerandomusername", "repo", None, "github"), + ("onerandomusername", "repo", None, "gitlab"), + ("psf", "black", "21.70b", "github"), + ], +) +def test_addonsource_from_repo(user: str, repo: str, reflike: Optional[str], githost: str) -> None: + """Test an addon source is properly made from repository information.""" + src = AddonSource.from_repo(user, repo, reflike, githost) + assert src.user == user + assert src.repo == repo + assert src.reflike == reflike + assert src.githost == githost + assert src.source_type == SourceTypeEnum.REPO + + +@pytest.mark.parametrize( + "url", + [ + ("github.com/onerandomusername/modmail-addons/archive/main.zip"), + ("gitlab.com/onerandomusername/modmail-addons/-/archive/main/modmail-addons-main.zip"), + ("example.com/bleeeep.zip"), + ("github.com/discord-modmail/addons/archive/bast.zip"), + ("rtfd.io/plugs.zip"), + ("pages.dev/hiy.zip"), + ], +) +def test_addonsource_from_zip(url: str) -> None: + """Test an addon source is properly made from a zip url.""" + src = AddonSource.from_zip(url) + assert src.zip_url == url + assert src.source_type == SourceTypeEnum.ZIP + + +class TestPlugin: + """Test the Plugin class creation.""" + + @pytest.mark.parametrize("folder", [("earth"), ("mona-lisa")]) + def test_plugin_init(self, folder: str) -> None: + """Create a plugin model, and ensure it has the right properties.""" + plugin = Plugin(folder) + assert isinstance(plugin, Plugin) + assert plugin.folder_name == folder diff --git a/tests/modmail/addons/test_plugins.py b/tests/modmail/addons/test_plugins.py new file mode 100644 index 00000000..e24b412d --- /dev/null +++ b/tests/modmail/addons/test_plugins.py @@ -0,0 
+1,69 @@ +from __future__ import annotations + +import unittest.mock +from copy import copy + +import pytest + +from modmail.addons.models import Plugin +from modmail.addons.plugins import PLUGINS as GLOBAL_PLUGINS +from modmail.addons.plugins import find_plugins, parse_plugin_toml_from_string +from tests import mocks + + +pytestmark = pytest.mark.usefixtures("reroute_plugins") + + +VALID_PLUGIN_TOML = """ +[[plugins]] +name = "Planet" +folder = "planet" +description = "Planet. Tells you which planet you are probably on." +min_bot_version = "v0.2.0" +""" + + +@pytest.mark.parametrize( + "toml, name, folder, description, min_bot_version", + [ + ( + VALID_PLUGIN_TOML, + "Planet", + "planet", + "Planet. Tells you which planet you are probably on.", + "v0.2.0", + ) + ], +) +def test_parse_plugin_toml_from_string( + toml: str, name: str, folder: str, description: str, min_bot_version: str +) -> None: + """Make sure that a plugin toml file is correctly parsed.""" + plugs = parse_plugin_toml_from_string(toml) + plug = plugs[0] + print(plug.__repr__()) + assert isinstance(plug, Plugin) + assert plug.name == name + assert plug.folder_name == folder + assert plug.description == description + assert plug.min_bot_version == min_bot_version + + +class TestPluginConversion: + """Test the extension converter converts extensions properly.""" + + @classmethod + def setup_class(cls): + """Set the class plugins var to the scanned plugins.""" + cls.plugins = set(find_plugins()) + + @pytest.mark.asyncio + async def test_conversion_success(self) -> None: + """Test all plugins in the list are properly converted.""" + with unittest.mock.patch("modmail.addons.plugins.PLUGINS", self.plugins): + + for plugin in self.plugins: + print(f"Current plugin: {plugin}") + converted = await Plugin.convert(mocks.MockContext(), plugin) + + assert plugin.name == converted.name diff --git a/tests/modmail/addons/test_utils.py b/tests/modmail/addons/test_utils.py new file mode 100644 index 
00000000..2c31bbbe --- /dev/null +++ b/tests/modmail/addons/test_utils.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +import zipfile + +import pytest +from aiohttp import ClientSession + +from modmail.addons.models import AddonSource, SourceTypeEnum +from modmail.addons.utils import download_zip_from_source + + +@pytest.mark.xfail +@pytest.mark.parametrize( + "source", [AddonSource.from_zip("https://github.com/discord-modmail/modmail/archive/main.zip")] +) +@pytest.mark.asyncio +async def test_download_zip_from_source(source: AddonSource, http_session: ClientSession): + """Test that a zip can be successfully downloaded and everything is safe inside.""" + file = await download_zip_from_source(source, http_session) + assert isinstance(file, zipfile.ZipFile) + assert file.testzip() is None diff --git a/tests/modmail/conftest.py b/tests/modmail/conftest.py index 650621c9..6a662a20 100644 --- a/tests/modmail/conftest.py +++ b/tests/modmail/conftest.py @@ -28,6 +28,22 @@ def _get_env(): return pathlib.Path(__file__).parent / "test.env" +@pytest.fixture(scope="package") +def reroute_plugins(): + """Reroute the plugin directory.""" + import modmail.plugins + from tests.modmail import plugins + + modmail.plugins.__file__ = plugins.__file__ + + import modmail.addons.plugins + + modmail.addons.plugins.BASE_PLUGIN_PATH = pathlib.Path(plugins.__file__).parent.resolve() + + modmail.addons.plugins.LOCAL_PLUGIN_TOML = modmail.addons.plugins.BASE_PLUGIN_PATH / "test1.toml" + yield + + def _get_env_vars() -> dict: result = {} for key, value in os.environ.items(): diff --git a/tests/modmail/extensions/test_plugin_manager.py b/tests/modmail/extensions/test_plugin_manager.py deleted file mode 100644 index eddae16d..00000000 --- a/tests/modmail/extensions/test_plugin_manager.py +++ /dev/null @@ -1,32 +0,0 @@ -from copy import copy - -import pytest - -from modmail.extensions.plugin_manager import PluginConverter -from modmail.utils.plugins import PLUGINS as GLOBAL_PLUGINS 
-from modmail.utils.plugins import walk_plugins - - -# load EXTENSIONS -PLUGINS = copy(GLOBAL_PLUGINS) -PLUGINS.update(walk_plugins()) - - -class TestPluginConverter: - """Test the extension converter converts extensions properly.""" - - all_plugins = {x: y for x, y in walk_plugins()} - - @pytest.fixture(scope="class", name="converter") - def converter(self) -> PluginConverter: - """Fixture method for a PluginConverter object.""" - return PluginConverter() - - @pytest.mark.asyncio - @pytest.mark.parametrize("plugin", [e.rsplit(".", 1)[-1] for e in all_plugins.keys()]) - async def test_conversion_success(self, plugin: str, converter: PluginConverter) -> None: - """Test all plugins in the list are properly converted.""" - converter.source_list = self.all_plugins - converted = await converter.convert(None, plugin) - - assert converted.endswith(plugin) diff --git a/tests/modmail/plugins/README.md b/tests/modmail/plugins/README.md new file mode 100644 index 00000000..2fba42e3 --- /dev/null +++ b/tests/modmail/plugins/README.md @@ -0,0 +1,4 @@ +These are where some plugins made for testing belong. + +The tests/modmail/conftest.py file overwrites modmail.plugins.__file__ +to redirect to the `[__init__.py](./__init__.py) located here.. 
diff --git a/tests/modmail/plugins/__init__.py b/tests/modmail/plugins/__init__.py new file mode 100644 index 00000000..5895363c --- /dev/null +++ b/tests/modmail/plugins/__init__.py @@ -0,0 +1 @@ +from modmail.plugins import * # noqa: F401 F403 diff --git a/tests/modmail/plugins/test1.toml b/tests/modmail/plugins/test1.toml new file mode 100644 index 00000000..ad7fc2cf --- /dev/null +++ b/tests/modmail/plugins/test1.toml @@ -0,0 +1,3 @@ +[[plugins]] +name = 'working plug' +directory = 'working_plugin' diff --git a/tests/modmail/plugins/working_plugin/working_plug.py b/tests/modmail/plugins/working_plugin/working_plug.py new file mode 100644 index 00000000..df5ca441 --- /dev/null +++ b/tests/modmail/plugins/working_plugin/working_plug.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +import logging + +from modmail.addons.helpers import PluginCog +from modmail.bot import ModmailBot +from modmail.log import ModmailLogger + + +logger: ModmailLogger = logging.getLogger(__name__) + + +class WorkingPlugin(PluginCog): + """Demonstration plugin for testing.""" + + pass + + +def setup(bot: ModmailBot) -> None: + """Add the gateway logger to the bot.""" + bot.add_cog(WorkingPlugin(bot)) diff --git a/tox.ini b/tox.ini index 56e5d629..848b19a3 100644 --- a/tox.ini +++ b/tox.ini @@ -25,9 +25,9 @@ ignore= # Whitespace Before E203 per-file-ignores= - tests/*:,ANN,S101,F401 + tests/*:ANN,S101,F401 docs.py:B008 - modmail/utils/threads/__init__.py:F401 + modmail/**/__init__.py:F401 [isort] profile=black