From e173e958d1ccab747cecdb7139594b4865bfe396 Mon Sep 17 00:00:00 2001
From: Chris Rude
Date: Fri, 26 May 2023 09:00:21 -0700
Subject: [PATCH] update version numbers to 0.1.9, config.sample.yml

---
 docs/config.sample.yml  | 15 ++++++++++-----
 pyproject.toml          |  2 +-
 src/oobabot/__init__.py |  2 +-
 3 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/docs/config.sample.yml b/docs/config.sample.yml
index 6135077a..76dacdc7 100644
--- a/docs/config.sample.yml
+++ b/docs/config.sample.yml
@@ -6,7 +6,7 @@
 # "config.yml" from the current directory when it is run.
 #
 
-version: 0.1.8
+version: 0.1.9
 
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
 # persona
@@ -78,6 +78,15 @@ discord:
   # default: False
   stream_responses:
 
+  # FEATURE PREVIEW: Adds a limit to the number of channels the bot will post unsolicited
+  # messages in at the same time. This is to prevent the bot from being too noisy in large
+  # servers. When set, only the most recent N channels the bot has been summoned in will
+  # have a chance of receiving an unsolicited message. The bot will still respond to
+  # @-mentions and wake words in any channel it can access. Set to 0 to disable this
+  # feature.
+  # default: 3
+  unsolicited_channel_cap:
+
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
 # oobabooga
 # .
@@ -164,7 +173,6 @@ stable_diffusion:
 # .
 
 template:
-  # Path to a file containing the prompt template.
   # The main prompt sent to Oobabooga to generate a response from the bot AI. The AI's
   # reply to this prompt will be sent to discord as the bot's response.
   # .
@@ -177,7 +185,6 @@ template:
   #   Transcript:
   #   {MESSAGE_HISTORY} {IMAGE_COMING}
   prompt:
-  # Path to a file containing the prompt_history_line template.
   # Part of the AI response-generation prompt, this is used to render a single line of chat
   # history. A list of these, one for each past chat message, will become {MESSAGE_HISTORY}
   # and inserted into the main prompt
@@ -187,7 +194,6 @@ template:
   # default: {USER_NAME} says: {USER_MESSAGE}
   prompt_history_line:
 
-  # Path to a file containing the prompt_image_coming template.
   # Part of the AI response-generation prompt, this is used to inform the AI that it is in
   # the process of generating an image.
   # .
@@ -196,7 +202,6 @@ template:
   # default: {AI_NAME}: is currently generating an image, as requested.
   prompt_image_coming:
 
-  # Path to a file containing the prompt_image_keywords template.
   # Sent to Oobabooga, along with the user's image request, to generate image
   # keywords. The AI's response to this prompt will then be sent to Stable
   # Diffusion to generate an image.
diff --git a/pyproject.toml b/pyproject.toml
index a2c18e54..159d4457 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "oobabot"
-version = "0.1.8"
+version = "0.1.9"
 description = "A Discord bot which talks to Large Language Model AIs running on oobabooga's text-generation-webui"
 authors = ["Christopher Rude "]
 license = "MIT"
diff --git a/src/oobabot/__init__.py b/src/oobabot/__init__.py
index 6304a562..67d67ba1 100644
--- a/src/oobabot/__init__.py
+++ b/src/oobabot/__init__.py
@@ -4,4 +4,4 @@
 """
 
 # todo: sync this up automatically
-__version__ = "0.1.8"
+__version__ = "0.1.9"
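
For reference, a minimal sketch of how the new option could be set in a user's own config.yml; the value 5 is illustrative (per the patch, the default is 3 and 0 disables the cap), and everything beyond the option name and section shown in the diff above is assumed:

# config.yml -- illustrative sketch, not part of the patch
version: 0.1.9

discord:
  # allow unsolicited replies in at most the 5 most recently summoned channels;
  # @-mentions and wake words still work in every channel the bot can access
  unsolicited_channel_cap: 5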