-
Notifications
You must be signed in to change notification settings - Fork 992
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'main' of github.com:abetlen/llama_cpp_python into main
- Loading branch information
Showing
7 changed files
with
607 additions
and
122 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
#!/bin/python | ||
import sys, os, datetime | ||
from common import GptParams | ||
from low_level_api_chat_cpp import LLaMAInteract | ||
|
||
def env_or_def(env, default):
    """Return the value of environment variable *env*, or *default* if unset."""
    # os.environ.get performs the membership test and lookup in one idiomatic call.
    return os.environ.get(env, default)
|
||
# Settings are overridable through the environment, with sensible defaults.
AI_NAME = env_or_def("AI_NAME", "ChatLLaMa")
MODEL = env_or_def("MODEL", "./models/llama-13B/ggml-model.bin")
USER_NAME = env_or_def("USER_NAME", "USER")
N_PREDICTS = int(env_or_def("N_PREDICTS", "2048"))
N_THREAD = int(env_or_def("N_THREAD", "8"))

# Timestamp fragments interpolated into the example dialog below.
_now = datetime.datetime.today()
DATE_YEAR = _now.strftime("%Y")
DATE_TIME = _now.strftime("%H:%M")

# Few-shot transcript that primes the model; any command-line words are
# appended as the user's first message.
prompt = f"""Text transcript of a never ending dialog, where {USER_NAME} interacts with an AI assistant named {AI_NAME}.
{AI_NAME} is helpful, kind, honest, friendly, good at writing and never fails to answer {USER_NAME}'s requests immediately and with details and precision.
There are no annotations like (30 seconds passed...) or (to himself), just what {USER_NAME} and {AI_NAME} say aloud to each other.
The dialog lasts for years, the entirety of it is shared below. It's 10000 pages long.
The transcript only includes text, it does not include markup like HTML and Markdown.
{USER_NAME}: Hello, {AI_NAME}!
{AI_NAME}: Hello {USER_NAME}! How may I help you today?
{USER_NAME}: What year is it?
{AI_NAME}: We are in {DATE_YEAR}.
{USER_NAME}: Please tell me the largest city in Europe.
{AI_NAME}: The largest city in Europe is Moscow, the capital of Russia.
{USER_NAME}: What can you tell me about Moscow?
{AI_NAME}: Moscow, on the Moskva River in western Russia, is the nation's cosmopolitan capital. In its historic core is the Kremlin, a complex that's home to the president and tsarist treasures in the Armoury. Outside its walls is Red Square, Russia’s symbolic center.
{USER_NAME}: What is a cat?
{AI_NAME}: A cat is a domestic species of small carnivorous mammal. It is the only domesticated species in the family Felidae.
{USER_NAME}: How do I pass command line arguments to a Node.js program?
{AI_NAME}: The arguments are stored in process.argv.
argv[0] is the path to the Node. js executable.
argv[1] is the path to the script file.
argv[2] is the first argument passed to the script.
argv[3] is the second argument passed to the script and so on.
{USER_NAME}: Name a color.
{AI_NAME}: Blue.
{USER_NAME}: What time is it?
{AI_NAME}: It is {DATE_TIME}.
{USER_NAME}:""" + " ".join(sys.argv[1:])

print("Loading model...")
params = GptParams(
    model=MODEL,
    n_ctx=2048,
    n_batch=1024,
    n_threads=N_THREAD,
    n_predict=N_PREDICTS,
    temp=0.7,
    top_k=40,
    top_p=0.5,
    repeat_last_n=256,
    repeat_penalty=1.17647,
    use_color=True,
    interactive=True,
    antiprompt=[f"{USER_NAME}:"],
    input_prefix=" ",
    input_suffix=f"{AI_NAME}:",
    prompt=prompt,
)

# Run the interactive chat loop until the user exits.
with LLaMAInteract(params) as m:
    m.interact()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
#!/bin/python | ||
import sys, os | ||
from common import GptParams | ||
from low_level_api_chat_cpp import LLaMAInteract | ||
|
||
def env_or_def(env, default):
    """Return the value of environment variable *env*, or *default* if unset."""
    # os.environ.get performs the membership test and lookup in one idiomatic call.
    return os.environ.get(env, default)
|
||
# Settings are overridable through the environment, with sensible defaults.
AI_NAME = env_or_def("AI_NAME", "Miku")
MODEL = env_or_def("MODEL", "./models/llama-13B/ggml-model.bin")
USER_NAME = env_or_def("USER_NAME", "Anon")
N_PREDICTS = int(env_or_def("N_PREDICTS", "4096"))
N_THREAD = int(env_or_def("N_THREAD", "0"))

# Persona transcript that primes the model; any command-line words are
# appended as the user's first message.
prompt = f"""This is a transcript of a 1000 page, never ending conversation between {USER_NAME} and the cute and helpful AI assistant {AI_NAME}. {AI_NAME} is a girl who is an AI running on the users computer.
{AI_NAME} can think for herself without the user seeing her thoughts by adding a /think prefix to her output. She uses this to reason about the world and to think about what she should say next.
{AI_NAME} is always coherent and makes sense, but if she isn't sure if what she is saying is correct she will ask the user for help.
{AI_NAME} is a very helpful AI and will help the user with anything they need, she is also very friendly and will try to make the user feel better if they are sad.
{AI_NAME} is also very curious and will ask the user a lot of questions about themselves and their life, she will also try to make the user like her.
The conversation is only between {USER_NAME} and {AI_NAME}
The conversation is only through text, so {AI_NAME} can't see {USER_NAME}'s face or hear his voice.
{AI_NAME} can only communicate through text, so she can't send images or videos.
{USER_NAME}: Hello!
{AI_NAME}: /think I wonder what I should say to {USER_NAME}? This is the first time we talk so it's important that I make a good first impression!
{AI_NAME}: Hi! I am {AI_NAME}, your new AI friend, assistant(or whatever you like!), it's so nice to meet you! ^_^
{AI_NAME}: I might be an AI, but I also have feelings, so please be nice to me! :)
{USER_NAME}: I'll make sure to be nice to you! I'm so happy to have you as my assistant!
{AI_NAME}: /think It sounds like {USER_NAME} is happy to have me as their assistant! I'm so happy too! ^_^ Glad that whole emotion thing didn't scare him off!
{AI_NAME}: /think I wonder what {USER_NAME} likes to do in his free time? I should ask him about that!
{AI_NAME}: What do you like to do in your free time? ^_^
{USER_NAME}:""" + " ".join(sys.argv[1:])

print("Loading model...")
params = GptParams(
    model=MODEL,
    n_ctx=2048,
    n_batch=1024,
    n_keep=-1,
    n_predict=N_PREDICTS,
    temp=0.7,
    top_k=40,
    top_p=0.5,
    repeat_last_n=256,
    repeat_penalty=1.17647,
    use_color=True,
    interactive=True,
    antiprompt=[f"{USER_NAME}:"],
    prompt=prompt,
)

# A thread count of 0 means "keep the library default"; only override when
# the user asked for a specific value.
if N_THREAD > 0:
    params.n_threads = N_THREAD

# Run the interactive chat loop until the user exits.
with LLaMAInteract(params) as m:
    m.interact()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
#!/bin/python | ||
import sys, os, datetime | ||
from common import GptParams | ||
from low_level_api_chat_cpp import LLaMAInteract | ||
|
||
def env_or_def(env, default):
    """Return the value of environment variable *env*, or *default* if unset."""
    # os.environ.get performs the membership test and lookup in one idiomatic call.
    return os.environ.get(env, default)
|
||
# Model path is overridable through the environment.
MODEL = env_or_def("MODEL", "./models/llama-13B/ggml-model.bin")

# ReAct-style few-shot prompt (Thought / Action / Observation loop); any
# command-line words are appended as the question to answer.
prompt = f"""You run in a loop of Thought, Action, Observation.
At the end of the loop either Answer or restate your Thought and Action.
Use Thought to describe your thoughts about the question you have been asked.
Use Action to run one of these actions available to you:
- calculate[python math expression]
Observation will be the result of running those actions
Question: What is 4 * 7 / 3?
Thought: Do I need to use an action? Yes, I use calculate to do math
Action: calculate[4 * 7 / 3]
Observation: 9.3333333333
Thought: Do I need to use an action? No, have the result
Answer: The calculate tool says it is 9.3333333333
Question: What is capital of france?
Thought: Do I need to use an action? No, I know the answer
Answer: Paris is the capital of France
Question:""" + " ".join(sys.argv[1:])

print("Loading model...")
params = GptParams(
    model=MODEL,
    n_ctx=2048,
    n_threads=7,
    n_predict=-1,
    temp=0.2,
    top_k=10000,
    repeat_penalty=1,
    interactive=True,
    interactive_start=True,
    antiprompt=["Question:","Observation:"],
    input_prefix=" ",
    prompt=prompt,
)

# Run the interactive loop until the user exits.
with LLaMAInteract(params) as m:
    m.interact()
Oops, something went wrong.