This repository has been archived by the owner on Apr 24, 2024. It is now read-only.

Commit

feat: support claude-2
ciuzaak committed Jul 11, 2023
1 parent 49c44b0 commit 91c8c99
Showing 1 changed file with 3 additions and 16 deletions.
19 changes: 3 additions & 16 deletions utils/claude_utils.py
@@ -5,7 +5,7 @@
 
 class Claude:
     def __init__(self):
-        self.model = "claude-1.3-100k"
+        self.model = "claude-2"
         self.temperature = 0.7
         self.cutoff = 50
         self.client = AsyncAnthropic(api_key=claude_api)
@@ -18,19 +18,7 @@ def revert(self):
         self.prompt = self.prompt[: self.prompt.rfind(HUMAN_PROMPT)]
 
     def change_model(self, model):
-        valid_models = {
-            "claude-1",
-            "claude-1-100k",
-            "claude-instant-1",
-            "claude-instant-1-100k",
-            "claude-1.3",
-            "claude-1.3-100k",
-            "claude-1.2",
-            "claude-1.0",
-            "claude-instant-1.1",
-            "claude-instant-1.1-100k",
-            "claude-instant-1.0",
-        }
+        valid_models = {"claude-2", "claude-instant-1"}
         if model in valid_models:
             self.model = model
             return True
@@ -60,11 +48,10 @@ async def send_message_stream(self, message):
         self.prompt = f"{self.prompt}{HUMAN_PROMPT} {message}{AI_PROMPT}"
         response = await self.client.completions.create(
             prompt=self.prompt,
-            stop_sequences=[HUMAN_PROMPT],
-            max_tokens_to_sample=9216,
             model=self.model,
             temperature=self.temperature,
             stream=True,
+            max_tokens_to_sample=100000,
         )
         answer = ""
         async for data in response:
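For reference, a self-contained sketch of the streaming completions call as it reads after this change, assuming the anthropic Python SDK of that era; the helper function, placeholder API key, and return value below are illustrative and not part of the repository:

# Minimal sketch of the updated streaming call (hypothetical wrapper, not repo code).
from anthropic import AsyncAnthropic, HUMAN_PROMPT, AI_PROMPT

async def stream_completion(message: str) -> str:
    client = AsyncAnthropic(api_key="sk-ant-...")  # placeholder API key
    response = await client.completions.create(
        prompt=f"{HUMAN_PROMPT} {message}{AI_PROMPT}",
        model="claude-2",
        temperature=0.7,
        stream=True,
        max_tokens_to_sample=100000,
    )
    answer = ""
    async for data in response:  # each streamed event carries an incremental text chunk
        answer += data.completion
    return answer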
