Skip to content

Commit

Permalink
improve: adds better error handling (#6)
Browse files Browse the repository at this point in the history
  • Loading branch information
kevinhermawan authored Oct 27, 2024
1 parent 3b8f939 commit bcab253
Show file tree
Hide file tree
Showing 6 changed files with 312 additions and 83 deletions.
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
{
"originHash" : "9babdbe0d420e7da0e4b1fdca252d3d06b03638f979b3d4ce55ccda4d14b84d3",
"originHash" : "53903b86839844f0235d6409aeff93ce3322d067fe255f83676414c60e9c1cd7",
"pins" : [
{
"identity" : "swift-ai-model-retriever",
"kind" : "remoteSourceControl",
"location" : "https://github.com/kevinhermawan/swift-ai-model-retriever.git",
"state" : {
"branch" : "main",
"revision" : "5d22906f1bedcb53452257c784ebffa72e9ad1cb"
"revision" : "585de8246341cf0b715357ecfd57c20aea52545c"
}
},
{
Expand Down
39 changes: 34 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ let messages = [

let task = Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages)

print(completion.content.first?.text ?? "No response")
} catch {
Expand All @@ -85,7 +85,7 @@ let messages = [

let task = Task {
do {
for try await chunk in chat.stream(model: "claude-3-5-sonnet-20240620", messages: messages) {
for try await chunk in chat.stream(model: "claude-3-5-sonnet", messages: messages) {
if let text = chunk.delta?.text {
print(text, terminator: "")
}
Expand Down Expand Up @@ -116,7 +116,7 @@ let messages = [

Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages)

print(completion.content.first?.text ?? "")
} catch {
Expand Down Expand Up @@ -154,7 +154,7 @@ let options = ChatOptions(tools: [recommendBookTool])

Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages, options: options)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages, options: options)

if let toolInput = completion.content.first(where: { $0.type == "tool_use" })?.toolInput {
print(toolInput)
Expand Down Expand Up @@ -182,7 +182,7 @@ let messages = [

let task = Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages)

print(completion.content.first?.text ?? "No response")
} catch {
Expand All @@ -193,6 +193,35 @@ let task = Task {

To learn more about prompt caching, check out the [Anthropic documentation](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching).

### Error Handling

`LLMChatAnthropic` provides structured error handling through the `LLMChatAnthropicError` enum. This enum contains three cases that represent different types of errors you might encounter:

```swift
let messages = [
ChatMessage(role: .system, content: "You are a helpful assistant."),
ChatMessage(role: .user, content: "What is the capital of Indonesia?")
]

do {
    let completion = try await chat.send(model: "claude-3-5-sonnet-latest", messages: messages)

print(completion.content.first?.text ?? "No response")
} catch let error as LLMChatAnthropicError {
switch error {
case .serverError(let message):
// Handle server-side errors (e.g., invalid API key, rate limits)
print("Server Error: \(message)")
case .networkError(let error):
// Handle network-related errors (e.g., no internet connection)
print("Network Error: \(error.localizedDescription)")
case .badServerResponse:
// Handle invalid server responses
print("Invalid response received from server")
}
}
```

## Related Packages

- [swift-ai-model-retriever](https://github.com/kevinhermawan/swift-ai-model-retriever)
Expand Down
39 changes: 34 additions & 5 deletions Sources/LLMChatAnthropic/Documentation.docc/Documentation.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ let messages = [

let task = Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages)

print(completion.content.first?.text ?? "No response")
} catch {
Expand All @@ -56,7 +56,7 @@ let messages = [

let task = Task {
do {
for try await chunk in chat.stream(model: "claude-3-5-sonnet-20240620", messages: messages) {
for try await chunk in chat.stream(model: "claude-3-5-sonnet", messages: messages) {
if let text = chunk.delta?.text {
print(text, terminator: "")
}
Expand Down Expand Up @@ -87,7 +87,7 @@ let messages = [

Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages)

print(completion.content.first?.text ?? "")
} catch {
Expand Down Expand Up @@ -125,7 +125,7 @@ let options = ChatOptions(tools: [recommendBookTool])

Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages, options: options)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages, options: options)

if let toolInput = completion.content.first(where: { $0.type == "tool_use" })?.toolInput {
print(toolInput)
Expand Down Expand Up @@ -153,7 +153,7 @@ let messages = [

let task = Task {
do {
let completion = try await chat.send(model: "claude-3-5-sonnet-20240620", messages: messages)
let completion = try await chat.send(model: "claude-3-5-sonnet", messages: messages)

print(completion.content.first?.text ?? "No response")
} catch {
Expand All @@ -164,6 +164,35 @@ let task = Task {

To learn more about prompt caching, check out the [Anthropic documentation](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching).

### Error Handling

``LLMChatAnthropic`` provides structured error handling through the ``LLMChatAnthropicError`` enum. This enum contains three cases that represent different types of errors you might encounter:

```swift
let messages = [
ChatMessage(role: .system, content: "You are a helpful assistant."),
ChatMessage(role: .user, content: "What is the capital of Indonesia?")
]

do {
    let completion = try await chat.send(model: "claude-3-5-sonnet-latest", messages: messages)

print(completion.content.first?.text ?? "No response")
} catch let error as LLMChatAnthropicError {
switch error {
case .serverError(let message):
// Handle server-side errors (e.g., invalid API key, rate limits)
print("Server Error: \(message)")
case .networkError(let error):
// Handle network-related errors (e.g., no internet connection)
print("Network Error: \(error.localizedDescription)")
case .badServerResponse:
// Handle invalid server responses
print("Invalid response received from server")
}
}
```

## Related Packages

- [swift-ai-model-retriever](https://github.com/kevinhermawan/swift-ai-model-retriever)
Expand Down
Loading

0 comments on commit bcab253

Please sign in to comment.