
Commit f7bf13e

improve: adds better error handling (#5)
* improve: adds better error handling
* simplify mock
1 parent 5b42916 commit f7bf13e

8 files changed: +226 additions, -86 deletions

Playground/Playground/ViewModels/AppViewModel.swift

Lines changed: 1 addition & 0 deletions
@@ -7,6 +7,7 @@
 
 import Foundation
 
+@MainActor
 @Observable
 final class AppViewModel {
     var cohereAPIKey: String

Playground/Playground/Views/ModelListView.swift

Lines changed: 47 additions & 24 deletions
@@ -11,8 +11,12 @@ import AIModelRetriever
 struct ModelListView: View {
     private let title: String
     private let provider: AIProvider
+    private let retriever = AIModelRetriever()
 
     @Environment(AppViewModel.self) private var viewModel
+
+    @State private var isFetching: Bool = false
+    @State private var fetchTask: Task<Void, Never>?
     @State private var models: [AIModel] = []
 
     init(title: String, provider: AIProvider) {
@@ -21,36 +25,55 @@ struct ModelListView: View {
     }
 
     var body: some View {
-        List(models) { model in
-            VStack(alignment: .leading) {
-                Text(model.id)
-                    .font(.footnote)
-                    .foregroundStyle(.secondary)
-
-                Text(model.name)
+        VStack {
+            if models.isEmpty, isFetching {
+                VStack(spacing: 16) {
+                    ProgressView()
+
+                    Button("Cancel") {
+                        fetchTask?.cancel()
+                    }
+                }
+            } else {
+                List(models) { model in
+                    VStack(alignment: .leading) {
+                        Text(model.id)
+                            .font(.footnote)
+                            .foregroundStyle(.secondary)
+
+                        Text(model.name)
+                    }
+                }
             }
         }
         .navigationTitle(title)
         .task {
-            let retriever = AIModelRetriever()
+            isFetching = true
 
-            do {
-                switch provider {
-                case .anthropic:
-                    models = retriever.anthropic()
-                case .cohere:
-                    models = try await retriever.cohere(apiKey: viewModel.cohereAPIKey)
-                case .google:
-                    models = retriever.google()
-                case .ollama:
-                    models = try await retriever.ollama()
-                case .openai:
-                    models = try await retriever.openAI(apiKey: viewModel.openaiAPIKey)
-                case .groq:
-                    models = try await retriever.openAI(apiKey: viewModel.groqAPIKey, endpoint: URL(string: "https://api.groq.com/openai/v1/models"))
+            fetchTask = Task {
+                do {
+                    defer {
+                        self.isFetching = false
+                        self.fetchTask = nil
+                    }
+
+                    switch provider {
+                    case .anthropic:
+                        models = retriever.anthropic()
+                    case .cohere:
+                        models = try await retriever.cohere(apiKey: viewModel.cohereAPIKey)
+                    case .google:
+                        models = retriever.google()
+                    case .ollama:
+                        models = try await retriever.ollama()
+                    case .openai:
+                        models = try await retriever.openAI(apiKey: viewModel.openaiAPIKey)
+                    case .groq:
+                        models = try await retriever.openAI(apiKey: viewModel.groqAPIKey, endpoint: URL(string: "https://api.groq.com/openai/v1/models"))
+                    }
+                } catch {
+                    print(String(describing: error))
                 }
-            } catch {
-                print(String(describing: error))
             }
         }
     }
 }
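
The reworked view keeps the fetch inside a `Task` stored in `@State`, resets `isFetching` and `fetchTask` in a `defer`, and lets the Cancel button call `fetchTask?.cancel()`. The sketch below isolates that pattern outside SwiftUI; `ModelLoader`, its method names, and the two-second sleep are hypothetical stand-ins for the real retriever call, not part of the package.

```swift
import Foundation

// Minimal sketch of the cancel-friendly fetch pattern used in ModelListView.
// ModelLoader and the sleep are illustrative stand-ins, not package API.
@MainActor
final class ModelLoader {
    private(set) var isFetching = false
    private var fetchTask: Task<Void, Never>?

    func load() {
        isFetching = true

        fetchTask = Task {
            defer {
                // Runs on success, failure, or cancellation, so state always resets.
                self.isFetching = false
                self.fetchTask = nil
            }

            do {
                try await Task.sleep(nanoseconds: 2_000_000_000) // placeholder for a network call
                print("models loaded")
            } catch {
                print(String(describing: error)) // CancellationError if cancel() was called first
            }
        }
    }

    func cancel() {
        fetchTask?.cancel() // mirrors the Cancel button's fetchTask?.cancel()
    }
}
```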

README.md

Lines changed: 27 additions & 2 deletions
@@ -152,14 +152,39 @@ do {
 }
 ```
 
-## Donations
+### Error Handling
+
+The package provides structured error handling through the `AIModelRetrieverError` enum. This enum contains four cases that represent the different types of errors you might encounter:
+
+```swift
+do {
+    let models = try await modelRetriever.openAI(apiKey: "your-api-key")
+} catch let error as AIModelRetrieverError {
+    switch error {
+    case .serverError(let message):
+        // Handle server-side errors (e.g., invalid API key, rate limits)
+        print("Server Error: \(message)")
+    case .networkError(let error):
+        // Handle network-related errors (e.g., no internet connection)
+        print("Network Error: \(error.localizedDescription)")
+    case .badServerResponse:
+        // Handle invalid server responses
+        print("Invalid response received from server")
+    case .cancelled:
+        // Handle cancelled requests
+        print("Request cancelled")
+    }
+}
+```
+
+## Support
 
 If you find `AIModelRetriever` helpful and would like to support its development, consider making a donation. Your contribution helps maintain the project and develop new features.
 
 - [GitHub Sponsors](https://github.com/sponsors/kevinhermawan)
 - [Buy Me a Coffee](https://buymeacoffee.com/kevinhermawan)
 
-Your support is greatly appreciated!
+Your support is greatly appreciated! ❤️
 
 ## Contributing
 

Sources/AIModelRetriever/AIModelRetriever.swift

Lines changed: 62 additions & 14 deletions
@@ -15,18 +15,33 @@ public struct AIModelRetriever: Sendable {
     /// Initializes a new instance of ``AIModelRetriever``.
     public init() {}
 
-    private func performRequest<T: Decodable>(_ request: URLRequest) async throws -> T {
-        let (data, response) = try await URLSession.shared.data(for: request)
-
-        guard let httpResponse = response as? HTTPURLResponse else {
-            throw AIModelRetrieverError.badServerResponse
-        }
-
-        guard 200...299 ~= httpResponse.statusCode else {
-            throw AIModelRetrieverError.serverError(statusCode: httpResponse.statusCode, error: String(data: data, encoding: .utf8))
+    private func performRequest<T: Decodable, E: ProviderError>(_ request: URLRequest, errorType: E.Type) async throws -> T {
+        do {
+            let (data, response) = try await URLSession.shared.data(for: request)
+
+            if let errorResponse = try? JSONDecoder().decode(E.self, from: data) {
+                throw AIModelRetrieverError.serverError(errorResponse.errorMessage)
+            }
+
+            guard let httpResponse = response as? HTTPURLResponse, 200...299 ~= httpResponse.statusCode else {
+                throw AIModelRetrieverError.badServerResponse
+            }
+
+            let models = try JSONDecoder().decode(T.self, from: data)
+
+            return models
+        } catch let error as AIModelRetrieverError {
+            throw error
+        } catch let error as URLError {
+            switch error.code {
+            case .cancelled:
+                throw AIModelRetrieverError.cancelled
+            default:
+                throw AIModelRetrieverError.networkError(error)
+            }
+        } catch {
+            throw AIModelRetrieverError.networkError(error)
         }
-
-        return try JSONDecoder().decode(T.self, from: data)
     }
 
     private func createRequest(for endpoint: URL, with headers: [String: String]? = nil) -> URLRequest {
@@ -74,7 +89,7 @@ public extension AIModelRetriever {
         let allHeaders = ["Authorization": "Bearer \(apiKey)"]
 
         let request = createRequest(for: defaultEndpoint, with: allHeaders)
-        let response: CohereResponse = try await performRequest(request)
+        let response: CohereResponse = try await performRequest(request, errorType: CohereError.self)
 
         return response.models.map { AIModel(id: $0.name, name: $0.name) }
     }
@@ -86,6 +101,12 @@ public extension AIModelRetriever {
     private struct CohereModel: Decodable {
         let name: String
     }
+
+    private struct CohereError: ProviderError {
+        let message: String
+
+        var errorMessage: String { message }
+    }
 }
 
 // MARK: - Google
@@ -122,7 +143,7 @@ public extension AIModelRetriever {
         guard let defaultEndpoint = URL(string: "http://localhost:11434/api/tags") else { return [] }
 
         let request = createRequest(for: endpoint ?? defaultEndpoint, with: headers)
-        let response: OllamaResponse = try await performRequest(request)
+        let response: OllamaResponse = try await performRequest(request, errorType: OllamaError.self)
 
         return response.models.map { AIModel(id: $0.model, name: $0.name) }
     }
@@ -135,6 +156,16 @@
         let name: String
         let model: String
     }
+
+    private struct OllamaError: ProviderError {
+        let error: Error
+
+        struct Error: Decodable {
+            let message: String
+        }
+
+        var errorMessage: String { error.message }
+    }
 }
 
 // MARK: - OpenAI
@@ -156,7 +187,7 @@
         allHeaders["Authorization"] = "Bearer \(apiKey)"
 
         let request = createRequest(for: endpoint ?? defaultEndpoint, with: allHeaders)
-        let response: OpenAIResponse = try await performRequest(request)
+        let response: OpenAIResponse = try await performRequest(request, errorType: OpenAIError.self)
 
         return response.data.map { AIModel(id: $0.id, name: $0.id) }
     }
@@ -168,4 +199,21 @@
     private struct OpenAIModel: Decodable {
         let id: String
     }
+
+    private struct OpenAIError: ProviderError {
+        let error: Error
+
+        struct Error: Decodable {
+            let message: String
+        }
+
+        var errorMessage: String { error.message }
+    }
+}
+
+// MARK: - Supporting Types
+private extension AIModelRetriever {
+    protocol ProviderError: Decodable {
+        var errorMessage: String { get }
+    }
 }
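
The generic `performRequest(_:errorType:)` now tries to decode each provider's error envelope before the success payload and maps transport failures (including `URLError.cancelled`) onto `AIModelRetrieverError`. The standalone sketch below illustrates the decode-the-error-first idea with hypothetical types (`ProviderErrorPayload`, `OpenAIStyleError`, `ModelList`, `RetrievalError`, `decodeResponse`) and a hard-coded JSON body; it is not the package's implementation.

```swift
import Foundation

// Illustrative types only; the real package nests its equivalents inside AIModelRetriever.
protocol ProviderErrorPayload: Decodable {
    var errorMessage: String { get }
}

// OpenAI-style error envelope: {"error": {"message": "..."}}
struct OpenAIStyleError: ProviderErrorPayload {
    struct Detail: Decodable { let message: String }
    let error: Detail
    var errorMessage: String { error.message }
}

struct ModelList: Decodable {
    struct Model: Decodable { let id: String }
    let data: [Model]
}

enum RetrievalError: Error {
    case serverError(String)
}

// Try the provider's error shape first; only then expect the success payload.
func decodeResponse<T: Decodable, E: ProviderErrorPayload>(_ data: Data, errorType: E.Type) throws -> T {
    if let providerError = try? JSONDecoder().decode(E.self, from: data) {
        throw RetrievalError.serverError(providerError.errorMessage)
    }

    return try JSONDecoder().decode(T.self, from: data)
}

let errorBody = Data(#"{"error": {"message": "Incorrect API key provided"}}"#.utf8)

do {
    let models: ModelList = try decodeResponse(errorBody, errorType: OpenAIStyleError.self)
    print(models.data.map(\.id))
} catch RetrievalError.serverError(let message) {
    print("Server error:", message) // "Incorrect API key provided"
} catch {
    print("Decoding failed:", error)
}
```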

Sources/AIModelRetriever/AIModelRetrieverError.swift

Lines changed: 27 additions & 7 deletions
@@ -9,13 +9,33 @@ import Foundation
 
 /// An enum that represents errors that can occur during AI model retrieval.
 public enum AIModelRetrieverError: Error, Sendable {
-    /// Indicates that the server response was not in the expected format.
-    case badServerResponse
+    /// A case that represents a server-side error response.
+    ///
+    /// - Parameter message: The error message from the server.
+    case serverError(String)
 
-    /// Indicates that the server returned an error.
+    /// A case that represents a network-related error.
     ///
-    /// - Parameters:
-    ///   - statusCode: The HTTP status code returned by the server.
-    ///   - error: An optional string that contains additional error information provided by the server.
-    case serverError(statusCode: Int, error: String?)
+    /// - Parameter error: The underlying network error.
+    case networkError(Error)
+
+    /// A case that represents an invalid server response.
+    case badServerResponse
+
+    /// A case that represents a cancelled request.
+    case cancelled
+
+    /// A localized message that describes the error.
+    public var errorDescription: String? {
+        switch self {
+        case .serverError(let error):
+            return error
+        case .networkError(let error):
+            return error.localizedDescription
+        case .badServerResponse:
+            return "Invalid response received from server"
+        case .cancelled:
+            return "Request was cancelled"
+        }
+    }
 }

Sources/AIModelRetriever/Documentation.docc/Documentation.md

Lines changed: 14 additions & 11 deletions
@@ -103,22 +103,25 @@ do {
 
 ### Error Handling
 
-The package uses ``AIModelRetrieverError`` to represent specific errors that may occur. You can catch and handle these errors as follows:
+The package provides structured error handling through the ``AIModelRetrieverError`` enum. This enum contains four cases that represent the different types of errors you might encounter:
 
 ```swift
-let apiKey = "your-openai-api-key"
-
 do {
-    let models = try await modelRetriever.openai(apiKey: apiKey)
-    // Process models
+    let models = try await modelRetriever.openAI(apiKey: "your-api-key")
 } catch let error as AIModelRetrieverError {
     switch error {
+    case .serverError(let message):
+        // Handle server-side errors (e.g., invalid API key, rate limits)
+        print("Server Error: \(message)")
+    case .networkError(let error):
+        // Handle network-related errors (e.g., no internet connection)
+        print("Network Error: \(error.localizedDescription)")
     case .badServerResponse:
-        print("Received an invalid response from the server")
-    case .serverError(let statusCode, let errorMessage):
-        print("Server error (status \(statusCode)): \(errorMessage ?? "No error message provided")")
+        // Handle invalid server responses
+        print("Invalid response received from server")
+    case .cancelled:
+        // Handle cancelled requests
+        print("Request cancelled")
     }
-} catch {
-    print("An unexpected error occurred: \(error)")
 }
 ```
