Skip to content

Commit f32dd35

Browse files
committed
add streaming support
1 parent 0953e7e commit f32dd35

File tree

6 files changed

+642
-354
lines changed

6 files changed

+642
-354
lines changed

GeminiClient/GeminiApiClient.cs

Lines changed: 116 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
// GeminiClient/GeminiApiClient.cs (Updated for trim-safe serialization)
2-
using System.Net.Http.Json;
1+
// GeminiClient/GeminiApiClient.cs (Updated for streaming support)
2+
using System.Runtime.CompilerServices;
33
using System.Text;
44
using System.Text.Json;
55
using System.Web;
@@ -25,7 +25,6 @@ public GeminiApiClient(HttpClient httpClient, IOptions<GeminiApiOptions> options
2525
{
2626
throw new ArgumentException("ApiKey is missing in GeminiApiOptions.");
2727
}
28-
2928
if (string.IsNullOrWhiteSpace(_options.BaseUrl))
3029
{
3130
throw new ArgumentException("BaseUrl is missing in GeminiApiOptions.");
@@ -38,7 +37,6 @@ public GeminiApiClient(HttpClient httpClient, IOptions<GeminiApiOptions> options
3837
ArgumentException.ThrowIfNullOrWhiteSpace(prompt);
3938

4039
string? apiKey = _options.ApiKey;
41-
4240
string path = $"/v1beta/models/{modelName}:generateContent";
4341
var uriBuilder = new UriBuilder(_httpClient.BaseAddress!)
4442
{
@@ -56,12 +54,10 @@ public GeminiApiClient(HttpClient httpClient, IOptions<GeminiApiOptions> options
5654

5755
try
5856
{
59-
// Trim-safe serialization using source-generated context
6057
var jsonString = JsonSerializer.Serialize(requestBody, GeminiJsonContext.Default.GeminiRequest);
6158
using var jsonContent = new StringContent(jsonString, Encoding.UTF8, "application/json");
6259

6360
using HttpResponseMessage response = await _httpClient.PostAsync(requestUri, jsonContent, cancellationToken);
64-
6561
if (!response.IsSuccessStatusCode)
6662
{
6763
string errorContent = await response.Content.ReadAsStringAsync(cancellationToken);
@@ -70,7 +66,6 @@ public GeminiApiClient(HttpClient httpClient, IOptions<GeminiApiOptions> options
7066
_ = response.EnsureSuccessStatusCode();
7167
}
7268

73-
// Trim-safe deserialization using source-generated context
7469
var responseJson = await response.Content.ReadAsStringAsync(cancellationToken);
7570
var geminiResponse = JsonSerializer.Deserialize(responseJson, GeminiJsonContext.Default.GeminiResponse);
7671

@@ -94,4 +89,117 @@ public GeminiApiClient(HttpClient httpClient, IOptions<GeminiApiOptions> options
9489
throw;
9590
}
9691
}
97-
}
92+
93+
/// <summary>
/// Streams generated content from the Gemini API for <paramref name="prompt"/>,
/// yielding text chunks as each Server-Sent Event (SSE) frame arrives.
/// </summary>
/// <param name="modelName">Model to invoke (e.g. "gemini-2.0-flash").</param>
/// <param name="prompt">Text prompt for content generation.</param>
/// <param name="cancellationToken">Cancels both the HTTP request and stream enumeration.</param>
/// <returns>An async sequence of non-empty text chunks in arrival order.</returns>
/// <exception cref="ArgumentException">If <paramref name="modelName"/> or <paramref name="prompt"/> is null/whitespace.</exception>
/// <exception cref="HttpRequestException">If the request fails or returns a non-success status.</exception>
public async IAsyncEnumerable<string> StreamGenerateContentAsync(
    string modelName,
    string prompt,
    [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(modelName);
    ArgumentException.ThrowIfNullOrWhiteSpace(prompt);

    string? apiKey = _options.ApiKey;
    string path = $"/v1beta/models/{modelName}:streamGenerateContent";
    var uriBuilder = new UriBuilder(_httpClient.BaseAddress!)
    {
        Path = path,
        Query = $"key={HttpUtility.UrlEncode(apiKey)}&alt=sse" // alt=sse requests SSE framing from the API
    };
    Uri requestUri = uriBuilder.Uri;

    var requestBody = new GeminiRequest
    {
        Contents = [new Content { Parts = [new Part { Text = prompt }] }]
    };

    _logger.LogInformation("Sending streaming request to Gemini API: {Uri}", requestUri);

    // Perform the request and all error handling BEFORE the first yield:
    // C# forbids 'yield return' inside a try block that has a catch clause,
    // so setup failures must surface as exceptions here.
    HttpResponseMessage? response = null;
    Stream stream;
    StreamReader reader;

    // Trim-safe serialization using the source-generated context.
    var jsonString = JsonSerializer.Serialize(requestBody, GeminiJsonContext.Default.GeminiRequest);
    using var jsonContent = new StringContent(jsonString, Encoding.UTF8, "application/json");

    using var request = new HttpRequestMessage(HttpMethod.Post, requestUri)
    {
        Content = jsonContent
    };

    // Advertise SSE expectations to the server.
    request.Headers.Accept.Clear();
    request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("text/event-stream"));
    request.Headers.CacheControl = new System.Net.Http.Headers.CacheControlHeaderValue { NoCache = true };

    try
    {
        // ResponseHeadersRead lets us start consuming the body before it completes.
        response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken);

        if (!response.IsSuccessStatusCode)
        {
            string errorContent = await response.Content.ReadAsStringAsync(cancellationToken);
            _logger.LogError("Gemini API streaming request failed with status code {StatusCode}. Response: {ErrorContent}",
                response.StatusCode, errorContent);
            response.EnsureSuccessStatusCode();
        }

        stream = await response.Content.ReadAsStreamAsync(cancellationToken);
        reader = new StreamReader(stream);
    }
    catch (HttpRequestException ex)
    {
        // Fix: dispose the response on the failure path — previously it leaked
        // when EnsureSuccessStatusCode/ReadAsStreamAsync threw.
        response?.Dispose();
        _logger.LogError(ex, "HTTP request error calling Gemini API streaming endpoint.");
        throw;
    }
    catch (Exception ex)
    {
        response?.Dispose();
        _logger.LogError(ex, "An unexpected error occurred while streaming from Gemini API.");
        throw;
    }

    // Consume the SSE stream; yields must stay outside any try/catch.
    using (response)
    using (stream)
    using (reader)
    {
        string? line;
        while ((line = await reader.ReadLineAsync(cancellationToken)) != null)
        {
            // Skip blank keep-alive lines and SSE comment lines (": ...").
            // Char/Ordinal overloads avoid culture-sensitive comparison (CA1310).
            if (string.IsNullOrWhiteSpace(line) || line.StartsWith(':'))
                continue;

            // SSE data frame: "data: {json}"
            if (line.StartsWith("data: ", StringComparison.Ordinal))
            {
                string jsonData = line.Substring(6); // strip "data: " prefix

                // Sentinel end-of-stream frame used by some SSE endpoints.
                if (jsonData == "[DONE]")
                    break;

                // Parse the frame; a malformed chunk is logged and skipped
                // rather than aborting the whole stream.
                string? textChunk = null;
                try
                {
                    var streamResponse = JsonSerializer.Deserialize(jsonData, GeminiJsonContext.Default.GeminiResponse);
                    textChunk = streamResponse?.Candidates?.FirstOrDefault()?.Content?.Parts?.FirstOrDefault()?.Text;
                }
                catch (JsonException ex)
                {
                    _logger.LogWarning(ex, "Failed to parse SSE data: {JsonData}", jsonData);
                    continue;
                }

                if (!string.IsNullOrEmpty(textChunk))
                {
                    yield return textChunk;
                }
            }
        }
    }

    _logger.LogInformation("Successfully completed streaming from Gemini API.");
}
205+
}

GeminiClient/IGeminiApiClient.cs

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,15 @@ public interface IGeminiApiClient
1313
/// <exception cref="HttpRequestException">Thrown if the API request fails.</exception>
1414
/// <exception cref="ArgumentException">Thrown if required configuration is missing.</exception>
1515
Task<string?> GenerateContentAsync(string modelName, string prompt, CancellationToken cancellationToken = default);
16+
17+
/// <summary>
18+
/// Generates content using streaming, yielding text chunks as they arrive.
19+
/// </summary>
20+
/// <param name="modelName">The name of the model (e.g., "gemini-2.0-flash").</param>
21+
/// <param name="prompt">The text prompt for content generation.</param>
22+
/// <param name="cancellationToken">A token to cancel the asynchronous operation.</param>
23+
/// <returns>An async enumerable of text chunks as they're generated.</returns>
24+
/// <exception cref="HttpRequestException">Thrown if the API request fails.</exception>
25+
/// <exception cref="ArgumentException">Thrown if required configuration is missing.</exception>
26+
IAsyncEnumerable<string> StreamGenerateContentAsync(string modelName, string prompt, CancellationToken cancellationToken = default);
1627
}

GeminiClientConsole/AppRunner.cs

Lines changed: 132 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
using GeminiClient;
2-
using GeminiClientConsole;
1+
// GeminiClientConsole/AppRunner.cs (Console-specific UI component)
2+
using GeminiClient;
33
using Microsoft.Extensions.Logging;
44
using System.Diagnostics;
55
using System.Text;
@@ -13,6 +13,7 @@ public class AppRunner
1313
private readonly ConsoleModelSelector _modelSelector;
1414
private string? _selectedModel;
1515
private readonly List<ResponseMetrics> _sessionMetrics = new();
16+
private bool _streamingEnabled = true; // Default to streaming
1617

1718
public AppRunner(
1819
IGeminiApiClient geminiClient,
@@ -33,7 +34,7 @@ public async Task RunAsync()
3334

3435
while (true)
3536
{
36-
Console.WriteLine("\n📝 Enter prompt ('exit' to quit, 'model' to change model, 'stats' for session stats):");
37+
Console.WriteLine($"\n📝 Enter prompt ('exit' to quit, 'model' to change model, 'stats' for session stats, 'stream' to toggle streaming: {(_streamingEnabled ? "ON" : "OFF")}):");
3738
Console.Write("> ");
3839
string? input = Console.ReadLine();
3940

@@ -56,6 +57,15 @@ public async Task RunAsync()
5657
continue;
5758
}
5859

60+
if (string.Equals(input, "stream", StringComparison.OrdinalIgnoreCase))
61+
{
62+
_streamingEnabled = !_streamingEnabled;
63+
Console.ForegroundColor = ConsoleColor.Green;
64+
Console.WriteLine($"✓ Streaming {(_streamingEnabled ? "enabled" : "disabled")}");
65+
Console.ResetColor();
66+
continue;
67+
}
68+
5969
if (string.IsNullOrWhiteSpace(input))
6070
{
6171
Console.ForegroundColor = ConsoleColor.Yellow;
@@ -64,12 +74,102 @@ public async Task RunAsync()
6474
continue;
6575
}
6676

67-
await ProcessPromptAsync(input);
77+
if (_streamingEnabled)
78+
{
79+
await ProcessPromptStreamingAsync(input);
80+
}
81+
else
82+
{
83+
await ProcessPromptAsync(input);
84+
}
6885
}
6986

7087
_logger.LogInformation("Application finished");
7188
}
7289

90+
/// <summary>
/// Sends <paramref name="prompt"/> to the selected model via the streaming API,
/// echoing chunks to the console as they arrive, then records and displays
/// performance metrics. Errors are reported to the console and logged; they
/// are not rethrown, so the REPL loop continues.
/// </summary>
private async Task ProcessPromptStreamingAsync(string prompt)
{
    try
    {
        // Display response header
        Console.ForegroundColor = ConsoleColor.Cyan;
        Console.WriteLine($"\n╭─── Streaming Response ───╮");
        Console.ResetColor();

        var totalTimer = Stopwatch.StartNew();
        var responseBuilder = new StringBuilder();
        var firstChunkReceived = false;

        await foreach (string chunk in _geminiClient.StreamGenerateContentAsync(_selectedModel!, prompt))
        {
            if (!firstChunkReceived)
            {
                firstChunkReceived = true;
                // Time-to-first-chunk is the latency users actually feel.
                Console.ForegroundColor = ConsoleColor.DarkGreen;
                Console.WriteLine($"⚡ First response: {totalTimer.ElapsedMilliseconds}ms");
                Console.ResetColor();
                Console.WriteLine(); // Add some spacing
            }

            // Write chunk immediately to console
            Console.Write(chunk);
            responseBuilder.Append(chunk);
        }

        totalTimer.Stop();

        // Add some spacing after the response
        Console.WriteLine();
        Console.ForegroundColor = ConsoleColor.Cyan;
        Console.WriteLine("╰────────────────╯");
        Console.ResetColor();

        // Calculate and store metrics
        var completeResponse = responseBuilder.ToString();
        var metrics = new ResponseMetrics
        {
            Model = _selectedModel!,
            PromptLength = prompt.Length,
            ResponseLength = completeResponse.Length,
            ElapsedTime = totalTimer.Elapsed,
            Timestamp = DateTime.Now
        };

        _sessionMetrics.Add(metrics);

        // Display performance metrics for streaming
        DisplayStreamingMetrics(metrics, completeResponse);
    }
    // Fix: discriminate on HttpRequestException.StatusCode (.NET 5+) instead of
    // fragile Message.Contains("500") string matching, which breaks if the
    // runtime's exception wording changes or "500" appears elsewhere.
    catch (HttpRequestException httpEx) when (httpEx.StatusCode == System.Net.HttpStatusCode.InternalServerError)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine($"\n❌ Server Error: The model '{_selectedModel}' is experiencing issues.");
        Console.ForegroundColor = ConsoleColor.Yellow;
        Console.WriteLine($"💡 Tip: Try switching to a different model using the 'model' command.");
        Console.WriteLine($"   Recommended stable models: gemini-2.5-flash, gemini-2.0-flash");
        Console.ResetColor();

        _logger.LogError(httpEx, "Server error from Gemini API");
    }
    catch (HttpRequestException httpEx)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine($"\n❌ Network Error: {httpEx.Message}");
        Console.ResetColor();

        _logger.LogError(httpEx, "HTTP error during content generation");
    }
    catch (Exception ex)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine($"\n❌ Unexpected Error: {ex.Message}");
        Console.ResetColor();

        _logger.LogError(ex, "Error during content generation");
    }
}
172+
73173
private async Task ProcessPromptAsync(string prompt)
74174
{
75175
Task? animationTask = null;
@@ -199,6 +299,32 @@ private void DisplayResponse(string response, ResponseMetrics metrics)
199299
DisplayMetrics(metrics, wordCount, tokensPerSecond);
200300
}
201301

302+
/// <summary>
/// Prints performance statistics for a completed streaming response:
/// total time, word/character counts, estimated token throughput, and —
/// once more than one response has been recorded — a comparison against
/// the session's average response time.
/// </summary>
private void DisplayStreamingMetrics(ResponseMetrics metrics, string response)
{
    // Clamp the denominator so a near-instant response cannot divide by zero.
    double elapsedSeconds = Math.Max(metrics.ElapsedTime.TotalSeconds, 0.001);
    double tokensPerSecond = EstimateTokens(response) / elapsedSeconds;
    int wordCount = response.Split(' ', StringSplitOptions.RemoveEmptyEntries).Length;

    Console.ForegroundColor = ConsoleColor.DarkGray;
    Console.WriteLine("📊 Streaming Performance Metrics:");

    string speedBar = CreateSpeedBar(tokensPerSecond);

    Console.WriteLine($" └─ Total Time: {FormatElapsedTime(metrics.ElapsedTime)}");
    Console.WriteLine($" └─ Words: {wordCount} | Characters: {metrics.ResponseLength:N0}");
    Console.WriteLine($" └─ Est. Tokens: ~{EstimateTokens(metrics.ResponseLength)} | Speed: {tokensPerSecond:F1} tokens/s {speedBar}");
    Console.WriteLine(" └─ Mode: 🌊 Streaming (real-time)");

    // With at least two responses recorded, show how this one compares
    // to the running session average.
    if (_sessionMetrics.Count > 1)
    {
        double averageMs = _sessionMetrics.Average(m => m.ElapsedTime.TotalMilliseconds);
        var averageTime = TimeSpan.FromMilliseconds(averageMs);
        string comparison = metrics.ElapsedTime < averageTime ? "🟢 faster" : "🔴 slower";
        Console.WriteLine($" └─ Session Avg: {FormatElapsedTime(averageTime)} ({comparison})");
    }

    Console.ResetColor();
}
327+
202328
private void DisplayMetrics(ResponseMetrics metrics, int wordCount, double tokensPerSecond)
203329
{
204330
Console.ForegroundColor = ConsoleColor.DarkGray;
@@ -265,6 +391,7 @@ private void DisplaySessionSummary()
265391
Console.WriteLine($" 🐌 Slowest: {FormatElapsedTime(maxResponseTime)}");
266392
Console.WriteLine($" 📝 Total Output: {totalChars:N0} characters");
267393
Console.WriteLine($" ⏰ Session Duration: {FormatElapsedTime(sessionDuration)}");
394+
Console.WriteLine($" 🌊 Streaming: {(_streamingEnabled ? "Enabled" : "Disabled")}");
268395

269396
// Show model usage breakdown
270397
var modelUsage = _sessionMetrics.GroupBy(m => m.Model)
@@ -317,4 +444,4 @@ private class ResponseMetrics
317444
public TimeSpan ElapsedTime { get; set; }
318445
public DateTime Timestamp { get; set; }
319446
}
320-
}
447+
}

0 commit comments

Comments
 (0)