-using GeminiClient;
-using GeminiClientConsole;
+// GeminiClientConsole/AppRunner.cs (Console-specific UI component)
+using GeminiClient;
 using Microsoft.Extensions.Logging;
 using System.Diagnostics;
 using System.Text;
@@ -13,6 +13,7 @@ public class AppRunner
     private readonly ConsoleModelSelector _modelSelector;
     private string? _selectedModel;
     private readonly List<ResponseMetrics> _sessionMetrics = new();
+    private bool _streamingEnabled = true; // Default to streaming

     public AppRunner(
         IGeminiApiClient geminiClient,
@@ -33,7 +34,7 @@ public async Task RunAsync()

         while (true)
         {
-            Console.WriteLine("\n📝 Enter prompt ('exit' to quit, 'model' to change model, 'stats' for session stats):");
+            Console.WriteLine($"\n📝 Enter prompt ('exit' to quit, 'model' to change model, 'stats' for session stats, 'stream' to toggle streaming: {(_streamingEnabled ? "ON" : "OFF")}):");
             Console.Write("> ");
             string? input = Console.ReadLine();

@@ -56,6 +57,15 @@ public async Task RunAsync()
                 continue;
             }

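+            // Toggle between streaming and buffered (non-streaming) responses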
+            if (string.Equals(input, "stream", StringComparison.OrdinalIgnoreCase))
+            {
+                _streamingEnabled = !_streamingEnabled;
+                Console.ForegroundColor = ConsoleColor.Green;
+                Console.WriteLine($"✓ Streaming {(_streamingEnabled ? "enabled" : "disabled")}");
+                Console.ResetColor();
+                continue;
+            }
+
             if (string.IsNullOrWhiteSpace(input))
             {
                 Console.ForegroundColor = ConsoleColor.Yellow;
@@ -64,12 +74,102 @@ public async Task RunAsync()
                 continue;
             }

-            await ProcessPromptAsync(input);
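+            // Route the prompt to the streaming or buffered path based on the toggle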
+            if (_streamingEnabled)
+            {
+                await ProcessPromptStreamingAsync(input);
+            }
+            else
+            {
+                await ProcessPromptAsync(input);
+            }
         }

         _logger.LogInformation("Application finished");
     }

+    private async Task ProcessPromptStreamingAsync(string prompt)
+    {
+        try
+        {
+            // Display response header
+            Console.ForegroundColor = ConsoleColor.Cyan;
+            Console.WriteLine($"\n╭─── Streaming Response ───╮");
+            Console.ResetColor();
+
+            var totalTimer = Stopwatch.StartNew();
+            var responseBuilder = new StringBuilder();
+            var firstChunkReceived = false;
+
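+            // Consume chunks as they arrive from the async stream and echo each one immediately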
+            await foreach (string chunk in _geminiClient.StreamGenerateContentAsync(_selectedModel!, prompt))
+            {
+                if (!firstChunkReceived)
+                {
+                    firstChunkReceived = true;
+                    // Display first chunk timing
+                    Console.ForegroundColor = ConsoleColor.DarkGreen;
+                    Console.WriteLine($"⚡ First response: {totalTimer.ElapsedMilliseconds}ms");
+                    Console.ResetColor();
+                    Console.WriteLine(); // Add some spacing
+                }
+
+                // Write chunk immediately to console
+                Console.Write(chunk);
+                responseBuilder.Append(chunk);
+            }
+
+            totalTimer.Stop();
+
+            // Add some spacing after the response
+            Console.WriteLine();
+            Console.ForegroundColor = ConsoleColor.Cyan;
+            Console.WriteLine("╰────────────────╯");
+            Console.ResetColor();
+
+            // Calculate and store metrics
+            var completeResponse = responseBuilder.ToString();
+            var metrics = new ResponseMetrics
+            {
+                Model = _selectedModel!,
+                PromptLength = prompt.Length,
+                ResponseLength = completeResponse.Length,
+                ElapsedTime = totalTimer.Elapsed,
+                Timestamp = DateTime.Now
+            };
+
+            _sessionMetrics.Add(metrics);
+
+            // Display performance metrics for streaming
+            DisplayStreamingMetrics(metrics, completeResponse);
+        }
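+        // Note: matching "500" in the message text is a heuristic; HttpRequestException.StatusCode (available on .NET 5+) would be a more robust check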
+        catch (HttpRequestException httpEx) when (httpEx.Message.Contains("500"))
+        {
+            Console.ForegroundColor = ConsoleColor.Red;
+            Console.WriteLine($"\n❌ Server Error: The model '{_selectedModel}' is experiencing issues.");
+            Console.ForegroundColor = ConsoleColor.Yellow;
+            Console.WriteLine($"💡 Tip: Try switching to a different model using the 'model' command.");
+            Console.WriteLine($"   Recommended stable models: gemini-2.5-flash, gemini-2.0-flash");
+            Console.ResetColor();
+
+            _logger.LogError(httpEx, "Server error from Gemini API");
+        }
+        catch (HttpRequestException httpEx)
+        {
+            Console.ForegroundColor = ConsoleColor.Red;
+            Console.WriteLine($"\n❌ Network Error: {httpEx.Message}");
+            Console.ResetColor();
+
+            _logger.LogError(httpEx, "HTTP error during content generation");
+        }
+        catch (Exception ex)
+        {
+            Console.ForegroundColor = ConsoleColor.Red;
+            Console.WriteLine($"\n❌ Unexpected Error: {ex.Message}");
+            Console.ResetColor();
+
+            _logger.LogError(ex, "Error during content generation");
+        }
+    }
+
     private async Task ProcessPromptAsync(string prompt)
     {
         Task? animationTask = null;
@@ -199,6 +299,32 @@ private void DisplayResponse(string response, ResponseMetrics metrics)
         DisplayMetrics(metrics, wordCount, tokensPerSecond);
     }

+    private void DisplayStreamingMetrics(ResponseMetrics metrics, string response)
+    {
+        int wordCount = response.Split(' ', StringSplitOptions.RemoveEmptyEntries).Length;
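+        // Speed is an approximation: EstimateTokens estimates the token count, divided by total wall-clock time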
+        double tokensPerSecond = EstimateTokens(response) / Math.Max(metrics.ElapsedTime.TotalSeconds, 0.001);
+
+        Console.ForegroundColor = ConsoleColor.DarkGray;
+        Console.WriteLine($"📊 Streaming Performance Metrics:");
+
+        var speedBar = CreateSpeedBar(tokensPerSecond);
+
+        Console.WriteLine($"   └─ Total Time: {FormatElapsedTime(metrics.ElapsedTime)}");
+        Console.WriteLine($"   └─ Words: {wordCount} | Characters: {metrics.ResponseLength:N0}");
+        Console.WriteLine($"   └─ Est. Tokens: ~{EstimateTokens(metrics.ResponseLength)} | Speed: {tokensPerSecond:F1} tokens/s {speedBar}");
+        Console.WriteLine($"   └─ Mode: 🌊 Streaming (real-time)");
+
+        // Compare with session average if we have enough data
+        if (_sessionMetrics.Count > 1)
+        {
+            var avgTime = TimeSpan.FromMilliseconds(_sessionMetrics.Average(m => m.ElapsedTime.TotalMilliseconds));
+            var comparison = metrics.ElapsedTime < avgTime ? "🟢 faster" : "🔴 slower";
+            Console.WriteLine($"   └─ Session Avg: {FormatElapsedTime(avgTime)} ({comparison})");
+        }
+
+        Console.ResetColor();
+    }
+
     private void DisplayMetrics(ResponseMetrics metrics, int wordCount, double tokensPerSecond)
     {
         Console.ForegroundColor = ConsoleColor.DarkGray;
@@ -265,6 +391,7 @@ private void DisplaySessionSummary()
         Console.WriteLine($"   🐌 Slowest: {FormatElapsedTime(maxResponseTime)}");
         Console.WriteLine($"   📝 Total Output: {totalChars:N0} characters");
         Console.WriteLine($"   ⏰ Session Duration: {FormatElapsedTime(sessionDuration)}");
+        Console.WriteLine($"   🌊 Streaming: {(_streamingEnabled ? "Enabled" : "Disabled")}");

         // Show model usage breakdown
         var modelUsage = _sessionMetrics.GroupBy(m => m.Model)
@@ -317,4 +444,4 @@ private class ResponseMetrics
         public TimeSpan ElapsedTime { get; set; }
         public DateTime Timestamp { get; set; }
     }
-}
+}