@@ -64,44 +64,49 @@ def _normalize_hf_model(model: str) -> str:
     return model
 
 
-async def _lookup_and_format_providers(model: str) -> str | None:
-    """Look up inference providers for a model and return a formatted message.
+def _resolve_alias_display(model: str) -> tuple[str, str] | None:
+    """Resolve alias to full model string for display, preserving suffix overrides."""
+    from fast_agent.llm.model_factory import ModelFactory
 
-    Returns None if the model is not a HuggingFace model (no '/').
-    """
-    import random
+    if not model:
+        return None
 
-    # Extract the HF model ID from various formats
-    model_id = model
+    alias_key = model
+    alias_suffix: str | None = None
+    if ":" in model:
+        alias_key, alias_suffix = model.rsplit(":", 1)
 
-    # Strip hf. prefix if present
-    if model_id.startswith("hf."):
-        model_id = model_id[3:]
+    alias_target = ModelFactory.MODEL_ALIASES.get(alias_key)
+    if not alias_target:
+        return None
 
-    # Strip :provider suffix if present
-    if ":" in model_id:
-        model_id = model_id.rsplit(":", 1)[0]
+    resolved = alias_target
+    if alias_suffix:
+        if ":" in resolved:
+            resolved = resolved.rsplit(":", 1)[0]
+        resolved = f"{resolved}:{alias_suffix}"
 
-    # Must have org/model format
-    if "/" not in model_id:
-        return None
+    return model, resolved
 
-    from fast_agent.llm.hf_inference_lookup import lookup_inference_providers
+
+async def _lookup_and_format_providers(model: str) -> str | None:
+    """Look up inference providers for a model and return a formatted message.
+
+    Returns None if the model is not a HuggingFace model (no '/').
+    """
+    from fast_agent.llm.hf_inference_lookup import (
+        format_provider_help_message,
+        lookup_inference_providers,
+        normalize_hf_model_id,
+    )
+
+    model_id = normalize_hf_model_id(model)
+    if model_id is None:
+        return None
 
     try:
         result = await lookup_inference_providers(model_id)
-        if result.has_providers:
-            providers = result.format_provider_list()
-            model_strings = result.format_model_strings()
-            example = random.choice(model_strings)
-            return (
-                f"**Available providers:** {providers}\n\n"
-                f"**Autoroutes if no provider specified. Example use:** `/set-model {example}`"
-            )
-        elif result.exists:
-            return "No inference providers currently available for this model."
-        else:
-            return None
+        return format_provider_help_message(result)
     except Exception:
         return None
 
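A quick illustration of what the new `_resolve_alias_display` helper returns. This is a self-contained sketch, not the fast-agent implementation: the alias name and target below are made up, whereas the real table lives in `ModelFactory.MODEL_ALIASES`.

```python
# Sketch only - "kimi" and its target are hypothetical alias entries.
ALIASES = {"kimi": "hf.moonshotai/Kimi-K2-Instruct:groq"}

def resolve_alias_display(model: str, aliases: dict[str, str]) -> tuple[str, str] | None:
    """Mirror of _resolve_alias_display, parameterised on the alias table."""
    if not model:
        return None
    alias_key, alias_suffix = model, None
    if ":" in model:
        alias_key, alias_suffix = model.rsplit(":", 1)
    target = aliases.get(alias_key)
    if not target:
        return None  # not an alias - caller falls back to the raw model string
    if alias_suffix:
        # A ':provider' typed by the user overrides the alias target's own suffix.
        target = target.rsplit(":", 1)[0] if ":" in target else target
        target = f"{target}:{alias_suffix}"
    return model, target

assert resolve_alias_display("kimi", ALIASES) == ("kimi", "hf.moonshotai/Kimi-K2-Instruct:groq")
assert resolve_alias_display("kimi:cerebras", ALIASES) == (
    "kimi:cerebras",
    "hf.moonshotai/Kimi-K2-Instruct:cerebras",
)
assert resolve_alias_display("hf.org/model", ALIASES) is None
```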
@@ -131,8 +136,9 @@ async def attach_llm(self, llm_factory, model=None, request_params=None, **kwarg
         llm = await super().attach_llm(llm_factory, model, request_params, **kwargs)
 
         # Set up wizard callback if LLM supports it
-        if hasattr(llm, "set_completion_callback"):
-            llm.set_completion_callback(self._on_wizard_complete)
+        callback_setter = getattr(llm, "set_completion_callback", None)
+        if callback_setter is not None:
+            callback_setter(self._on_wizard_complete)
 
         return llm
 
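The `getattr(..., None)` form above does the attribute lookup once and keeps the optional-hook check in one place. A minimal standalone sketch of the pattern (the class names here are illustrative, not fast-agent types):

```python
class WithHook:
    def set_completion_callback(self, cb) -> None:
        self._cb = cb

class WithoutHook:
    pass

def wire_callback(llm, on_complete) -> bool:
    # One attribute lookup instead of hasattr() followed by a second access.
    setter = getattr(llm, "set_completion_callback", None)
    if setter is None:
        return False
    setter(on_complete)
    return True

assert wire_callback(WithHook(), lambda *_: None) is True
assert wire_callback(WithoutHook(), lambda *_: None) is False
```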
@@ -220,32 +226,45 @@ def acp_session_commands_allowlist(self) -> set[str]:
 
     async def _handle_set_model(self, arguments: str) -> str:
         """Handler for /set-model command."""
+        from fast_agent.llm.hf_inference_lookup import validate_hf_model
         from fast_agent.llm.model_factory import ModelFactory
 
-        model = arguments.strip()
+        raw_model = arguments.strip()
+        model = raw_model
         if not model:
             return format_model_list_help()
 
+        alias_info = _resolve_alias_display(raw_model)
+
         # Normalize the model string (auto-add hf. prefix if needed)
         model = _normalize_hf_model(model)
 
-        # Validate the model string before saving to config
+        # Validate the model string format
         try:
             ModelFactory.parse_model_string(model)
         except Exception as e:
             return f"Error: Invalid model `{model}` - {e}"
 
-        # Look up inference providers for this model
-        provider_info = await _lookup_and_format_providers(model)
+        # Validate model exists on HuggingFace and has providers
+        validation = await validate_hf_model(model, aliases=ModelFactory.MODEL_ALIASES)
+        if not validation.valid:
+            return validation.error or "Error: Model validation failed"
 
         try:
             update_model_in_config(model)
             applied = await self._apply_model_to_running_hf_agent(model)
             applied_note = "\n\nApplied to the running Hugging Face agent." if applied else ""
-            provider_prefix = f"{provider_info}\n\n" if provider_info else ""
+            provider_prefix = (
+                f"{validation.display_message}\n\n" if validation.display_message else ""
+            )
+            if alias_info:
+                alias_display, resolved_alias = alias_info
+                model_status = f"Active model set to: `{alias_display}` (`{resolved_alias}`)"
+            else:
+                model_status = f"Default model set to: `{model}`"
             return (
                 f"{provider_prefix}"
-                f"Default model set to: `{model}`\n\nConfig file updated: `{CONFIG_FILE}`"
+                f"{model_status}\n\nConfig file updated: `{CONFIG_FILE}`"
                 f"{applied_note}"
             )
         except Exception as e:
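For readers of the handler above: it only touches three fields of whatever `validate_hf_model` returns. A minimal sketch of that contract follows; the class name and defaults are assumptions, the real definition lives in `fast_agent.llm.hf_inference_lookup` and may differ.

```python
from dataclasses import dataclass

@dataclass
class ModelValidationSketch:
    """Assumed shape of the validate_hf_model result used by _handle_set_model."""
    valid: bool                         # False aborts before the config is touched
    error: str | None = None            # user-facing message returned when valid is False
    display_message: str | None = None  # provider summary prefixed to the success reply

# Consumption pattern from the handler:
#   if not validation.valid:
#       return validation.error or "Error: Model validation failed"
#   provider_prefix = f"{validation.display_message}\n\n" if validation.display_message else ""
```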
@@ -311,32 +330,19 @@ async def _get_model_provider_info(self, model: str) -> str | None:
 
         Returns None if providers cannot be looked up or model is not a HF model.
         """
-        from fast_agent.llm.hf_inference_lookup import lookup_inference_providers
-
-        # Extract the HF model ID from various formats
-        model_id = model
-
-        # Strip hf. prefix if present
-        if model_id.startswith("hf."):
-            model_id = model_id[3:]
-
-        # Strip :provider suffix if present
-        if ":" in model_id:
-            model_id = model_id.rsplit(":", 1)[0]
+        from fast_agent.llm.hf_inference_lookup import (
+            format_provider_summary,
+            lookup_inference_providers,
+            normalize_hf_model_id,
+        )
 
-        # Must have org/model format
-        if "/" not in model_id:
+        model_id = normalize_hf_model_id(model)
+        if model_id is None:
             return None
 
         try:
             result = await lookup_inference_providers(model_id)
-            if result.has_providers:
-                providers = result.format_provider_list()
-                return f"Available providers: {providers}"
-            elif result.exists:
-                return "No inference providers available"
-            else:
-                return None
+            return format_provider_summary(result)
         except Exception:
             return None
 
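The deleted inline parsing pins down what the shared `normalize_hf_model_id` helper is expected to do. A self-contained sketch of that behaviour (the real implementation lives in `hf_inference_lookup` and may handle more cases):

```python
def normalize_hf_model_id_sketch(model: str) -> str | None:
    """Strip an 'hf.' prefix and a trailing ':provider', require an 'org/model' id."""
    model_id = model
    if model_id.startswith("hf."):
        model_id = model_id[3:]
    if ":" in model_id:
        model_id = model_id.rsplit(":", 1)[0]
    return model_id if "/" in model_id else None

assert normalize_hf_model_id_sketch("hf.Qwen/Qwen2.5-72B-Instruct:together") == "Qwen/Qwen2.5-72B-Instruct"
assert normalize_hf_model_id_sketch("org/model") == "org/model"
assert normalize_hf_model_id_sketch("gpt-4o") is None  # not an org/model Hugging Face id
```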
@@ -498,29 +504,25 @@ async def _send_connect_update(
         await _send_connect_update(title="Connected", status="in_progress")
 
         # Rebuild system prompt to include fresh server instructions
-        await _send_connect_update(
-            title="Rebuilding system prompt…", status="in_progress"
-        )
-        await self.rebuild_instruction_templates()
+        await _send_connect_update(title="Rebuilding system prompt…", status="in_progress")
+        await self._apply_instruction_templates()
 
         # Get available tools
         await _send_connect_update(title="Fetching available tools…", status="in_progress")
         tools_result = await self._aggregator.list_tools()
         tool_names = [t.name for t in tools_result.tools] if tools_result.tools else []
 
+        # Send final progress update (but don't mark as completed yet -
+        # the return value serves as the completion signal)
         if tool_names:
-            preview = ", ".join(tool_names[:10])
-            suffix = f" (+{len(tool_names) - 10} more)" if len(tool_names) > 10 else ""
             await _send_connect_update(
-                title="Connected (tools available)",
+                title=f"Connected ({len(tool_names)} tools)",
                 status="completed",
-                message=f"Available tools: {preview}{suffix}",
             )
         else:
             await _send_connect_update(
-                title="Connected (no tools found)",
+                title="Connected (no tools)",
                 status="completed",
-                message="No tools available from the server.",
             )
 
         if tool_names:
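The connection status titles now carry a tool count instead of a truncated name preview; the branching reduces to something like this (a sketch using only the strings visible in the diff):

```python
def connect_title(tool_names: list[str]) -> str:
    # Shorter progress title: a count rather than a comma-separated preview.
    return f"Connected ({len(tool_names)} tools)" if tool_names else "Connected (no tools)"

assert connect_title(["fetch", "search"]) == "Connected (2 tools)"
assert connect_title([]) == "Connected (no tools)"
```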
@@ -543,30 +545,43 @@ async def _send_connect_update(
 
     async def _handle_set_model(self, arguments: str) -> str:
         """Handler for /set-model in Hugging Face mode."""
+        from fast_agent.llm.hf_inference_lookup import validate_hf_model
         from fast_agent.llm.model_factory import ModelFactory
 
-        model = arguments.strip()
+        raw_model = arguments.strip()
+        model = raw_model
         if not model:
             return format_model_list_help()
 
+        alias_info = _resolve_alias_display(raw_model)
+
         # Normalize the model string (auto-add hf. prefix if needed)
         model = _normalize_hf_model(model)
 
-        # Validate the model string before applying
+        # Validate the model string format
         try:
             ModelFactory.parse_model_string(model)
         except Exception as e:
             return f"Error: Invalid model `{model}` - {e}"
 
-        # Look up inference providers for this model
-        provider_info = await _lookup_and_format_providers(model)
+        # Validate model exists on HuggingFace and has providers
+        validation = await validate_hf_model(model, aliases=ModelFactory.MODEL_ALIASES)
+        if not validation.valid:
+            return validation.error or "Error: Model validation failed"
 
         try:
             # Apply model first - if this fails, don't update config
             await self.apply_model(model)
             update_model_in_config(model)
-            provider_prefix = f"{provider_info}\n\n" if provider_info else ""
-            return f"{provider_prefix}Active model set to: `{model}`\n\nConfig file updated: `{CONFIG_FILE}`"
+            provider_prefix = (
+                f"{validation.display_message}\n\n" if validation.display_message else ""
+            )
+            if alias_info:
+                alias_display, resolved_alias = alias_info
+                model_status = f"Active model set to: `{alias_display}` (`{resolved_alias}`)"
+            else:
+                model_status = f"Active model set to: `{model}`"
+            return f"{provider_prefix}{model_status}\n\nConfig file updated: `{CONFIG_FILE}`"
         except Exception as e:
             return f"Error setting model: {e}"
 