Merged
8 changes: 8 additions & 0 deletions codex-rs/core/src/models_manager/model_presets.rs
@@ -318,6 +318,14 @@ fn gpt_52_codex_upgrade() -> ModelUpgrade {
"Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work."
.to_string(),
),
migration_markdown: Some(
r#"**Codex just got an upgrade. Introducing {model_to}.**

Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work. Learn more about {model_to} at https://openai.com/index/introducing-gpt-5-2-codex

You can continue using {model_from} if you prefer."#
.to_string(),
),
}
}

10 changes: 10 additions & 0 deletions codex-rs/core/tests/suite/list_models.rs
@@ -410,6 +410,16 @@ fn gpt52_codex_upgrade() -> codex_protocol::openai_models::ModelUpgrade {
"Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work."
.to_string(),
),
migration_markdown: Some(
r#"**Codex just got an upgrade. Introducing {model_to}.**

Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work.

Learn more about {model_to} at https://openai.com/index/introducing-gpt-5-2-codex
Contributor

P2: Align migration_markdown copy with preset

The expected migration_markdown in gpt52_codex_upgrade() includes a blank line before “Learn more…”, but the real preset in core/src/models_manager/model_presets.rs keeps “Learn more…” in the same paragraph. Because list_models_returns_* uses assert_eq! on full ModelPreset values, this mismatch makes those tests fail whenever they run. Either update this expected string to exactly match the preset or adjust the preset to match the new paragraphing so the comparison stays stable.
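
For illustration only, a minimal sketch with abbreviated, hypothetical strings (not the full preset text) showing that the paragraph break alone is enough to make the exact comparison fail:

```rust
fn main() {
    // Hypothetical abbreviated stand-ins for the preset copy and the test's
    // expected copy; the only difference is the blank line before "Learn more".
    let preset_copy = "…project-scale work. Learn more about {model_to} at https://openai.com/index/introducing-gpt-5-2-codex";
    let expected_copy = "…project-scale work.\n\nLearn more about {model_to} at https://openai.com/index/introducing-gpt-5-2-codex";
    // The strings are not byte-identical, so an assert_eq! over the full
    // ModelPreset value (which contains this string) panics.
    assert_ne!(preset_copy, expected_copy);
}
```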

Collaborator Author

@codex fix it

Contributor

Summary

Testing

  • just fmt
  • just fix -p codex-core
  • cargo test -p codex-core (failed: exec::tests::kill_child_process_group_kills_grandchildren_on_timeout reports a grandchild process still alive)
  • cargo test -p codex-core exec::tests::kill_child_process_group_kills_grandchildren_on_timeout -- --nocapture (failed: grandchild process still alive)
  • ⚠️ cargo test --all-features (not run; requires approval)

You can continue using {model_from} if you prefer."#
.to_string(),
),
}
}

2 changes: 2 additions & 0 deletions codex-rs/protocol/src/openai_models.rs
@@ -54,6 +54,7 @@ pub struct ModelUpgrade {
pub migration_config_key: String,
pub model_link: Option<String>,
pub upgrade_copy: Option<String>,
pub migration_markdown: Option<String>,
}

/// Metadata describing a Codex-supported model.
@@ -234,6 +235,7 @@ impl From<ModelInfo> for ModelPreset {
// todo(aibrahim): add the model link here.
model_link: None,
upgrade_copy: None,
migration_markdown: None,
}),
show_in_picker: info.visibility == ModelVisibility::List,
supported_in_api: info.supported_in_api,
7 changes: 7 additions & 0 deletions codex-rs/tui/src/app.rs
@@ -201,6 +201,7 @@ async fn handle_model_migration_prompt_if_needed(
migration_config_key,
model_link,
upgrade_copy,
migration_markdown,
}) = upgrade
{
if migration_prompt_hidden(config, migration_config_key.as_str()) {
@@ -234,6 +235,7 @@ async fn handle_model_migration_prompt_if_needed(
&target_model,
model_link.clone(),
upgrade_copy.clone(),
migration_markdown.clone(),
heading_label,
target_description,
can_opt_out,
@@ -1503,6 +1505,9 @@ mod tests {
fn model_migration_copy_to_plain_text(
copy: &crate::model_migration::ModelMigrationCopy,
) -> String {
if let Some(markdown) = copy.markdown.as_ref() {
return markdown.clone();
}
let mut s = String::new();
for span in &copy.heading {
s.push_str(&span.content);
@@ -1585,6 +1590,7 @@ mod tests {
migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG.to_string(),
model_link: None,
upgrade_copy: None,
migration_markdown: None,
});
available.retain(|preset| preset.model != "gpt-5-codex");
available.push(current.clone());
@@ -1640,6 +1646,7 @@ mod tests {
&upgrade.id,
upgrade.model_link.clone(),
upgrade.upgrade_copy.clone(),
upgrade.migration_markdown.clone(),
target.display_name.clone(),
target_description,
can_opt_out,
55 changes: 52 additions & 3 deletions codex-rs/tui/src/model_migration.rs
@@ -1,4 +1,5 @@
use crate::key_hint;
use crate::markdown_render::render_markdown_text_with_width;
use crate::render::Insets;
use crate::render::renderable::ColumnRenderable;
use crate::render::renderable::Renderable;
@@ -34,6 +35,7 @@ pub(crate) struct ModelMigrationCopy {
pub heading: Vec<Span<'static>>,
pub content: Vec<Line<'static>>,
pub can_opt_out: bool,
pub markdown: Option<String>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -55,15 +57,30 @@ impl MigrationMenuOption {
}
}

#[allow(clippy::too_many_arguments)]
pub(crate) fn migration_copy_for_models(
current_model: &str,
target_model: &str,
model_link: Option<String>,
migration_copy: Option<String>,
migration_markdown: Option<String>,
target_display_name: String,
target_description: Option<String>,
can_opt_out: bool,
) -> ModelMigrationCopy {
if let Some(migration_markdown) = migration_markdown {
return ModelMigrationCopy {
heading: Vec::new(),
content: Vec::new(),
can_opt_out,
markdown: Some(fill_migration_markdown(
&migration_markdown,
current_model,
target_model,
)),
};
}

let heading_text = Span::from(format!(
"Codex just got an upgrade. Introducing {target_display_name}."
))
@@ -113,6 +130,7 @@ pub(crate) fn migration_copy_for_models(
heading: vec![heading_text],
content,
can_opt_out,
markdown: None,
}
}

@@ -237,9 +255,13 @@ impl WidgetRef for &ModelMigrationScreen {

let mut column = ColumnRenderable::new();
column.push("");
column.push(self.heading_line());
column.push(Line::from(""));
self.render_content(&mut column);
if let Some(markdown) = self.copy.markdown.as_ref() {
self.render_markdown_content(markdown, area.width, &mut column);
} else {
column.push(self.heading_line());
column.push(Line::from(""));
self.render_content(&mut column);
}
if self.copy.can_opt_out {
self.render_menu(&mut column);
}
@@ -290,6 +312,21 @@ impl ModelMigrationScreen {
}
}

fn render_markdown_content(
&self,
markdown: &str,
area_width: u16,
column: &mut ColumnRenderable,
) {
let horizontal_inset = 2;
let content_width = area_width.saturating_sub(horizontal_inset);
let wrap_width = (content_width > 0).then_some(content_width as usize);
let rendered = render_markdown_text_with_width(markdown, wrap_width);
for line in rendered.lines {
column.push(line.inset(Insets::tlbr(0, horizontal_inset, 0, 0)));
}
}

fn render_menu(&self, column: &mut ColumnRenderable) {
column.push(Line::from(""));
column.push(
@@ -348,6 +385,12 @@ fn is_ctrl_exit_combo(key_event: KeyEvent) -> bool {
&& matches!(key_event.code, KeyCode::Char('c') | KeyCode::Char('d'))
}

fn fill_migration_markdown(template: &str, current_model: &str, target_model: &str) -> String {
template
.replace("{model_from}", current_model)
.replace("{model_to}", target_model)
}

#[cfg(test)]
mod tests {
use super::ModelMigrationScreen;
@@ -378,6 +421,7 @@ mod tests {
"Upgrade to gpt-5.2-codex for the latest and greatest agentic coding model."
.to_string(),
),
None,
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
true,
@@ -406,6 +450,7 @@ mod tests {
"gpt-5.1",
Some("https://www.codex.com/models/gpt-5.1".to_string()),
None,
None,
"gpt-5.1".to_string(),
Some("Broad world knowledge with strong general reasoning.".to_string()),
false,
@@ -432,6 +477,7 @@ mod tests {
"gpt-5.1-codex-max",
Some("https://www.codex.com/models/gpt-5.1-codex-max".to_string()),
None,
None,
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
false,
@@ -458,6 +504,7 @@ mod tests {
"gpt-5.1-codex-mini",
Some("https://www.codex.com/models/gpt-5.1-codex-mini".to_string()),
None,
None,
"gpt-5.1-codex-mini".to_string(),
Some("Optimized for codex. Cheaper, faster, but less capable.".to_string()),
false,
@@ -480,6 +527,7 @@ mod tests {
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
None,
None,
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
@@ -508,6 +556,7 @@ mod tests {
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
None,
None,
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
@@ -1,9 +1,8 @@
---
source: tui/src/app.rs
assertion_line: 1579
expression: model_migration_copy_to_plain_text(&copy)
---
Codex just got an upgrade. Introducing gpt-5.2-codex.
**Codex just got an upgrade. Introducing gpt-5.2-codex.**

Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work. Learn more about gpt-5.2-codex at https://openai.com/index/introducing-gpt-5-2-codex

6 changes: 6 additions & 0 deletions codex-rs/tui2/src/app.rs
@@ -231,6 +231,7 @@ async fn handle_model_migration_prompt_if_needed(
id: target_model,
reasoning_effort_mapping,
migration_config_key,
migration_markdown,
..
}) = upgrade
{
@@ -273,6 +274,7 @@ async fn handle_model_migration_prompt_if_needed(
&target_model,
heading_label,
target_description,
migration_markdown.clone(),
can_opt_out,
);
match run_model_migration_prompt(tui, prompt_copy).await {
@@ -2290,6 +2292,9 @@ mod tests {
fn model_migration_copy_to_plain_text(
copy: &crate::model_migration::ModelMigrationCopy,
) -> String {
if let Some(markdown) = copy.markdown.as_ref() {
return markdown.clone();
}
let mut s = String::new();
for span in &copy.heading {
s.push_str(&span.content);
@@ -2384,6 +2389,7 @@ mod tests {
&upgrade.id,
target.display_name,
target_description,
upgrade.migration_markdown.clone(),
can_opt_out,
);
