From 4c051d3e9315cba8855d27711e3ff101979edf12 Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Fri, 27 Feb 2026 08:00:17 -0800 Subject: [PATCH 1/7] Add model availability NUX tooltips --- codex-rs/core/config.schema.json | 43 ++++ codex-rs/core/src/config/edit.rs | 36 +++ codex-rs/core/src/config/mod.rs | 65 ++++++ codex-rs/core/src/config/types.rs | 23 ++ codex-rs/tui/src/app.rs | 218 +++++++++++++++++- codex-rs/tui/src/chatwidget.rs | 11 + codex-rs/tui/src/chatwidget/tests.rs | 4 + codex-rs/tui/src/history_cell.rs | 96 +++++++- ...nfo_availability_nux_tooltip_snapshot.snap | 12 + 9 files changed, 503 insertions(+), 5 deletions(-) create mode 100644 codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap diff --git a/codex-rs/core/config.schema.json b/codex-rs/core/config.schema.json index d207d5bd8e9..d9807cf2394 100644 --- a/codex-rs/core/config.schema.json +++ b/codex-rs/core/config.schema.json @@ -651,6 +651,37 @@ }, "type": "object" }, + "ModelAvailabilityNuxConfig": { + "additionalProperties": false, + "properties": { + "mode": { + "allOf": [ + { + "$ref": "#/definitions/ModelAvailabilityNuxMode" + } + ], + "default": "per_model", + "description": "Controls how startup availability NUX exposure is tracked." + }, + "shown_count": { + "additionalProperties": { + "format": "uint32", + "minimum": 0.0, + "type": "integer" + }, + "default": {}, + "description": "Number of times a startup availability NUX has been shown per model slug.", + "type": "object" + } + }, + "type": "object" + }, + "ModelAvailabilityNuxMode": { + "enum": [ + "per_model" + ], + "type": "string" + }, "ModelProviderInfo": { "additionalProperties": false, "description": "Serializable representation of a provider definition.", @@ -1412,6 +1443,18 @@ "description": "Enable animations (welcome screen, shimmer effects, spinners). 
Defaults to `true`.", "type": "boolean" }, + "model_availability_nux": { + "allOf": [ + { + "$ref": "#/definitions/ModelAvailabilityNuxConfig" + } + ], + "default": { + "mode": "per_model", + "shown_count": {} + }, + "description": "Startup tooltip availability NUX state persisted by the TUI." + }, "notification_method": { "allOf": [ { diff --git a/codex-rs/core/src/config/edit.rs b/codex-rs/core/src/config/edit.rs index 592f50d9075..d462d834055 100644 --- a/codex-rs/core/src/config/edit.rs +++ b/codex-rs/core/src/config/edit.rs @@ -75,6 +75,18 @@ pub fn status_line_items_edit(items: &[String]) -> ConfigEdit { } } +pub fn model_availability_nux_count_edit(model_slug: &str, count: u32) -> ConfigEdit { + ConfigEdit::SetPath { + segments: vec![ + "tui".to_string(), + "model_availability_nux".to_string(), + "shown_count".to_string(), + model_slug.to_string(), + ], + value: value(i64::from(count)), + } +} + // TODO(jif) move to a dedicated file mod document_helpers { use crate::config::types::McpServerConfig; @@ -799,6 +811,12 @@ impl ConfigEditsBuilder { self } + pub fn set_model_availability_nux_count(mut self, model_slug: &str, count: u32) -> Self { + self.edits + .push(model_availability_nux_count_edit(model_slug, count)); + self + } + pub fn replace_mcp_servers(mut self, servers: &BTreeMap) -> Self { self.edits .push(ConfigEdit::ReplaceMcpServers(servers.clone())); @@ -963,6 +981,24 @@ model_reasoning_effort = "high" assert_eq!(contents, "enabled = true\n"); } + #[test] + fn set_model_availability_nux_count_writes_shown_count() { + let tmp = tempdir().expect("tmpdir"); + let codex_home = tmp.path(); + + ConfigEditsBuilder::new(codex_home) + .set_model_availability_nux_count("gpt-foo", 4) + .apply_blocking() + .expect("persist"); + + let contents = + std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config"); + let expected = r#"[tui.model_availability_nux.shown_count] +gpt-foo = 4 +"#; + assert_eq!(contents, expected); + } + #[test] fn 
set_skill_config_writes_disabled_entry() { let tmp = tempdir().expect("tmpdir"); diff --git a/codex-rs/core/src/config/mod.rs b/codex-rs/core/src/config/mod.rs index b3661fe790b..1dc0474d926 100644 --- a/codex-rs/core/src/config/mod.rs +++ b/codex-rs/core/src/config/mod.rs @@ -9,6 +9,7 @@ use crate::config::types::McpServerDisabledReason; use crate::config::types::McpServerTransportConfig; use crate::config::types::MemoriesConfig; use crate::config::types::MemoriesToml; +use crate::config::types::ModelAvailabilityNuxConfig; use crate::config::types::Notice; use crate::config::types::NotificationMethod; use crate::config::types::Notifications; @@ -276,6 +277,9 @@ pub struct Config { /// Show startup tooltips in the TUI welcome screen. pub show_tooltips: bool, + /// Persisted startup availability NUX state for model tooltips. + pub model_availability_nux: ModelAvailabilityNuxConfig, + /// Start the TUI in the specified collaboration mode (plan/default). /// Controls whether the TUI uses the terminal's alternate screen buffer. 
@@ -2213,6 +2217,11 @@ impl Config { .unwrap_or_default(), animations: cfg.tui.as_ref().map(|t| t.animations).unwrap_or(true), show_tooltips: cfg.tui.as_ref().map(|t| t.show_tooltips).unwrap_or(true), + model_availability_nux: cfg + .tui + .as_ref() + .map(|t| t.model_availability_nux.clone()) + .unwrap_or_default(), tui_alternate_screen: cfg .tui .as_ref() @@ -2401,6 +2410,8 @@ mod tests { use crate::config::types::McpServerTransportConfig; use crate::config::types::MemoriesConfig; use crate::config::types::MemoriesToml; + use crate::config::types::ModelAvailabilityNuxConfig; + use crate::config::types::ModelAvailabilityNuxMode; use crate::config::types::NotificationMethod; use crate::config::types::Notifications; use crate::config_loader::RequirementSource; @@ -2533,6 +2544,55 @@ phase_2_model = "gpt-5" ); } + #[test] + fn config_toml_deserializes_model_availability_nux() { + let toml = r#" +[tui.model_availability_nux] +mode = "per_model" + +[tui.model_availability_nux.shown_count] +"gpt-foo" = 2 +"gpt-bar" = 4 +"#; + let cfg: ConfigToml = + toml::from_str(toml).expect("TOML deserialization should succeed for TUI NUX"); + + assert_eq!( + cfg.tui.expect("tui config should deserialize"), + Tui { + notifications: Notifications::default(), + notification_method: NotificationMethod::default(), + animations: true, + show_tooltips: true, + alternate_screen: AltScreenMode::default(), + status_line: None, + theme: None, + model_availability_nux: ModelAvailabilityNuxConfig { + mode: ModelAvailabilityNuxMode::PerModel, + shown_count: HashMap::from([ + ("gpt-bar".to_string(), 4), + ("gpt-foo".to_string(), 2), + ]), + }, + } + ); + } + + #[test] + fn runtime_config_defaults_model_availability_nux() { + let cfg = Config::load_from_base_config_with_overrides( + ConfigToml::default(), + ConfigOverrides::default(), + tempdir().expect("tempdir").path().to_path_buf(), + ) + .expect("load config"); + + assert_eq!( + cfg.model_availability_nux, + 
ModelAvailabilityNuxConfig::default() + ); + } + #[test] fn config_toml_deserializes_permissions_network() { let toml = r#" @@ -2667,6 +2727,7 @@ theme = "dracula" alternate_screen: AltScreenMode::Auto, status_line: None, theme: None, + model_availability_nux: ModelAvailabilityNuxConfig::default(), } ); } @@ -4878,6 +4939,7 @@ model_verbosity = "high" tui_notification_method: Default::default(), animations: true, show_tooltips: true, + model_availability_nux: ModelAvailabilityNuxConfig::default(), analytics_enabled: Some(true), feedback_enabled: true, tui_alternate_screen: AltScreenMode::Auto, @@ -5005,6 +5067,7 @@ model_verbosity = "high" tui_notification_method: Default::default(), animations: true, show_tooltips: true, + model_availability_nux: ModelAvailabilityNuxConfig::default(), analytics_enabled: Some(true), feedback_enabled: true, tui_alternate_screen: AltScreenMode::Auto, @@ -5130,6 +5193,7 @@ model_verbosity = "high" tui_notification_method: Default::default(), animations: true, show_tooltips: true, + model_availability_nux: ModelAvailabilityNuxConfig::default(), analytics_enabled: Some(false), feedback_enabled: true, tui_alternate_screen: AltScreenMode::Auto, @@ -5241,6 +5305,7 @@ model_verbosity = "high" tui_notification_method: Default::default(), animations: true, show_tooltips: true, + model_availability_nux: ModelAvailabilityNuxConfig::default(), analytics_enabled: Some(true), feedback_enabled: true, tui_alternate_screen: AltScreenMode::Auto, diff --git a/codex-rs/core/src/config/types.rs b/codex-rs/core/src/config/types.rs index ec0bf15320b..259ddc166d6 100644 --- a/codex-rs/core/src/config/types.rs +++ b/codex-rs/core/src/config/types.rs @@ -657,6 +657,25 @@ impl fmt::Display for NotificationMethod { } } +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum ModelAvailabilityNuxMode { + #[default] + PerModel, +} + +#[derive(Serialize, Deserialize, Debug, Clone, 
PartialEq, Eq, Default, JsonSchema)] +#[schemars(deny_unknown_fields)] +pub struct ModelAvailabilityNuxConfig { + /// Controls how startup availability NUX exposure is tracked. + #[serde(default)] + pub mode: ModelAvailabilityNuxMode, + + /// Number of times a startup availability NUX has been shown per model slug. + #[serde(default)] + pub shown_count: HashMap, +} + /// Collection of settings that are specific to the TUI. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)] #[schemars(deny_unknown_fields)] @@ -706,6 +725,10 @@ pub struct Tui { /// Use `/theme` in the TUI or see `$CODEX_HOME/themes` for custom themes. #[serde(default)] pub theme: Option, + + /// Startup tooltip availability NUX state persisted by the TUI. + #[serde(default)] + pub model_availability_nux: ModelAvailabilityNuxConfig, } const fn default_true() -> bool { diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index e32a76da0fa..e8348b82711 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -46,6 +46,8 @@ use codex_core::config::ConfigBuilder; use codex_core::config::ConfigOverrides; use codex_core::config::edit::ConfigEdit; use codex_core::config::edit::ConfigEditsBuilder; +use codex_core::config::types::ModelAvailabilityNuxConfig; +use codex_core::config::types::ModelAvailabilityNuxMode; use codex_core::config_loader::ConfigLayerStackOrdering; use codex_core::features::Feature; use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig; @@ -61,6 +63,7 @@ use codex_protocol::config_types::Personality; #[cfg(target_os = "windows")] use codex_protocol::config_types::WindowsSandboxLevel; use codex_protocol::items::TurnItem; +use codex_protocol::openai_models::ModelAvailabilityNux; use codex_protocol::openai_models::ModelPreset; use codex_protocol::openai_models::ModelUpgrade; use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig; @@ -451,12 +454,83 @@ fn target_preset_for_upgrade<'a>( 
.find(|preset| preset.model == target_model && preset.show_in_picker) } +const MODEL_AVAILABILITY_NUX_MAX_SHOW_COUNT: u32 = 4; + +#[derive(Debug, Clone, PartialEq, Eq)] +struct StartupTooltipOverride { + model_slug: String, + message: String, +} + +fn select_model_availability_nux( + available_models: &[ModelPreset], + nux_config: &ModelAvailabilityNuxConfig, +) -> Option { + match nux_config.mode { + ModelAvailabilityNuxMode::PerModel => available_models.iter().find_map(|preset| { + let ModelAvailabilityNux { message } = preset.availability_nux.as_ref()?; + let shown_count = nux_config + .shown_count + .get(&preset.model) + .copied() + .unwrap_or_default(); + (shown_count < MODEL_AVAILABILITY_NUX_MAX_SHOW_COUNT).then(|| StartupTooltipOverride { + model_slug: preset.model.clone(), + message: message.clone(), + }) + }), + } +} + +async fn prepare_startup_tooltip_override( + config: &mut Config, + available_models: &[ModelPreset], + is_first_run: bool, +) -> Option { + if is_first_run || !config.show_tooltips { + return None; + } + + let Some(tooltip_override) = + select_model_availability_nux(available_models, &config.model_availability_nux) + else { + return None; + }; + + let shown_count = config + .model_availability_nux + .shown_count + .get(&tooltip_override.model_slug) + .copied() + .unwrap_or_default(); + let next_count = shown_count.saturating_add(1); + + if let Err(err) = ConfigEditsBuilder::new(&config.codex_home) + .set_model_availability_nux_count(&tooltip_override.model_slug, next_count) + .apply() + .await + { + tracing::error!( + error = %err, + model = %tooltip_override.model_slug, + "failed to persist model availability nux count" + ); + return Some(tooltip_override.message); + } + + config + .model_availability_nux + .shown_count + .insert(tooltip_override.model_slug, next_count); + Some(tooltip_override.message) +} + async fn handle_model_migration_prompt_if_needed( tui: &mut tui::Tui, config: &mut Config, model: &str, app_event_tx: 
&AppEventSender, - available_models: Vec, + available_models: &[ModelPreset], ) -> Option { let upgrade = available_models .iter() @@ -481,13 +555,13 @@ async fn handle_model_migration_prompt_if_needed( model, &target_model, &config.notices.model_migrations, - &available_models, + available_models, ) { return None; } let current_preset = available_models.iter().find(|preset| preset.model == model); - let target_preset = target_preset_for_upgrade(&available_models, &target_model); + let target_preset = target_preset_for_upgrade(available_models, &target_model); let target_preset = target_preset?; let target_display_name = target_preset.display_name.clone(); let heading_label = if target_display_name == model { @@ -668,6 +742,7 @@ impl App { is_first_run: false, feedback_audience: self.feedback_audience, model: Some(self.chat_widget.current_model().to_string()), + startup_tooltip_override: None, status_line_invalid_items_warned: self.status_line_invalid_items_warned.clone(), otel_manager: self.otel_manager.clone(), } @@ -1194,6 +1269,7 @@ impl App { is_first_run: false, feedback_audience: self.feedback_audience, model: Some(model), + startup_tooltip_override: None, status_line_invalid_items_warned: self.status_line_invalid_items_warned.clone(), otel_manager: self.otel_manager.clone(), }; @@ -1340,7 +1416,7 @@ impl App { &mut config, model.as_str(), &app_event_tx, - available_models, + &available_models, ) .await; if let Some(exit_info) = exit_info { @@ -1349,6 +1425,8 @@ impl App { if let Some(updated_model) = config.model.clone() { model = updated_model; } + let startup_tooltip_override = + prepare_startup_tooltip_override(&mut config, &available_models, is_first_run).await; let auth = auth_manager.auth().await; let auth_ref = auth.as_ref(); @@ -1410,6 +1488,7 @@ impl App { is_first_run, feedback_audience, model: Some(model.clone()), + startup_tooltip_override: startup_tooltip_override.clone(), status_line_invalid_items_warned: 
status_line_invalid_items_warned.clone(), otel_manager: otel_manager.clone(), }; @@ -1444,6 +1523,7 @@ impl App { is_first_run, feedback_audience, model: config.model.clone(), + startup_tooltip_override: startup_tooltip_override.clone(), status_line_invalid_items_warned: status_line_invalid_items_warned.clone(), otel_manager: otel_manager.clone(), }; @@ -1480,6 +1560,7 @@ impl App { is_first_run, feedback_audience, model: config.model.clone(), + startup_tooltip_override: startup_tooltip_override.clone(), status_line_invalid_items_warned: status_line_invalid_items_warned.clone(), otel_manager: otel_manager.clone(), }; @@ -3413,8 +3494,11 @@ mod tests { use codex_core::CodexAuth; use codex_core::config::ConfigBuilder; use codex_core::config::ConfigOverrides; + use codex_core::config::types::ModelAvailabilityNuxConfig; + use codex_core::config::types::ModelAvailabilityNuxMode; use codex_otel::OtelManager; use codex_protocol::ThreadId; + use codex_protocol::openai_models::ModelAvailabilityNux; use codex_protocol::protocol::AskForApproval; use codex_protocol::protocol::Event; use codex_protocol::protocol::EventMsg; @@ -3866,6 +3950,7 @@ mod tests { event, is_first, None, + None, )) as Arc }; @@ -4068,6 +4153,16 @@ mod tests { codex_core::test_support::all_model_presets().clone() } + fn model_availability_nux_config(shown_count: &[(&str, u32)]) -> ModelAvailabilityNuxConfig { + ModelAvailabilityNuxConfig { + mode: ModelAvailabilityNuxMode::PerModel, + shown_count: shown_count + .iter() + .map(|(model, count)| ((*model).to_string(), *count)) + .collect(), + } + } + fn model_migration_copy_to_plain_text( copy: &crate::model_migration::ModelMigrationCopy, ) -> String { @@ -4124,6 +4219,120 @@ mod tests { )); } + #[test] + fn select_model_availability_nux_picks_only_eligible_model() { + let mut presets = all_model_presets(); + presets.iter_mut().for_each(|preset| { + preset.availability_nux = None; + }); + let target = presets + .iter_mut() + .find(|preset| preset.model == 
"gpt-5") + .expect("target preset present"); + target.availability_nux = Some(ModelAvailabilityNux { + message: "gpt-5 is available".to_string(), + }); + + let selected = select_model_availability_nux(&presets, &model_availability_nux_config(&[])); + + assert_eq!( + selected, + Some(StartupTooltipOverride { + model_slug: "gpt-5".to_string(), + message: "gpt-5 is available".to_string(), + }) + ); + } + + #[test] + fn select_model_availability_nux_skips_missing_and_exhausted_models() { + let mut presets = all_model_presets(); + presets.iter_mut().for_each(|preset| { + preset.availability_nux = None; + }); + let gpt_5 = presets + .iter_mut() + .find(|preset| preset.model == "gpt-5") + .expect("gpt-5 preset present"); + gpt_5.availability_nux = Some(ModelAvailabilityNux { + message: "gpt-5 is available".to_string(), + }); + let gpt_5_2 = presets + .iter_mut() + .find(|preset| preset.model == "gpt-5.2") + .expect("gpt-5.2 preset present"); + gpt_5_2.availability_nux = Some(ModelAvailabilityNux { + message: "gpt-5.2 is available".to_string(), + }); + + let selected = select_model_availability_nux( + &presets, + &model_availability_nux_config(&[("gpt-5", MODEL_AVAILABILITY_NUX_MAX_SHOW_COUNT)]), + ); + + assert_eq!( + selected, + Some(StartupTooltipOverride { + model_slug: "gpt-5.2".to_string(), + message: "gpt-5.2 is available".to_string(), + }) + ); + } + + #[test] + fn select_model_availability_nux_uses_existing_model_order_as_priority() { + let mut presets = all_model_presets(); + presets.iter_mut().for_each(|preset| { + preset.availability_nux = None; + }); + let first = presets + .iter_mut() + .find(|preset| preset.model == "gpt-5") + .expect("gpt-5 preset present"); + first.availability_nux = Some(ModelAvailabilityNux { + message: "first".to_string(), + }); + let second = presets + .iter_mut() + .find(|preset| preset.model == "gpt-5.2") + .expect("gpt-5.2 preset present"); + second.availability_nux = Some(ModelAvailabilityNux { + message: "second".to_string(), + 
}); + + let selected = select_model_availability_nux(&presets, &model_availability_nux_config(&[])); + + assert_eq!( + selected, + Some(StartupTooltipOverride { + model_slug: "gpt-5.2".to_string(), + message: "second".to_string(), + }) + ); + } + + #[test] + fn select_model_availability_nux_returns_none_when_all_models_are_exhausted() { + let mut presets = all_model_presets(); + presets.iter_mut().for_each(|preset| { + preset.availability_nux = None; + }); + let target = presets + .iter_mut() + .find(|preset| preset.model == "gpt-5") + .expect("target preset present"); + target.availability_nux = Some(ModelAvailabilityNux { + message: "gpt-5 is available".to_string(), + }); + + let selected = select_model_availability_nux( + &presets, + &model_availability_nux_config(&[("gpt-5", MODEL_AVAILABILITY_NUX_MAX_SHOW_COUNT)]), + ); + + assert_eq!(selected, None); + } + #[tokio::test] async fn model_migration_prompt_respects_hide_flag_and_self_target() { let mut seen = BTreeMap::new(); @@ -4363,6 +4572,7 @@ mod tests { event, is_first, None, + None, )) as Arc }; diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs index 4031cecaf63..d727a17129d 100644 --- a/codex-rs/tui/src/chatwidget.rs +++ b/codex-rs/tui/src/chatwidget.rs @@ -462,6 +462,7 @@ pub(crate) struct ChatWidgetInit { pub(crate) is_first_run: bool, pub(crate) feedback_audience: FeedbackAudience, pub(crate) model: Option, + pub(crate) startup_tooltip_override: Option, // Shared latch so we only warn once about invalid status-line item IDs. pub(crate) status_line_invalid_items_warned: Arc, pub(crate) otel_manager: OtelManager, @@ -603,6 +604,8 @@ pub(crate) struct ChatWidget { frame_requester: FrameRequester, // Whether to include the initial welcome banner on session configured show_welcome_banner: bool, + // One-shot tooltip override for the primary startup session. 
+ startup_tooltip_override: Option, // When resuming an existing session (selected via resume picker), avoid an // immediate redraw on SessionConfigured to prevent a gratuitous UI flicker. suppress_session_configured_redraw: bool, @@ -1144,11 +1147,13 @@ impl ChatWidget { ); self.refresh_model_display(); self.sync_personality_command_enabled(); + let startup_tooltip_override = self.startup_tooltip_override.take(); let session_info_cell = history_cell::new_session_info( &self.config, &model_for_header, event, self.show_welcome_banner, + startup_tooltip_override, self.auth_manager .auth_cached() .and_then(|auth| auth.account_plan_type()), @@ -2755,6 +2760,7 @@ impl ChatWidget { is_first_run, feedback_audience, model, + startup_tooltip_override, status_line_invalid_items_warned, otel_manager, } = common; @@ -2852,6 +2858,7 @@ impl ChatWidget { queued_user_messages: VecDeque::new(), queued_message_edit_binding, show_welcome_banner: is_first_run, + startup_tooltip_override, suppress_session_configured_redraw: false, pending_notification: None, quit_shortcut_expires_at: None, @@ -2933,6 +2940,7 @@ impl ChatWidget { is_first_run, feedback_audience, model, + startup_tooltip_override, status_line_invalid_items_warned, otel_manager, } = common; @@ -3033,6 +3041,7 @@ impl ChatWidget { queued_user_messages: VecDeque::new(), queued_message_edit_binding, show_welcome_banner: is_first_run, + startup_tooltip_override, suppress_session_configured_redraw: false, pending_notification: None, quit_shortcut_expires_at: None, @@ -3099,6 +3108,7 @@ impl ChatWidget { is_first_run: _, feedback_audience, model, + startup_tooltip_override: _, status_line_invalid_items_warned, otel_manager, } = common; @@ -3195,6 +3205,7 @@ impl ChatWidget { queued_user_messages: VecDeque::new(), queued_message_edit_binding, show_welcome_banner: false, + startup_tooltip_override: None, suppress_session_configured_redraw: true, pending_notification: None, quit_shortcut_expires_at: None, diff --git 
a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs index b0ba5200a2a..b3b1a492299 100644 --- a/codex-rs/tui/src/chatwidget/tests.rs +++ b/codex-rs/tui/src/chatwidget/tests.rs @@ -1581,6 +1581,7 @@ async fn helpers_are_available_and_do_not_panic() { is_first_run: true, feedback_audience: FeedbackAudience::External, model: Some(resolved_model), + startup_tooltip_override: None, status_line_invalid_items_warned: Arc::new(AtomicBool::new(false)), otel_manager, }; @@ -1705,6 +1706,7 @@ async fn make_chatwidget_manual( forked_from: None, frame_requester: FrameRequester::test_dummy(), show_welcome_banner: true, + startup_tooltip_override: None, queued_user_messages: VecDeque::new(), queued_message_edit_binding: crate::key_hint::alt(KeyCode::Up), suppress_session_configured_redraw: false, @@ -4488,6 +4490,7 @@ async fn collaboration_modes_defaults_to_code_on_startup() { is_first_run: true, feedback_audience: FeedbackAudience::External, model: Some(resolved_model.clone()), + startup_tooltip_override: None, status_line_invalid_items_warned: Arc::new(AtomicBool::new(false)), otel_manager, }; @@ -4537,6 +4540,7 @@ async fn experimental_mode_plan_is_ignored_on_startup() { is_first_run: true, feedback_audience: FeedbackAudience::External, model: Some(resolved_model.clone()), + startup_tooltip_override: None, status_line_invalid_items_warned: Arc::new(AtomicBool::new(false)), otel_manager, }; diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs index 624d47d9154..a7ff11ef683 100644 --- a/codex-rs/tui/src/history_cell.rs +++ b/codex-rs/tui/src/history_cell.rs @@ -1041,6 +1041,7 @@ pub(crate) fn new_session_info( requested_model: &str, event: SessionConfiguredEvent, is_first_event: bool, + tooltip_override: Option, auth_plan: Option, ) -> SessionInfoCell { let SessionConfiguredEvent { @@ -1094,7 +1095,9 @@ pub(crate) fn new_session_info( parts.push(Box::new(PlainHistoryCell { lines: help_lines })); } else { if 
config.show_tooltips - && let Some(tooltips) = tooltips::get_tooltip(auth_plan).map(TooltipHistoryCell::new) + && let Some(tooltips) = tooltip_override + .or_else(|| tooltips::get_tooltip(auth_plan)) + .map(TooltipHistoryCell::new) { parts.push(Box::new(tooltips)); } @@ -2396,13 +2399,19 @@ mod tests { use codex_core::config::types::McpServerTransportConfig; use codex_otel::RuntimeMetricTotals; use codex_otel::RuntimeMetricsSummary; + use codex_protocol::ThreadId; + use codex_protocol::account::PlanType; use codex_protocol::models::WebSearchAction; use codex_protocol::parse_command::ParsedCommand; + use codex_protocol::protocol::AskForApproval; use codex_protocol::protocol::McpAuthStatus; + use codex_protocol::protocol::SandboxPolicy; + use codex_protocol::protocol::SessionConfiguredEvent; use dirs::home_dir; use pretty_assertions::assert_eq; use serde_json::json; use std::collections::HashMap; + use std::path::PathBuf; use codex_protocol::mcp::CallToolResult; use codex_protocol::mcp::Tool; @@ -2463,6 +2472,25 @@ mod tests { .expect("resource link content should serialize") } + fn session_configured_event(model: &str) -> SessionConfiguredEvent { + SessionConfiguredEvent { + session_id: ThreadId::new(), + forked_from_id: None, + thread_name: None, + model: model.to_string(), + model_provider_id: "test-provider".to_string(), + approval_policy: AskForApproval::Never, + sandbox_policy: SandboxPolicy::new_read_only_policy(), + cwd: PathBuf::from("/tmp/project"), + reasoning_effort: None, + history_log_id: 0, + history_entry_count: 0, + initial_messages: None, + network_proxy: None, + rollout_path: Some(PathBuf::new()), + } + } + #[test] fn unified_exec_interaction_cell_renders_input() { let cell = @@ -2547,6 +2575,72 @@ mod tests { insta::assert_snapshot!(rendered); } + #[tokio::test] + async fn session_info_uses_availability_nux_tooltip_override() { + let config = test_config().await; + let cell = new_session_info( + &config, + "gpt-5", + 
session_configured_event("gpt-5"), + false, + Some("Model just became available".to_string()), + Some(PlanType::Free), + ); + + let rendered = render_transcript(&cell).join("\n"); + assert!(rendered.contains("Model just became available")); + } + + #[tokio::test] + async fn session_info_availability_nux_tooltip_snapshot() { + let config = test_config().await; + let cell = new_session_info( + &config, + "gpt-5", + session_configured_event("gpt-5"), + false, + Some("Model just became available".to_string()), + Some(PlanType::Free), + ); + + let rendered = render_transcript(&cell).join("\n"); + insta::assert_snapshot!(rendered); + } + + #[tokio::test] + async fn session_info_first_event_suppresses_tooltips_and_nux() { + let config = test_config().await; + let cell = new_session_info( + &config, + "gpt-5", + session_configured_event("gpt-5"), + true, + Some("Model just became available".to_string()), + Some(PlanType::Free), + ); + + let rendered = render_transcript(&cell).join("\n"); + assert!(!rendered.contains("Model just became available")); + assert!(rendered.contains("To get started")); + } + + #[tokio::test] + async fn session_info_hides_tooltips_when_disabled() { + let mut config = test_config().await; + config.show_tooltips = false; + let cell = new_session_info( + &config, + "gpt-5", + session_configured_event("gpt-5"), + false, + Some("Model just became available".to_string()), + Some(PlanType::Free), + ); + + let rendered = render_transcript(&cell).join("\n"); + assert!(!rendered.contains("Model just became available")); + } + #[test] fn ps_output_multiline_snapshot() { let cell = new_unified_exec_processes_output(vec![ diff --git a/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap b/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap new file mode 100644 index 00000000000..9eccc01d28b --- /dev/null +++ 
b/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap @@ -0,0 +1,12 @@ +--- +source: tui/src/history_cell.rs +expression: rendered +--- +╭───────────────────────────────────────────────────────╮ +│ >_ OpenAI Codex (v0.0.0) │ +│ │ +│ model: gpt-5 /model to change │ +│ directory: ~/.codex/worktrees/22b2/codex/codex-rs/tui │ +╰───────────────────────────────────────────────────────╯ + + Tip: Model just became available From 6bd76b27aaf48ba15f17b4f88f6012e2f89f6866 Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Fri, 27 Feb 2026 09:37:58 -0800 Subject: [PATCH 2/7] Flatten model availability NUX config --- codex-rs/core/config.schema.json | 36 ++++------------------------- codex-rs/core/src/config/edit.rs | 37 +++++++++++++++++++----------- codex-rs/core/src/config/mod.rs | 5 ---- codex-rs/core/src/config/types.rs | 13 +---------- codex-rs/tui/src/app.rs | 38 +++++++++++++------------------ 5 files changed, 46 insertions(+), 83 deletions(-) diff --git a/codex-rs/core/config.schema.json b/codex-rs/core/config.schema.json index d9807cf2394..1e12fd6149c 100644 --- a/codex-rs/core/config.schema.json +++ b/codex-rs/core/config.schema.json @@ -652,36 +652,13 @@ "type": "object" }, "ModelAvailabilityNuxConfig": { - "additionalProperties": false, - "properties": { - "mode": { - "allOf": [ - { - "$ref": "#/definitions/ModelAvailabilityNuxMode" - } - ], - "default": "per_model", - "description": "Controls how startup availability NUX exposure is tracked." 
-        },
-        "shown_count": {
-          "additionalProperties": {
-            "format": "uint32",
-            "minimum": 0.0,
-            "type": "integer"
-          },
-          "default": {},
-          "description": "Number of times a startup availability NUX has been shown per model slug.",
-          "type": "object"
-        }
+      "additionalProperties": {
+        "format": "uint32",
+        "minimum": 0.0,
+        "type": "integer"
       },
       "type": "object"
     },
-    "ModelAvailabilityNuxMode": {
-      "enum": [
-        "per_model"
-      ],
-      "type": "string"
-    },
     "ModelProviderInfo": {
       "additionalProperties": false,
       "description": "Serializable representation of a provider definition.",
@@ -1449,10 +1426,7 @@
           "$ref": "#/definitions/ModelAvailabilityNuxConfig"
         }
       ],
-      "default": {
-        "mode": "per_model",
-        "shown_count": {}
-      },
+      "default": {},
       "description": "Startup tooltip availability NUX state persisted by the TUI."
     },
     "notification_method": {
diff --git a/codex-rs/core/src/config/edit.rs b/codex-rs/core/src/config/edit.rs
index d462d834055..3a5b1d39835 100644
--- a/codex-rs/core/src/config/edit.rs
+++ b/codex-rs/core/src/config/edit.rs
@@ -8,6 +8,7 @@ use codex_protocol::config_types::Personality;
 use codex_protocol::config_types::TrustLevel;
 use codex_protocol::openai_models::ReasoningEffort;
 use std::collections::BTreeMap;
+use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;
 use tokio::task;
@@ -75,16 +76,25 @@ pub fn status_line_items_edit(items: &[String]) -> ConfigEdit {
     }
 }
 
-pub fn model_availability_nux_count_edit(model_slug: &str, count: u32) -> ConfigEdit {
-    ConfigEdit::SetPath {
-        segments: vec![
-            "tui".to_string(),
-            "model_availability_nux".to_string(),
-            "shown_count".to_string(),
-            model_slug.to_string(),
-        ],
-        value: value(i64::from(count)),
+pub fn model_availability_nux_count_edits(shown_count: &HashMap<String, u32>) -> Vec<ConfigEdit> {
+    let mut shown_count_entries: Vec<_> = shown_count.iter().collect();
+    shown_count_entries.sort_unstable_by(|(left, _), (right, _)| left.cmp(right));
+
+    let mut edits = vec![ConfigEdit::ClearPath {
+        segments: vec!["tui".to_string(), "model_availability_nux".to_string()],
+    }];
+    for (model_slug, count) in shown_count_entries {
+        edits.push(ConfigEdit::SetPath {
+            segments: vec![
+                "tui".to_string(),
+                "model_availability_nux".to_string(),
+                model_slug.clone(),
+            ],
+            value: value(i64::from(*count)),
+        });
     }
+
+    edits
 }
 
 // TODO(jif) move to a dedicated file
@@ -811,9 +821,9 @@ impl ConfigEditsBuilder {
         self
     }
 
-    pub fn set_model_availability_nux_count(mut self, model_slug: &str, count: u32) -> Self {
+    pub fn set_model_availability_nux_count(mut self, shown_count: &HashMap<String, u32>) -> Self {
         self.edits
-            .push(model_availability_nux_count_edit(model_slug, count));
+            .extend(model_availability_nux_count_edits(shown_count));
         self
     }
 
@@ -985,15 +995,16 @@
     fn set_model_availability_nux_count_writes_shown_count() {
         let tmp = tempdir().expect("tmpdir");
         let codex_home = tmp.path();
+        let shown_count = HashMap::from([("gpt-foo".to_string(), 4)]);
 
         ConfigEditsBuilder::new(codex_home)
-            .set_model_availability_nux_count("gpt-foo", 4)
+            .set_model_availability_nux_count(&shown_count)
             .apply_blocking()
             .expect("persist");
 
         let contents =
             std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
-        let expected = r#"[tui.model_availability_nux.shown_count]
+        let expected = r#"[tui.model_availability_nux]
 gpt-foo = 4
 "#;
         assert_eq!(contents, expected);
diff --git a/codex-rs/core/src/config/mod.rs b/codex-rs/core/src/config/mod.rs
index 1dc0474d926..17fd7d17592 100644
--- a/codex-rs/core/src/config/mod.rs
+++ b/codex-rs/core/src/config/mod.rs
@@ -2411,7 +2411,6 @@ mod tests {
     use crate::config::types::MemoriesConfig;
     use crate::config::types::MemoriesToml;
     use crate::config::types::ModelAvailabilityNuxConfig;
-    use crate::config::types::ModelAvailabilityNuxMode;
     use crate::config::types::NotificationMethod;
     use crate::config::types::Notifications;
    use crate::config_loader::RequirementSource;
@@ -2548,9 +2547,6 @@ phase_2_model = "gpt-5"
    fn 
config_toml_deserializes_model_availability_nux() { let toml = r#" [tui.model_availability_nux] -mode = "per_model" - -[tui.model_availability_nux.shown_count] "gpt-foo" = 2 "gpt-bar" = 4 "#; @@ -2568,7 +2564,6 @@ mode = "per_model" status_line: None, theme: None, model_availability_nux: ModelAvailabilityNuxConfig { - mode: ModelAvailabilityNuxMode::PerModel, shown_count: HashMap::from([ ("gpt-bar".to_string(), 4), ("gpt-foo".to_string(), 2), diff --git a/codex-rs/core/src/config/types.rs b/codex-rs/core/src/config/types.rs index 259ddc166d6..c85640e01dd 100644 --- a/codex-rs/core/src/config/types.rs +++ b/codex-rs/core/src/config/types.rs @@ -657,22 +657,11 @@ impl fmt::Display for NotificationMethod { } } -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, Default)] -#[serde(rename_all = "snake_case")] -pub enum ModelAvailabilityNuxMode { - #[default] - PerModel, -} - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default, JsonSchema)] #[schemars(deny_unknown_fields)] pub struct ModelAvailabilityNuxConfig { - /// Controls how startup availability NUX exposure is tracked. - #[serde(default)] - pub mode: ModelAvailabilityNuxMode, - /// Number of times a startup availability NUX has been shown per model slug. 
-    #[serde(default)]
+    #[serde(default, flatten)]
     pub shown_count: HashMap<String, u32>,
 }
 
diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs
index e8348b82711..ecd9b58a239 100644
--- a/codex-rs/tui/src/app.rs
+++ b/codex-rs/tui/src/app.rs
@@ -47,7 +47,6 @@ use codex_core::config::ConfigOverrides;
 use codex_core::config::edit::ConfigEdit;
 use codex_core::config::edit::ConfigEditsBuilder;
 use codex_core::config::types::ModelAvailabilityNuxConfig;
-use codex_core::config::types::ModelAvailabilityNuxMode;
 use codex_core::config_loader::ConfigLayerStackOrdering;
 use codex_core::features::Feature;
 use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
@@ -466,20 +465,18 @@ fn select_model_availability_nux(
     available_models: &[ModelPreset],
     nux_config: &ModelAvailabilityNuxConfig,
 ) -> Option<StartupTooltipOverride> {
-    match nux_config.mode {
-        ModelAvailabilityNuxMode::PerModel => available_models.iter().find_map(|preset| {
-            let ModelAvailabilityNux { message } = preset.availability_nux.as_ref()?;
-            let shown_count = nux_config
-                .shown_count
-                .get(&preset.model)
-                .copied()
-                .unwrap_or_default();
-            (shown_count < MODEL_AVAILABILITY_NUX_MAX_SHOW_COUNT).then(|| StartupTooltipOverride {
-                model_slug: preset.model.clone(),
-                message: message.clone(),
-            })
-        }),
-    }
+    available_models.iter().find_map(|preset| {
+        let ModelAvailabilityNux { message } = preset.availability_nux.as_ref()?;
+        let shown_count = nux_config
+            .shown_count
+            .get(&preset.model)
+            .copied()
+            .unwrap_or_default();
+        (shown_count < MODEL_AVAILABILITY_NUX_MAX_SHOW_COUNT).then(|| StartupTooltipOverride {
+            model_slug: preset.model.clone(),
+            message: message.clone(),
+        })
+    })
 }
 
 async fn prepare_startup_tooltip_override(
@@ -504,9 +501,11 @@
         .copied()
         .unwrap_or_default();
     let next_count = shown_count.saturating_add(1);
+    let mut updated_shown_count = config.model_availability_nux.shown_count.clone();
+    
updated_shown_count.insert(tooltip_override.model_slug.clone(), next_count); if let Err(err) = ConfigEditsBuilder::new(&config.codex_home) - .set_model_availability_nux_count(&tooltip_override.model_slug, next_count) + .set_model_availability_nux_count(&updated_shown_count) .apply() .await { @@ -518,10 +517,7 @@ async fn prepare_startup_tooltip_override( return Some(tooltip_override.message); } - config - .model_availability_nux - .shown_count - .insert(tooltip_override.model_slug, next_count); + config.model_availability_nux.shown_count = updated_shown_count; Some(tooltip_override.message) } @@ -3495,7 +3491,6 @@ mod tests { use codex_core::config::ConfigBuilder; use codex_core::config::ConfigOverrides; use codex_core::config::types::ModelAvailabilityNuxConfig; - use codex_core::config::types::ModelAvailabilityNuxMode; use codex_otel::OtelManager; use codex_protocol::ThreadId; use codex_protocol::openai_models::ModelAvailabilityNux; @@ -4155,7 +4150,6 @@ mod tests { fn model_availability_nux_config(shown_count: &[(&str, u32)]) -> ModelAvailabilityNuxConfig { ModelAvailabilityNuxConfig { - mode: ModelAvailabilityNuxMode::PerModel, shown_count: shown_count .iter() .map(|(model, count)| ((*model).to_string(), *count)) From e8b0544e4587edf049fa61c066b9571311918dc1 Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Fri, 27 Feb 2026 10:18:20 -0800 Subject: [PATCH 3/7] codex: fix CI failure on PR #13021 --- codex-rs/tui/src/app.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index ecd9b58a239..7ddb078c26f 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -488,11 +488,8 @@ async fn prepare_startup_tooltip_override( return None; } - let Some(tooltip_override) = - select_model_availability_nux(available_models, &config.model_availability_nux) - else { - return None; - }; + let tooltip_override = + select_model_availability_nux(available_models, &config.model_availability_nux)?; 
let shown_count = config .model_availability_nux From 545c24a7202b4bf4a78d8bb85f1462a655f89eb0 Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Fri, 27 Feb 2026 11:07:00 -0800 Subject: [PATCH 4/7] codex: address PR review feedback (#13021) --- codex-rs/tui/src/app.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index 7ddb078c26f..77184d29f2e 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -1418,9 +1418,6 @@ impl App { if let Some(updated_model) = config.model.clone() { model = updated_model; } - let startup_tooltip_override = - prepare_startup_tooltip_override(&mut config, &available_models, is_first_run).await; - let auth = auth_manager.auth().await; let auth_ref = auth.as_ref(); // Determine who should see internal Slack routing. We treat @@ -1464,6 +1461,9 @@ impl App { Self::should_wait_for_initial_session(&session_selection); let mut chat_widget = match session_selection { SessionSelection::StartFresh | SessionSelection::Exit => { + let startup_tooltip_override = + prepare_startup_tooltip_override(&mut config, &available_models, is_first_run) + .await; let init = crate::chatwidget::ChatWidgetInit { config: config.clone(), frame_requester: tui.frame_requester(), @@ -1516,7 +1516,7 @@ impl App { is_first_run, feedback_audience, model: config.model.clone(), - startup_tooltip_override: startup_tooltip_override.clone(), + startup_tooltip_override: None, status_line_invalid_items_warned: status_line_invalid_items_warned.clone(), otel_manager: otel_manager.clone(), }; @@ -1553,7 +1553,7 @@ impl App { is_first_run, feedback_audience, model: config.model.clone(), - startup_tooltip_override: startup_tooltip_override.clone(), + startup_tooltip_override: None, status_line_invalid_items_warned: status_line_invalid_items_warned.clone(), otel_manager: otel_manager.clone(), }; From 858f3ee4a3b078a5ce5f753bb6df715b2849646e Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: 
Fri, 27 Feb 2026 11:15:18 -0800 Subject: [PATCH 5/7] codex: fix CI failure on PR #13021 --- codex-rs/tui/src/history_cell.rs | 3 ++- ...sion_info_availability_nux_tooltip_snapshot.snap | 13 +++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs index a7ff11ef683..bb06c16e4ac 100644 --- a/codex-rs/tui/src/history_cell.rs +++ b/codex-rs/tui/src/history_cell.rs @@ -2593,7 +2593,8 @@ mod tests { #[tokio::test] async fn session_info_availability_nux_tooltip_snapshot() { - let config = test_config().await; + let mut config = test_config().await; + config.cwd = PathBuf::from("/tmp/project"); let cell = new_session_info( &config, "gpt-5", diff --git a/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap b/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap index 9eccc01d28b..82869c01715 100644 --- a/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap +++ b/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap @@ -1,12 +1,13 @@ --- source: tui/src/history_cell.rs +assertion_line: 2608 expression: rendered --- -╭───────────────────────────────────────────────────────╮ -│ >_ OpenAI Codex (v0.0.0) │ -│ │ -│ model: gpt-5 /model to change │ -│ directory: ~/.codex/worktrees/22b2/codex/codex-rs/tui │ -╰───────────────────────────────────────────────────────╯ +╭─────────────────────────────────────╮ +│ >_ OpenAI Codex (v0.0.0) │ +│ │ +│ model: gpt-5 /model to change │ +│ directory: /tmp/project │ +╰─────────────────────────────────────╯ Tip: Model just became available From 7b99710c3a267c3df0fef632afb699333e46c68d Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Fri, 27 Feb 2026 11:33:12 -0800 Subject: [PATCH 6/7] codex: fix CI failure on PR #13021 --- 
codex-rs/tui/src/app.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index 77184d29f2e..abb878ae0b3 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -1481,7 +1481,7 @@ impl App { is_first_run, feedback_audience, model: Some(model.clone()), - startup_tooltip_override: startup_tooltip_override.clone(), + startup_tooltip_override, status_line_invalid_items_warned: status_line_invalid_items_warned.clone(), otel_manager: otel_manager.clone(), }; From 958fd8f298754f37f267f657437c21f8438b5c23 Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Fri, 27 Feb 2026 13:34:50 -0800 Subject: [PATCH 7/7] tui: add e2e test for resume nux counts --- codex-rs/tui/tests/suite/mod.rs | 1 + .../tui/tests/suite/model_availability_nux.rs | 190 ++++++++++++++++++ 2 files changed, 191 insertions(+) create mode 100644 codex-rs/tui/tests/suite/model_availability_nux.rs diff --git a/codex-rs/tui/tests/suite/mod.rs b/codex-rs/tui/tests/suite/mod.rs index 9a8b9a1c4e6..c31326b10fe 100644 --- a/codex-rs/tui/tests/suite/mod.rs +++ b/codex-rs/tui/tests/suite/mod.rs @@ -1,4 +1,5 @@ // Aggregates all former standalone integration tests as modules. +mod model_availability_nux; mod no_panic_on_startup; mod status_indicator; mod vt100_history; diff --git a/codex-rs/tui/tests/suite/model_availability_nux.rs b/codex-rs/tui/tests/suite/model_availability_nux.rs new file mode 100644 index 00000000000..a76ef02bbc0 --- /dev/null +++ b/codex-rs/tui/tests/suite/model_availability_nux.rs @@ -0,0 +1,190 @@ +use std::collections::HashMap; +use std::time::Duration; + +use anyhow::Context; +use anyhow::Result; +use serde_json::Value as JsonValue; +use tempfile::tempdir; +use tokio::select; +use tokio::time::sleep; +use tokio::time::timeout; + +#[tokio::test] +async fn resume_startup_does_not_consume_model_availability_nux_count() -> Result<()> { + // run_codex_cli() does not work on Windows due to PTY limitations. 
+ if cfg!(windows) { + return Ok(()); + } + + let repo_root = codex_utils_cargo_bin::repo_root()?; + let codex_home = tempdir()?; + + let source_catalog_path = codex_utils_cargo_bin::find_resource!("../core/models.json")?; + let source_catalog = std::fs::read_to_string(&source_catalog_path)?; + let mut source_catalog: JsonValue = serde_json::from_str(&source_catalog)?; + let models = source_catalog + .get_mut("models") + .and_then(JsonValue::as_array_mut) + .context("models array missing")?; + for model in models.iter_mut() { + if let Some(object) = model.as_object_mut() { + object.remove("availability_nux"); + } + } + let first_model = models.first_mut().context("models array is empty")?; + let first_model_object = first_model + .as_object_mut() + .context("first model was not a JSON object")?; + let model_slug = first_model_object + .get("slug") + .and_then(JsonValue::as_str) + .context("first model missing slug")? + .to_string(); + first_model_object.insert( + "availability_nux".to_string(), + serde_json::json!({ + "message": "Model now available", + }), + ); + + let custom_catalog_path = codex_home.path().join("catalog.json"); + std::fs::write( + &custom_catalog_path, + serde_json::to_string(&source_catalog)?, + )?; + + let repo_root_display = repo_root.display(); + let catalog_display = custom_catalog_path.display(); + let config_contents = format!( + r#"model = "{model_slug}" +model_provider = "openai" +model_catalog_json = "{catalog_display}" + +[projects."{repo_root_display}"] +trust_level = "trusted" + +[tui.model_availability_nux] +"{model_slug}" = 1 +"# + ); + std::fs::write(codex_home.path().join("config.toml"), config_contents)?; + + let fixture_path = + codex_utils_cargo_bin::find_resource!("../core/tests/cli_responses_fixture.sse")?; + let codex = if let Ok(path) = codex_utils_cargo_bin::cargo_bin("codex") { + path + } else { + let fallback = repo_root.join("codex-rs/target/debug/codex"); + if fallback.is_file() { + fallback + } else { + 
eprintln!("skipping integration test because codex binary is unavailable"); + return Ok(()); + } + }; + + let exec_output = std::process::Command::new(&codex) + .arg("exec") + .arg("--skip-git-repo-check") + .arg("-C") + .arg(&repo_root) + .arg("seed session for resume") + .env("CODEX_HOME", codex_home.path()) + .env("OPENAI_API_KEY", "dummy") + .env("CODEX_RS_SSE_FIXTURE", fixture_path) + .env("OPENAI_BASE_URL", "http://unused.local") + .output() + .context("failed to execute codex exec")?; + anyhow::ensure!( + exec_output.status.success(), + "codex exec failed: {}", + String::from_utf8_lossy(&exec_output.stderr) + ); + + let mut env = HashMap::new(); + env.insert( + "CODEX_HOME".to_string(), + codex_home.path().display().to_string(), + ); + env.insert("OPENAI_API_KEY".to_string(), "dummy".to_string()); + + let args = vec![ + "resume".to_string(), + "--last".to_string(), + "--no-alt-screen".to_string(), + "-C".to_string(), + repo_root.display().to_string(), + "-c".to_string(), + "analytics.enabled=false".to_string(), + ]; + + let spawned = codex_utils_pty::spawn_pty_process( + codex.to_string_lossy().as_ref(), + &args, + &repo_root, + &env, + &None, + ) + .await?; + + let mut output = Vec::new(); + let mut output_rx = spawned.output_rx; + let mut exit_rx = spawned.exit_rx; + let writer_tx = spawned.session.writer_sender(); + let interrupt_writer = writer_tx.clone(); + let interrupt_task = tokio::spawn(async move { + sleep(Duration::from_secs(2)).await; + for _ in 0..4 { + let _ = interrupt_writer.send(vec![3]).await; + sleep(Duration::from_millis(500)).await; + } + }); + + let exit_code_result = timeout(Duration::from_secs(15), async { + loop { + select! 
{ + result = output_rx.recv() => match result { + Ok(chunk) => { + if chunk.windows(4).any(|window| window == b"\x1b[6n") { + let _ = writer_tx.send(b"\x1b[1;1R".to_vec()).await; + } + output.extend_from_slice(&chunk); + } + Err(tokio::sync::broadcast::error::RecvError::Closed) => break exit_rx.await, + Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {} + }, + result = &mut exit_rx => break result, + } + } + }) + .await; + + interrupt_task.abort(); + + let exit_code = match exit_code_result { + Ok(Ok(code)) => code, + Ok(Err(err)) => return Err(err.into()), + Err(_) => { + spawned.session.terminate(); + anyhow::bail!("timed out waiting for codex resume to exit"); + } + }; + anyhow::ensure!( + exit_code == 0 || exit_code == 130, + "unexpected exit code from codex resume: {exit_code}; output: {}", + String::from_utf8_lossy(&output) + ); + + let config_contents = std::fs::read_to_string(codex_home.path().join("config.toml"))?; + let config: toml::Value = toml::from_str(&config_contents)?; + let shown_count = config + .get("tui") + .and_then(|tui| tui.get("model_availability_nux")) + .and_then(|nux| nux.get(&model_slug)) + .and_then(toml::Value::as_integer) + .context("missing tui.model_availability_nux count")?; + + assert_eq!(shown_count, 1); + + Ok(()) +}