diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4dfd7596d1b..7cbb1a1983a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -27,12 +27,26 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile
- # Run all tasks using workspace filters
+ # build_npm_package.py requires DotSlash when staging releases.
+ - uses: facebook/install-dotslash@v2
- - name: Ensure staging a release works.
+ - name: Stage npm package
env:
GH_TOKEN: ${{ github.token }}
- run: ./codex-cli/scripts/stage_release.sh
+ run: |
+ set -euo pipefail
+ CODEX_VERSION=0.40.0
+ PACK_OUTPUT="${RUNNER_TEMP}/codex-npm.tgz"
+ python3 ./codex-cli/scripts/build_npm_package.py \
+ --release-version "$CODEX_VERSION" \
+ --pack-output "$PACK_OUTPUT"
+ echo "PACK_OUTPUT=$PACK_OUTPUT" >> "$GITHUB_ENV"
+
+ - name: Upload staged npm package artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: codex-npm-staging
+ path: ${{ env.PACK_OUTPUT }}
- name: Ensure root README.md contains only ASCII and certain Unicode code points
run: ./scripts/asciicheck.py README.md
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
index ebf55649b82..3d49986791f 100644
--- a/.github/workflows/codespell.yml
+++ b/.github/workflows/codespell.yml
@@ -22,7 +22,7 @@ jobs:
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
- uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2
+ uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
with:
ignore_words_file: .codespellignore
skip: frame*.txt
diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml
index 280939c611d..846408f3a80 100644
--- a/.github/workflows/rust-ci.yml
+++ b/.github/workflows/rust-ci.yml
@@ -57,7 +57,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v5
- - uses: dtolnay/rust-toolchain@1.89
+ - uses: dtolnay/rust-toolchain@1.90
with:
components: rustfmt
- name: cargo fmt
@@ -75,7 +75,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v5
- - uses: dtolnay/rust-toolchain@1.89
+ - uses: dtolnay/rust-toolchain@1.90
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
with:
tool: cargo-shear
@@ -143,7 +143,7 @@ jobs:
steps:
- uses: actions/checkout@v5
- - uses: dtolnay/rust-toolchain@1.89
+ - uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
components: clippy
diff --git a/.github/workflows/rust-release.yml b/.github/workflows/rust-release.yml
index 07af62a17c0..c808216d968 100644
--- a/.github/workflows/rust-release.yml
+++ b/.github/workflows/rust-release.yml
@@ -77,7 +77,7 @@ jobs:
steps:
- uses: actions/checkout@v5
- - uses: dtolnay/rust-toolchain@1.89
+ - uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
@@ -167,6 +167,14 @@ jobs:
needs: build
name: release
runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ actions: read
+ outputs:
+ version: ${{ steps.release_name.outputs.name }}
+ tag: ${{ github.ref_name }}
+ should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
+ npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}
steps:
- name: Checkout repository
@@ -187,21 +195,37 @@ jobs:
version="${GITHUB_REF_NAME#rust-v}"
echo "name=${version}" >> $GITHUB_OUTPUT
+ - name: Determine npm publish settings
+ id: npm_publish_settings
+ env:
+ VERSION: ${{ steps.release_name.outputs.name }}
+ run: |
+ set -euo pipefail
+ version="${VERSION}"
+
+ if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+ echo "should_publish=true" >> "$GITHUB_OUTPUT"
+ echo "npm_tag=" >> "$GITHUB_OUTPUT"
+ elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
+ echo "should_publish=true" >> "$GITHUB_OUTPUT"
+ echo "npm_tag=alpha" >> "$GITHUB_OUTPUT"
+ else
+ echo "should_publish=false" >> "$GITHUB_OUTPUT"
+ echo "npm_tag=" >> "$GITHUB_OUTPUT"
+ fi
+
+ # build_npm_package.py requires DotSlash when staging releases.
+ - uses: facebook/install-dotslash@v2
- name: Stage npm package
env:
GH_TOKEN: ${{ github.token }}
run: |
set -euo pipefail
TMP_DIR="${RUNNER_TEMP}/npm-stage"
- python3 codex-cli/scripts/stage_rust_release.py \
+ ./codex-cli/scripts/build_npm_package.py \
--release-version "${{ steps.release_name.outputs.name }}" \
- --tmp "${TMP_DIR}"
- mkdir -p dist/npm
- # Produce an npm-ready tarball using `npm pack` and store it in dist/npm.
- # We then rename it to a stable name used by our publishing script.
- (cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
- mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \
- "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
+ --staging-dir "${TMP_DIR}" \
+ --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
- name: Create GitHub Release
uses: softprops/action-gh-release@v2
@@ -220,6 +244,58 @@ jobs:
tag: ${{ github.ref_name }}
config: .github/dotslash-config.json
+ # Publish to npm using OIDC authentication.
+ # July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
+ # npm docs: https://docs.npmjs.com/trusted-publishers
+ publish-npm:
+ # Publish to npm for stable releases and alpha pre-releases with numeric suffixes.
+ if: ${{ needs.release.outputs.should_publish_npm == 'true' }}
+ name: publish-npm
+ needs: release
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write # Required for OIDC
+ contents: read
+
+ steps:
+ - name: Setup Node.js
+ uses: actions/setup-node@v5
+ with:
+ node-version: 22
+ registry-url: "https://registry.npmjs.org"
+ scope: "@openai"
+
+ # Trusted publishing requires npm CLI version 11.5.1 or later.
+ - name: Update npm
+ run: npm install -g npm@latest
+
+ - name: Download npm tarball from release
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ set -euo pipefail
+ version="${{ needs.release.outputs.version }}"
+ tag="${{ needs.release.outputs.tag }}"
+ mkdir -p dist/npm
+ gh release download "$tag" \
+ --repo "${GITHUB_REPOSITORY}" \
+ --pattern "codex-npm-${version}.tgz" \
+ --dir dist/npm
+
+ # No NODE_AUTH_TOKEN needed because we use OIDC.
+ - name: Publish to npm
+ env:
+ VERSION: ${{ needs.release.outputs.version }}
+ NPM_TAG: ${{ needs.release.outputs.npm_tag }}
+ run: |
+ set -euo pipefail
+ tag_args=()
+ if [[ -n "${NPM_TAG}" ]]; then
+ tag_args+=(--tag "${NPM_TAG}")
+ fi
+
+ npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}"
+
update-branch:
name: Update latest-alpha-cli branch
permissions:
diff --git a/.gitignore b/.gitignore
index 06936c0b182..a264d918228 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,8 +30,6 @@ result
# cli tools
CLAUDE.md
.claude/
-.specify/
-.serena/
# caches
.cache/
diff --git a/AGENTS.md b/AGENTS.md
index 697de7009fe..288719447e4 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -4,6 +4,7 @@ In the codex-rs folder where the rust code lives:
- Crate names are prefixed with `codex-`. For example, the `core` folder's crate is named `codex-core`
- When using format! and you can inline variables into {}, always do that.
+- Install any commands the repo relies on (for example `just`, `rg`, or `cargo-insta`) if they aren't already available before running instructions here.
- Never add or modify any code related to `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` or `CODEX_SANDBOX_ENV_VAR`.
- You operate in a sandbox where `CODEX_SANDBOX_NETWORK_DISABLED=1` will be set whenever you use the `shell` tool. Any existing code that uses `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` was authored with this fact in mind. It is often used to early exit out of tests that the author knew you would not be able to run given your sandbox limitations.
- Similarly, when you spawn a process using Seatbelt (`/usr/bin/sandbox-exec`), `CODEX_SANDBOX=seatbelt` will be set on the child process. Integration tests that want to run Seatbelt themselves cannot be run under Seatbelt, so checks for `CODEX_SANDBOX=seatbelt` are also often used to early exit out of tests, as appropriate.
diff --git a/codex-cli/.gitignore b/codex-cli/.gitignore
index f886e64f466..57872d0f1e5 100644
--- a/codex-cli/.gitignore
+++ b/codex-cli/.gitignore
@@ -1,7 +1 @@
-# Added by ./scripts/install_native_deps.sh
-/bin/codex-aarch64-apple-darwin
-/bin/codex-aarch64-unknown-linux-musl
-/bin/codex-linux-sandbox-arm64
-/bin/codex-linux-sandbox-x64
-/bin/codex-x86_64-apple-darwin
-/bin/codex-x86_64-unknown-linux-musl
+/vendor/
diff --git a/codex-cli/bin/codex.js b/codex-cli/bin/codex.js
index f24065170d8..3621f48c120 100755
--- a/codex-cli/bin/codex.js
+++ b/codex-cli/bin/codex.js
@@ -1,6 +1,7 @@
#!/usr/bin/env node
// Unified entry point for the Codex CLI.
+import { existsSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
@@ -40,10 +41,10 @@ switch (platform) {
case "win32":
switch (arch) {
case "x64":
- targetTriple = "x86_64-pc-windows-msvc.exe";
+ targetTriple = "x86_64-pc-windows-msvc";
break;
case "arm64":
- targetTriple = "aarch64-pc-windows-msvc.exe";
+ targetTriple = "aarch64-pc-windows-msvc";
break;
default:
break;
@@ -57,7 +58,10 @@ if (!targetTriple) {
throw new Error(`Unsupported platform: ${platform} (${arch})`);
}
-const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
+const vendorRoot = path.join(__dirname, "..", "vendor");
+const archRoot = path.join(vendorRoot, targetTriple);
+const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex";
+const binaryPath = path.join(archRoot, "codex", codexBinaryName);
// Use an asynchronous spawn instead of spawnSync so that Node is able to
// respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
@@ -66,23 +70,6 @@ const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
// receives a fatal signal, both processes exit in a predictable manner.
const { spawn } = await import("child_process");
-async function tryImport(moduleName) {
- try {
- // eslint-disable-next-line node/no-unsupported-features/es-syntax
- return await import(moduleName);
- } catch (err) {
- return null;
- }
-}
-
-async function resolveRgDir() {
- const ripgrep = await tryImport("@vscode/ripgrep");
- if (!ripgrep?.rgPath) {
- return null;
- }
- return path.dirname(ripgrep.rgPath);
-}
-
function getUpdatedPath(newDirs) {
const pathSep = process.platform === "win32" ? ";" : ":";
const existingPath = process.env.PATH || "";
@@ -94,9 +81,9 @@ function getUpdatedPath(newDirs) {
}
const additionalDirs = [];
-const rgDir = await resolveRgDir();
-if (rgDir) {
- additionalDirs.push(rgDir);
+const pathDir = path.join(archRoot, "path");
+if (existsSync(pathDir)) {
+ additionalDirs.push(pathDir);
}
const updatedPath = getUpdatedPath(additionalDirs);
diff --git a/codex-cli/bin/rg b/codex-cli/bin/rg
new file mode 100755
index 00000000000..5a992570a9a
--- /dev/null
+++ b/codex-cli/bin/rg
@@ -0,0 +1,79 @@
+#!/usr/bin/env dotslash
+
+{
+ "name": "rg",
+ "platforms": {
+ "macos-aarch64": {
+ "size": 1787248,
+ "hash": "blake3",
+ "digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a",
+ "format": "tar.gz",
+ "path": "ripgrep-14.1.1-aarch64-apple-darwin/rg",
+ "providers": [
+ {
+ "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz"
+ }
+ ]
+ },
+ "linux-aarch64": {
+ "size": 2047405,
+ "hash": "blake3",
+ "digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515",
+ "format": "tar.gz",
+ "path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg",
+ "providers": [
+ {
+ "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz"
+ }
+ ]
+ },
+ "macos-x86_64": {
+ "size": 2082672,
+ "hash": "blake3",
+ "digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e",
+ "format": "tar.gz",
+ "path": "ripgrep-14.1.1-x86_64-apple-darwin/rg",
+ "providers": [
+ {
+ "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz"
+ }
+ ]
+ },
+ "linux-x86_64": {
+ "size": 2566310,
+ "hash": "blake3",
+ "digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76",
+ "format": "tar.gz",
+ "path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg",
+ "providers": [
+ {
+ "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz"
+ }
+ ]
+ },
+ "windows-x86_64": {
+ "size": 2058893,
+ "hash": "blake3",
+ "digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd",
+ "format": "zip",
+ "path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe",
+ "providers": [
+ {
+ "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip"
+ }
+ ]
+ },
+ "windows-aarch64": {
+ "size": 1667740,
+ "hash": "blake3",
+ "digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa",
+ "format": "zip",
+ "path": "rg.exe",
+ "providers": [
+ {
+ "url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip"
+ }
+ ]
+ }
+ }
+}
diff --git a/codex-cli/package-lock.json b/codex-cli/package-lock.json
index a1c840ade0e..a28bb9cd9fe 100644
--- a/codex-cli/package-lock.json
+++ b/codex-cli/package-lock.json
@@ -2,118 +2,17 @@
"name": "@openai/codex",
"version": "0.0.0-dev",
"lockfileVersion": 3,
- "requires": true,
"packages": {
"": {
"name": "@openai/codex",
"version": "0.0.0-dev",
"license": "Apache-2.0",
- "dependencies": {
- "@vscode/ripgrep": "^1.15.14"
- },
"bin": {
"codex": "bin/codex.js"
},
"engines": {
"node": ">=20"
}
- },
- "node_modules/@vscode/ripgrep": {
- "version": "1.15.14",
- "resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz",
- "integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==",
- "hasInstallScript": true,
- "license": "MIT",
- "dependencies": {
- "https-proxy-agent": "^7.0.2",
- "proxy-from-env": "^1.1.0",
- "yauzl": "^2.9.2"
- }
- },
- "node_modules/agent-base": {
- "version": "7.1.4",
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
- "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
- "license": "MIT",
- "engines": {
- "node": ">= 14"
- }
- },
- "node_modules/buffer-crc32": {
- "version": "0.2.13",
- "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
- "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
- "license": "MIT",
- "engines": {
- "node": "*"
- }
- },
- "node_modules/debug": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
- "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
- "license": "MIT",
- "dependencies": {
- "ms": "^2.1.3"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/fd-slicer": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
- "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
- "license": "MIT",
- "dependencies": {
- "pend": "~1.2.0"
- }
- },
- "node_modules/https-proxy-agent": {
- "version": "7.0.6",
- "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
- "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
- "license": "MIT",
- "dependencies": {
- "agent-base": "^7.1.2",
- "debug": "4"
- },
- "engines": {
- "node": ">= 14"
- }
- },
- "node_modules/ms": {
- "version": "2.1.3",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
- "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
- "license": "MIT"
- },
- "node_modules/pend": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
- "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
- "license": "MIT"
- },
- "node_modules/proxy-from-env": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
- "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
- "license": "MIT"
- },
- "node_modules/yauzl": {
- "version": "2.10.0",
- "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
- "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
- "license": "MIT",
- "dependencies": {
- "buffer-crc32": "~0.2.3",
- "fd-slicer": "~1.1.0"
- }
}
}
}
diff --git a/codex-cli/package.json b/codex-cli/package.json
index 614ca1a832e..03f234d677f 100644
--- a/codex-cli/package.json
+++ b/codex-cli/package.json
@@ -11,16 +11,11 @@
},
"files": [
"bin",
- "dist"
+ "vendor"
],
"repository": {
"type": "git",
- "url": "git+https://github.com/openai/codex.git"
- },
- "dependencies": {
- "@vscode/ripgrep": "^1.15.14"
- },
- "devDependencies": {
- "prettier": "^3.3.3"
+ "url": "git+https://github.com/openai/codex.git",
+ "directory": "codex-cli"
}
}
diff --git a/codex-cli/scripts/README.md b/codex-cli/scripts/README.md
index 21e4f3e883b..fd2265b2997 100644
--- a/codex-cli/scripts/README.md
+++ b/codex-cli/scripts/README.md
@@ -5,5 +5,7 @@ Run the following:
To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:
```bash
-./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
+./codex-cli/scripts/build_npm_package.py --release-version 0.6.0
```
+
+Note this will create `./codex-cli/vendor/` as a side-effect.
diff --git a/codex-cli/scripts/build_npm_package.py b/codex-cli/scripts/build_npm_package.py
new file mode 100755
index 00000000000..0401f10385d
--- /dev/null
+++ b/codex-cli/scripts/build_npm_package.py
@@ -0,0 +1,269 @@
+#!/usr/bin/env python3
+"""Stage and optionally package the @openai/codex npm module."""
+
+import argparse
+import json
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+from pathlib import Path
+
+SCRIPT_DIR = Path(__file__).resolve().parent
+CODEX_CLI_ROOT = SCRIPT_DIR.parent
+REPO_ROOT = CODEX_CLI_ROOT.parent
+GITHUB_REPO = "openai/codex"
+
+# The docs are not clear on what the expected value/format of
+# workflow/workflowName is:
+# https://cli.github.com/manual/gh_run_list
+WORKFLOW_NAME = ".github/workflows/rust-release.yml"
+
+
+def parse_args() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
+ parser.add_argument(
+ "--version",
+ help="Version number to write to package.json inside the staged package.",
+ )
+ parser.add_argument(
+ "--release-version",
+ help=(
+ "Version to stage for npm release. When provided, the script also resolves the "
+ "matching rust-release workflow unless --workflow-url is supplied."
+ ),
+ )
+ parser.add_argument(
+ "--workflow-url",
+ help="Optional GitHub Actions workflow run URL used to download native binaries.",
+ )
+ parser.add_argument(
+ "--staging-dir",
+ type=Path,
+ help=(
+ "Directory to stage the package contents. Defaults to a new temporary directory "
+ "if omitted. The directory must be empty when provided."
+ ),
+ )
+ parser.add_argument(
+ "--tmp",
+ dest="staging_dir",
+ type=Path,
+ help=argparse.SUPPRESS,
+ )
+ parser.add_argument(
+ "--pack-output",
+ type=Path,
+ help="Path where the generated npm tarball should be written.",
+ )
+ return parser.parse_args()
+
+
+def main() -> int:
+ args = parse_args()
+
+ version = args.version
+ release_version = args.release_version
+ if release_version:
+ if version and version != release_version:
+ raise RuntimeError("--version and --release-version must match when both are provided.")
+ version = release_version
+
+ if not version:
+ raise RuntimeError("Must specify --version or --release-version.")
+
+ staging_dir, created_temp = prepare_staging_dir(args.staging_dir)
+
+ try:
+ stage_sources(staging_dir, version)
+
+ workflow_url = args.workflow_url
+ resolved_head_sha: str | None = None
+ if not workflow_url:
+ if release_version:
+ workflow = resolve_release_workflow(version)
+ workflow_url = workflow["url"]
+ resolved_head_sha = workflow.get("headSha")
+ else:
+ workflow_url = resolve_latest_alpha_workflow_url()
+ elif release_version:
+ try:
+ workflow = resolve_release_workflow(version)
+ resolved_head_sha = workflow.get("headSha")
+ except Exception:
+ resolved_head_sha = None
+
+ if release_version and resolved_head_sha:
+            print(f"Tip: run `git checkout {resolved_head_sha}` to build from the release commit.")
+
+ if not workflow_url:
+ raise RuntimeError("Unable to determine workflow URL for native binaries.")
+
+ install_native_binaries(staging_dir, workflow_url)
+
+ if release_version:
+ staging_dir_str = str(staging_dir)
+ print(
+ f"Staged version {version} for release in {staging_dir_str}\n\n"
+ "Verify the CLI:\n"
+ f" node {staging_dir_str}/bin/codex.js --version\n"
+ f" node {staging_dir_str}/bin/codex.js --help\n\n"
+ )
+ else:
+ print(f"Staged package in {staging_dir}")
+
+ if args.pack_output is not None:
+ output_path = run_npm_pack(staging_dir, args.pack_output)
+ print(f"npm pack output written to {output_path}")
+ finally:
+ if created_temp:
+ # Preserve the staging directory for further inspection.
+ pass
+
+ return 0
+
+
+def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
+ if staging_dir is not None:
+ staging_dir = staging_dir.resolve()
+ staging_dir.mkdir(parents=True, exist_ok=True)
+ if any(staging_dir.iterdir()):
+ raise RuntimeError(f"Staging directory {staging_dir} is not empty.")
+ return staging_dir, False
+
+ temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
+ return temp_dir, True
+
+
+def stage_sources(staging_dir: Path, version: str) -> None:
+ bin_dir = staging_dir / "bin"
+ bin_dir.mkdir(parents=True, exist_ok=True)
+
+ shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
+ rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
+ if rg_manifest.exists():
+ shutil.copy2(rg_manifest, bin_dir / "rg")
+
+ readme_src = REPO_ROOT / "README.md"
+ if readme_src.exists():
+ shutil.copy2(readme_src, staging_dir / "README.md")
+
+ with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh:
+ package_json = json.load(fh)
+ package_json["version"] = version
+
+ with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
+ json.dump(package_json, out, indent=2)
+ out.write("\n")
+
+
+def install_native_binaries(staging_dir: Path, workflow_url: str | None) -> None:
+ cmd = ["./scripts/install_native_deps.py"]
+ if workflow_url:
+ cmd.extend(["--workflow-url", workflow_url])
+ cmd.append(str(staging_dir))
+ subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
+
+
+def resolve_latest_alpha_workflow_url() -> str:
+ version = determine_latest_alpha_version()
+ workflow = resolve_release_workflow(version)
+ return workflow["url"]
+
+
+def determine_latest_alpha_version() -> str:
+ releases = list_releases()
+ best_key: tuple[int, int, int, int] | None = None
+ best_version: str | None = None
+ pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
+ for release in releases:
+ tag = release.get("tag_name", "")
+ match = pattern.match(tag)
+ if not match:
+ continue
+ key = tuple(int(match.group(i)) for i in range(1, 5))
+ if best_key is None or key > best_key:
+ best_key = key
+ best_version = (
+ f"{match.group(1)}.{match.group(2)}.{match.group(3)}-alpha.{match.group(4)}"
+ )
+
+ if best_version is None:
+ raise RuntimeError("No alpha releases found when resolving workflow URL.")
+ return best_version
+
+
+def list_releases() -> list[dict]:
+ stdout = subprocess.check_output(
+ ["gh", "api", f"/repos/{GITHUB_REPO}/releases?per_page=100"],
+ text=True,
+ )
+ try:
+ releases = json.loads(stdout or "[]")
+ except json.JSONDecodeError as exc:
+ raise RuntimeError("Unable to parse releases JSON.") from exc
+ if not isinstance(releases, list):
+ raise RuntimeError("Unexpected response when listing releases.")
+ return releases
+
+
+def resolve_release_workflow(version: str) -> dict:
+ stdout = subprocess.check_output(
+ [
+ "gh",
+ "run",
+ "list",
+ "--branch",
+ f"rust-v{version}",
+ "--json",
+ "workflowName,url,headSha",
+ "--workflow",
+ WORKFLOW_NAME,
+ "--jq",
+ "first(.[])",
+ ],
+ text=True,
+ )
+ workflow = json.loads(stdout or "[]")
+ if not workflow:
+ raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
+ return workflow
+
+
+def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
+ output_path = output_path.resolve()
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+
+ with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str:
+ pack_dir = Path(pack_dir_str)
+ stdout = subprocess.check_output(
+ ["npm", "pack", "--json", "--pack-destination", str(pack_dir)],
+ cwd=staging_dir,
+ text=True,
+ )
+ try:
+ pack_output = json.loads(stdout)
+ except json.JSONDecodeError as exc:
+ raise RuntimeError("Failed to parse npm pack output.") from exc
+
+ if not pack_output:
+ raise RuntimeError("npm pack did not produce an output tarball.")
+
+ tarball_name = pack_output[0].get("filename") or pack_output[0].get("name")
+ if not tarball_name:
+ raise RuntimeError("Unable to determine npm pack output filename.")
+
+ tarball_path = pack_dir / tarball_name
+ if not tarball_path.exists():
+ raise RuntimeError(f"Expected npm pack output not found: {tarball_path}")
+
+ shutil.move(str(tarball_path), output_path)
+
+ return output_path
+
+
+if __name__ == "__main__":
+    # `sys` is already imported at module scope; no local re-import needed.
+
+    sys.exit(main())
diff --git a/codex-cli/scripts/install_native_deps.py b/codex-cli/scripts/install_native_deps.py
new file mode 100755
index 00000000000..7fbb44393ae
--- /dev/null
+++ b/codex-cli/scripts/install_native_deps.py
@@ -0,0 +1,318 @@
+#!/usr/bin/env python3
+"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
+
+import argparse
+import json
+import os
+import shutil
+import subprocess
+import tarfile
+import tempfile
+import zipfile
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from pathlib import Path
+from typing import Iterable, Sequence
+from urllib.parse import urlparse
+from urllib.request import urlopen
+
+SCRIPT_DIR = Path(__file__).resolve().parent
+CODEX_CLI_ROOT = SCRIPT_DIR.parent
+DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351" # rust-v0.40.0
+VENDOR_DIR_NAME = "vendor"
+RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
+CODEX_TARGETS = (
+ "x86_64-unknown-linux-musl",
+ "aarch64-unknown-linux-musl",
+ "x86_64-apple-darwin",
+ "aarch64-apple-darwin",
+ "x86_64-pc-windows-msvc",
+ "aarch64-pc-windows-msvc",
+)
+
+RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
+ ("x86_64-unknown-linux-musl", "linux-x86_64"),
+ ("aarch64-unknown-linux-musl", "linux-aarch64"),
+ ("x86_64-apple-darwin", "macos-x86_64"),
+ ("aarch64-apple-darwin", "macos-aarch64"),
+ ("x86_64-pc-windows-msvc", "windows-x86_64"),
+ ("aarch64-pc-windows-msvc", "windows-aarch64"),
+]
+RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
+DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
+
+
+def parse_args() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(description="Install native Codex binaries.")
+ parser.add_argument(
+ "--workflow-url",
+ help=(
+ "GitHub Actions workflow URL that produced the artifacts. Defaults to a "
+ "known good run when omitted."
+ ),
+ )
+ parser.add_argument(
+ "root",
+ nargs="?",
+ type=Path,
+ help=(
+ "Directory containing package.json for the staged package. If omitted, the "
+ "repository checkout is used."
+ ),
+ )
+ return parser.parse_args()
+
+
+def main() -> int:
+ args = parse_args()
+
+ codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
+ vendor_dir = codex_cli_root / VENDOR_DIR_NAME
+ vendor_dir.mkdir(parents=True, exist_ok=True)
+
+ workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
+ if not workflow_url:
+ workflow_url = DEFAULT_WORKFLOW_URL
+
+ workflow_id = workflow_url.rstrip("/").split("/")[-1]
+
+ with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
+ artifacts_dir = Path(artifacts_dir_str)
+ _download_artifacts(workflow_id, artifacts_dir)
+ install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS)
+
+ fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
+
+ print(f"Installed native dependencies into {vendor_dir}")
+ return 0
+
+
+def fetch_rg(
+ vendor_dir: Path,
+ targets: Sequence[str] | None = None,
+ *,
+ manifest_path: Path,
+) -> list[Path]:
+ """Download ripgrep binaries described by the DotSlash manifest."""
+
+ if targets is None:
+ targets = DEFAULT_RG_TARGETS
+
+ if not manifest_path.exists():
+ raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}")
+
+ manifest = _load_manifest(manifest_path)
+ platforms = manifest.get("platforms", {})
+
+ vendor_dir.mkdir(parents=True, exist_ok=True)
+
+ targets = list(targets)
+ if not targets:
+ return []
+
+ task_configs: list[tuple[str, str, dict]] = []
+ for target in targets:
+ platform_key = RG_TARGET_TO_PLATFORM.get(target)
+ if platform_key is None:
+ raise ValueError(f"Unsupported ripgrep target '{target}'.")
+
+ platform_info = platforms.get(platform_key)
+ if platform_info is None:
+ raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.")
+
+ task_configs.append((target, platform_key, platform_info))
+
+ results: dict[str, Path] = {}
+ max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))
+
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ future_map = {
+ executor.submit(
+ _fetch_single_rg,
+ vendor_dir,
+ target,
+ platform_key,
+ platform_info,
+ manifest_path,
+ ): target
+ for target, platform_key, platform_info in task_configs
+ }
+
+ for future in as_completed(future_map):
+ target = future_map[future]
+ results[target] = future.result()
+
+ return [results[target] for target in targets]
+
+
+def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
+ cmd = [
+ "gh",
+ "run",
+ "download",
+ "--dir",
+ str(dest_dir),
+ "--repo",
+ "openai/codex",
+ workflow_id,
+ ]
+ subprocess.check_call(cmd)
+
+
+def install_codex_binaries(
+ artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str]
+) -> list[Path]:
+ targets = list(targets)
+ if not targets:
+ return []
+
+ results: dict[str, Path] = {}
+ max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
+
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ future_map = {
+ executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target
+ for target in targets
+ }
+
+ for future in as_completed(future_map):
+ target = future_map[future]
+ results[target] = future.result()
+
+ return [results[target] for target in targets]
+
+
+def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path:
+ artifact_subdir = artifacts_dir / target
+ archive_name = _archive_name_for_target(target)
+ archive_path = artifact_subdir / archive_name
+ if not archive_path.exists():
+ raise FileNotFoundError(f"Expected artifact not found: {archive_path}")
+
+ dest_dir = vendor_dir / target / "codex"
+ dest_dir.mkdir(parents=True, exist_ok=True)
+
+ binary_name = "codex.exe" if "windows" in target else "codex"
+ dest = dest_dir / binary_name
+ dest.unlink(missing_ok=True)
+ extract_archive(archive_path, "zst", None, dest)
+ if "windows" not in target:
+ dest.chmod(0o755)
+ return dest
+
+
+def _archive_name_for_target(target: str) -> str:
+ if "windows" in target:
+ return f"codex-{target}.exe.zst"
+ return f"codex-{target}.zst"
+
+
+def _fetch_single_rg(
+ vendor_dir: Path,
+ target: str,
+ platform_key: str,
+ platform_info: dict,
+ manifest_path: Path,
+) -> Path:
+ providers = platform_info.get("providers", [])
+ if not providers:
+ raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.")
+
+ url = providers[0]["url"]
+ archive_format = platform_info.get("format", "zst")
+ archive_member = platform_info.get("path")
+
+ dest_dir = vendor_dir / target / "path"
+ dest_dir.mkdir(parents=True, exist_ok=True)
+
+ is_windows = platform_key.startswith("win")
+ binary_name = "rg.exe" if is_windows else "rg"
+ dest = dest_dir / binary_name
+
+ with tempfile.TemporaryDirectory() as tmp_dir_str:
+ tmp_dir = Path(tmp_dir_str)
+ archive_filename = os.path.basename(urlparse(url).path)
+ download_path = tmp_dir / archive_filename
+ _download_file(url, download_path)
+
+ dest.unlink(missing_ok=True)
+ extract_archive(download_path, archive_format, archive_member, dest)
+
+ if not is_windows:
+ dest.chmod(0o755)
+
+ return dest
+
+
+def _download_file(url: str, dest: Path) -> None:
+ dest.parent.mkdir(parents=True, exist_ok=True)
+ with urlopen(url) as response, open(dest, "wb") as out:
+ shutil.copyfileobj(response, out)
+
+
+def extract_archive(
+ archive_path: Path,
+ archive_format: str,
+ archive_member: str | None,
+ dest: Path,
+) -> None:
+ dest.parent.mkdir(parents=True, exist_ok=True)
+
+ if archive_format == "zst":
+ output_path = archive_path.parent / dest.name
+ subprocess.check_call(
+ ["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)]
+ )
+ shutil.move(str(output_path), dest)
+ return
+
+ if archive_format == "tar.gz":
+ if not archive_member:
+ raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.")
+ with tarfile.open(archive_path, "r:gz") as tar:
+ try:
+ member = tar.getmember(archive_member)
+ except KeyError as exc:
+ raise RuntimeError(
+ f"Entry '{archive_member}' not found in archive {archive_path}."
+ ) from exc
+ tar.extract(member, path=archive_path.parent, filter="data")
+ extracted = archive_path.parent / archive_member
+ shutil.move(str(extracted), dest)
+ return
+
+ if archive_format == "zip":
+ if not archive_member:
+ raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.")
+ with zipfile.ZipFile(archive_path) as archive:
+ try:
+ with archive.open(archive_member) as src, open(dest, "wb") as out:
+ shutil.copyfileobj(src, out)
+ except KeyError as exc:
+ raise RuntimeError(
+ f"Entry '{archive_member}' not found in archive {archive_path}."
+ ) from exc
+ return
+
+ raise RuntimeError(f"Unsupported archive format '{archive_format}'.")
+
+
+def _load_manifest(manifest_path: Path) -> dict:
+ cmd = ["dotslash", "--", "parse", str(manifest_path)]
+ stdout = subprocess.check_output(cmd, text=True)
+ try:
+ manifest = json.loads(stdout)
+ except json.JSONDecodeError as exc:
+ raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc
+
+ if not isinstance(manifest, dict):
+ raise RuntimeError(
+ f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}"
+ )
+
+ return manifest
+
+
+if __name__ == "__main__":
+ import sys
+
+ sys.exit(main())
diff --git a/codex-cli/scripts/install_native_deps.sh b/codex-cli/scripts/install_native_deps.sh
deleted file mode 100755
index b9fda2543e0..00000000000
--- a/codex-cli/scripts/install_native_deps.sh
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env bash
-
-# Install native runtime dependencies for codex-cli.
-#
-# Usage
-# install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT]
-#
-# The optional RELEASE_ROOT is the path that contains package.json. Omitting
-# it installs the binaries into the repository's own bin/ folder to support
-# local development.
-
-set -euo pipefail
-
-# ------------------
-# Parse arguments
-# ------------------
-
-CODEX_CLI_ROOT=""
-
-# Until we start publishing stable GitHub releases, we have to grab the binaries
-# from the GitHub Action that created them. Update the URL below to point to the
-# appropriate workflow run:
-WORKFLOW_URL="https://github.com/openai/codex/actions/runs/17417194663" # rust-v0.28.0
-
-while [[ $# -gt 0 ]]; do
- case "$1" in
- --workflow-url)
- shift || { echo "--workflow-url requires an argument"; exit 1; }
- if [ -n "$1" ]; then
- WORKFLOW_URL="$1"
- fi
- ;;
- *)
- if [[ -z "$CODEX_CLI_ROOT" ]]; then
- CODEX_CLI_ROOT="$1"
- else
- echo "Unexpected argument: $1" >&2
- exit 1
- fi
- ;;
- esac
- shift
-done
-
-# ----------------------------------------------------------------------------
-# Determine where the binaries should be installed.
-# ----------------------------------------------------------------------------
-
-if [ -n "$CODEX_CLI_ROOT" ]; then
- # The caller supplied a release root directory.
- BIN_DIR="$CODEX_CLI_ROOT/bin"
-else
- # No argument; fall back to the repo’s own bin directory.
- # Resolve the path of this script, then walk up to the repo root.
- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
- CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
- BIN_DIR="$CODEX_CLI_ROOT/bin"
-fi
-
-# Make sure the destination directory exists.
-mkdir -p "$BIN_DIR"
-
-# ----------------------------------------------------------------------------
-# Download and decompress the artifacts from the GitHub Actions workflow.
-# ----------------------------------------------------------------------------
-
-WORKFLOW_ID="${WORKFLOW_URL##*/}"
-
-ARTIFACTS_DIR="$(mktemp -d)"
-trap 'rm -rf "$ARTIFACTS_DIR"' EXIT
-
-# NB: The GitHub CLI `gh` must be installed and authenticated.
-gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID"
-
-# x64 Linux
-zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \
- -o "$BIN_DIR/codex-x86_64-unknown-linux-musl"
-# ARM64 Linux
-zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \
- -o "$BIN_DIR/codex-aarch64-unknown-linux-musl"
-# x64 macOS
-zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \
- -o "$BIN_DIR/codex-x86_64-apple-darwin"
-# ARM64 macOS
-zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \
- -o "$BIN_DIR/codex-aarch64-apple-darwin"
-# x64 Windows
-zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \
- -o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe"
-# ARM64 Windows
-zstd -d "$ARTIFACTS_DIR/aarch64-pc-windows-msvc/codex-aarch64-pc-windows-msvc.exe.zst" \
- -o "$BIN_DIR/codex-aarch64-pc-windows-msvc.exe"
-
-echo "Installed native dependencies into $BIN_DIR"
diff --git a/codex-cli/scripts/stage_release.sh b/codex-cli/scripts/stage_release.sh
deleted file mode 100755
index 96236fc53c0..00000000000
--- a/codex-cli/scripts/stage_release.sh
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env bash
-# -----------------------------------------------------------------------------
-# stage_release.sh
-# -----------------------------------------------------------------------------
-# Stages an npm release for @openai/codex.
-#
-# Usage:
-#
-# --tmp
: Use instead of a freshly created temp directory.
-# -h|--help : Print usage.
-#
-# -----------------------------------------------------------------------------
-
-set -euo pipefail
-
-# Helper - usage / flag parsing
-
-usage() {
- cat <&2
- usage 1
- ;;
- *)
- echo "Unexpected extra argument: $1" >&2
- usage 1
- ;;
- esac
- shift
-done
-
-# Fallback when the caller did not specify a directory.
-# If no directory was specified create a fresh temporary one.
-if [[ -z "$TMPDIR" ]]; then
- TMPDIR="$(mktemp -d)"
-fi
-
-# Ensure the directory exists, then resolve to an absolute path.
-mkdir -p "$TMPDIR"
-TMPDIR="$(cd "$TMPDIR" && pwd)"
-
-# Main build logic
-
-echo "Staging release in $TMPDIR"
-
-# The script lives in codex-cli/scripts/ - change into codex-cli root so that
-# relative paths keep working.
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
-
-pushd "$CODEX_CLI_ROOT" >/dev/null
-
-# 1. Build the JS artifacts ---------------------------------------------------
-
-# Paths inside the staged package
-mkdir -p "$TMPDIR/bin"
-
-cp -r bin/codex.js "$TMPDIR/bin/codex.js"
-cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing
-
-# Modify package.json - bump version and optionally add the native directory to
-# the files array so that the binaries are published to npm.
-
-jq --arg version "$VERSION" \
- '.version = $version' \
- package.json > "$TMPDIR/package.json"
-
-# 2. Native runtime deps (sandbox plus optional Rust binaries)
-
-./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR"
-
-popd >/dev/null
-
-echo "Staged version $VERSION for release in $TMPDIR"
-
-echo "Verify the CLI:"
-echo " node ${TMPDIR}/bin/codex.js --version"
-echo " node ${TMPDIR}/bin/codex.js --help"
-
-# Print final hint for convenience
-echo "Next: cd \"$TMPDIR\" && npm publish"
diff --git a/codex-cli/scripts/stage_rust_release.py b/codex-cli/scripts/stage_rust_release.py
deleted file mode 100755
index 9a554b77d01..00000000000
--- a/codex-cli/scripts/stage_rust_release.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3
-
-import json
-import subprocess
-import sys
-import argparse
-from pathlib import Path
-
-
-def main() -> int:
- parser = argparse.ArgumentParser(
- description="""Stage a release for the npm module.
-
-Run this after the GitHub Release has been created and use
-`--release-version` to specify the version to release.
-
-Optionally pass `--tmp` to control the temporary staging directory that will be
-forwarded to stage_release.sh.
-"""
- )
- parser.add_argument(
- "--release-version", required=True, help="Version to release, e.g., 0.3.0"
- )
- parser.add_argument(
- "--tmp",
- help="Optional path to stage the npm package; forwarded to stage_release.sh",
- )
- args = parser.parse_args()
- version = args.release_version
-
- gh_run = subprocess.run(
- [
- "gh",
- "run",
- "list",
- "--branch",
- f"rust-v{version}",
- "--json",
- "workflowName,url,headSha",
- "--jq",
- 'first(.[] | select(.workflowName == "rust-release"))',
- ],
- stdout=subprocess.PIPE,
- check=True,
- )
- gh_run.check_returncode()
- workflow = json.loads(gh_run.stdout)
- sha = workflow["headSha"]
-
- print(f"should `git checkout {sha}`")
-
- current_dir = Path(__file__).parent.resolve()
- cmd = [
- str(current_dir / "stage_release.sh"),
- "--version",
- version,
- "--workflow-url",
- workflow["url"],
- ]
- if args.tmp:
- cmd.extend(["--tmp", args.tmp])
-
- stage_release = subprocess.run(cmd)
- stage_release.check_returncode()
-
- return 0
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock
index 0efe2c3b080..db1300dca6f 100644
--- a/codex-rs/Cargo.lock
+++ b/codex-rs/Cargo.lock
@@ -56,7 +56,7 @@ checksum = "8fac2ce611db8b8cee9b2aa886ca03c924e9da5e5295d0dbd0526e5d0b0710f7"
dependencies = [
"allocative_derive",
"bumpalo",
- "ctor",
+ "ctor 0.1.26",
"hashbrown 0.14.5",
"num-bigint",
]
@@ -78,12 +78,6 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
-[[package]]
-name = "android-tzdata"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
-
[[package]]
name = "android_system_properties"
version = "0.1.5"
@@ -316,6 +310,17 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "async-trait"
+version = "0.1.89"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.104",
+]
+
[[package]]
name = "atomic-waker"
version = "1.1.2"
@@ -483,19 +488,24 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
[[package]]
name = "chrono"
-version = "0.4.41"
+version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
+checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
dependencies = [
- "android-tzdata",
"iana-time-zone",
"js-sys",
"num-traits",
"serde",
"wasm-bindgen",
- "windows-link",
+ "windows-link 0.2.0",
]
[[package]]
@@ -571,7 +581,7 @@ checksum = "e9b18233253483ce2f65329a24072ec414db782531bdbb7d0bbc4bd2ce6b7e21"
[[package]]
name = "codex-ansi-escape"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"ansi-to-tui",
"ratatui",
@@ -580,11 +590,10 @@ dependencies = [
[[package]]
name = "codex-apply-patch"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"assert_cmd",
- "once_cell",
"pretty_assertions",
"similar",
"tempfile",
@@ -595,7 +604,7 @@ dependencies = [
[[package]]
name = "codex-arg0"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"codex-apply-patch",
@@ -608,7 +617,7 @@ dependencies = [
[[package]]
name = "codex-chatgpt"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"clap",
@@ -622,7 +631,7 @@ dependencies = [
[[package]]
name = "codex-cli"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"assert_cmd",
@@ -637,10 +646,15 @@ dependencies = [
"codex-mcp-server",
"codex-protocol",
"codex-protocol-ts",
+ "codex-responses-api-proxy",
"codex-tui",
+ "ctor 0.5.0",
+ "libc",
+ "owo-colors",
"predicates",
"pretty_assertions",
"serde_json",
+ "supports-color",
"tempfile",
"tokio",
"tracing",
@@ -649,7 +663,7 @@ dependencies = [
[[package]]
name = "codex-common"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"clap",
"codex-core",
@@ -660,12 +674,13 @@ dependencies = [
[[package]]
name = "codex-core"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"askama",
"assert_cmd",
"async-channel",
+ "async-trait",
"base64",
"bytes",
"chrono",
@@ -673,12 +688,15 @@ dependencies = [
"codex-file-search",
"codex-mcp-client",
"codex-protocol",
+ "codex-rmcp-client",
"codex-slash-commands",
"core_test_support",
- "dirs 6.0.0",
+ "dirs",
"env-flags",
+ "escargot",
"eventsource-stream",
"futures",
+ "indexmap 2.10.0",
"landlock",
"libc",
"maplit",
@@ -688,7 +706,7 @@ dependencies = [
"portable-pty",
"predicates",
"pretty_assertions",
- "rand 0.9.2",
+ "rand",
"regex-lite",
"reqwest",
"seccompiler",
@@ -718,7 +736,7 @@ dependencies = [
[[package]]
name = "codex-exec"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"assert_cmd",
@@ -733,12 +751,15 @@ dependencies = [
"libc",
"owo-colors",
"predicates",
+ "pretty_assertions",
+ "serde",
"serde_json",
"shlex",
"tempfile",
"tokio",
"tracing",
"tracing-subscriber",
+ "ts-rs",
"uuid",
"walkdir",
"wiremock",
@@ -746,7 +767,7 @@ dependencies = [
[[package]]
name = "codex-execpolicy"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"allocative",
"anyhow",
@@ -766,7 +787,7 @@ dependencies = [
[[package]]
name = "codex-file-search"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"clap",
@@ -777,9 +798,19 @@ dependencies = [
"tokio",
]
+[[package]]
+name = "codex-git-tooling"
+version = "0.42.0"
+dependencies = [
+ "pretty_assertions",
+ "tempfile",
+ "thiserror 2.0.16",
+ "walkdir",
+]
+
[[package]]
name = "codex-linux-sandbox"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"clap",
"codex-core",
@@ -792,13 +823,15 @@ dependencies = [
[[package]]
name = "codex-login"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
+ "anyhow",
"base64",
"chrono",
"codex-core",
"codex-protocol",
- "rand 0.8.5",
+ "core_test_support",
+ "rand",
"reqwest",
"serde",
"serde_json",
@@ -813,7 +846,7 @@ dependencies = [
[[package]]
name = "codex-mcp-client"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"mcp-types",
@@ -826,7 +859,7 @@ dependencies = [
[[package]]
name = "codex-mcp-server"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"assert_cmd",
@@ -836,6 +869,7 @@ dependencies = [
"codex-core",
"codex-login",
"codex-protocol",
+ "core_test_support",
"mcp-types",
"mcp_test_support",
"os_info",
@@ -855,7 +889,7 @@ dependencies = [
[[package]]
name = "codex-ollama"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"async-stream",
"bytes",
@@ -870,8 +904,9 @@ dependencies = [
[[package]]
name = "codex-protocol"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
+ "anyhow",
"base64",
"icu_decimal",
"icu_locale_core",
@@ -892,7 +927,7 @@ dependencies = [
[[package]]
name = "codex-protocol-ts"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"clap",
@@ -901,24 +936,54 @@ dependencies = [
"ts-rs",
]
+[[package]]
+name = "codex-responses-api-proxy"
+version = "0.42.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "codex-arg0",
+ "libc",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "tiny_http",
+ "tokio",
+ "zeroize",
+]
+
+[[package]]
+name = "codex-rmcp-client"
+version = "0.42.0"
+dependencies = [
+ "anyhow",
+ "mcp-types",
+ "pretty_assertions",
+ "rmcp",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tracing",
+]
+
[[package]]
name = "codex-slash-commands"
version = "0.0.0"
dependencies = [
- "dirs 5.0.1",
+ "dirs",
"pretty_assertions",
"serde",
"serde_yaml",
"shlex",
"tempfile",
"textwrap 0.16.2",
- "thiserror 1.0.69",
+ "thiserror 2.0.16",
"tokio",
]
[[package]]
name = "codex-tui"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"arboard",
@@ -931,6 +996,7 @@ dependencies = [
"codex-common",
"codex-core",
"codex-file-search",
+ "codex-git-tooling",
"codex-login",
"codex-ollama",
"codex-protocol",
@@ -938,19 +1004,18 @@ dependencies = [
"color-eyre",
"crossterm",
"diffy",
- "dirs 6.0.0",
+ "dirs",
"image",
"insta",
"itertools 0.14.0",
"lazy_static",
"libc",
"mcp-types",
- "once_cell",
"path-clean",
"pathdiff",
"pretty_assertions",
"pulldown-cmark",
- "rand 0.9.2",
+ "rand",
"ratatui",
"regex-lite",
"serde",
@@ -967,11 +1032,21 @@ dependencies = [
"tracing-appender",
"tracing-subscriber",
"unicode-segmentation",
- "unicode-width 0.1.14",
+ "unicode-width 0.2.1",
"url",
"vt100",
]
+[[package]]
+name = "codex-utils-readiness"
+version = "0.42.0"
+dependencies = [
+ "async-trait",
+ "thiserror 2.0.16",
+ "time",
+ "tokio",
+]
+
[[package]]
name = "color-eyre"
version = "0.6.5"
@@ -1087,12 +1162,15 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core_test_support"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
+ "anyhow",
+ "assert_cmd",
"codex-core",
"serde_json",
"tempfile",
"tokio",
+ "wiremock",
]
[[package]]
@@ -1199,14 +1277,40 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "ctor"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67773048316103656a637612c4a62477603b777d91d9c62ff2290f9cde178fdb"
+dependencies = [
+ "ctor-proc-macro",
+ "dtor",
+]
+
+[[package]]
+name = "ctor-proc-macro"
+version = "0.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2931af7e13dc045d8e9d26afccc6fa115d64e115c9c84b1166288b46f6782c2"
+
[[package]]
name = "darling"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
dependencies = [
- "darling_core",
- "darling_macro",
+ "darling_core 0.20.11",
+ "darling_macro 0.20.11",
+]
+
+[[package]]
+name = "darling"
+version = "0.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
+dependencies = [
+ "darling_core 0.21.3",
+ "darling_macro 0.21.3",
]
[[package]]
@@ -1223,13 +1327,38 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "darling_core"
+version = "0.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4"
+dependencies = [
+ "fnv",
+ "ident_case",
+ "proc-macro2",
+ "quote",
+ "strsim 0.11.1",
+ "syn 2.0.104",
+]
+
[[package]]
name = "darling_macro"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
dependencies = [
- "darling_core",
+ "darling_core 0.20.11",
+ "quote",
+ "syn 2.0.104",
+]
+
+[[package]]
+name = "darling_macro"
+version = "0.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
+dependencies = [
+ "darling_core 0.21.3",
"quote",
"syn 2.0.104",
]
@@ -1265,12 +1394,12 @@ dependencies = [
[[package]]
name = "deranged"
-version = "0.4.0"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
+checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071"
dependencies = [
"powerfmt",
- "serde",
+ "serde_core",
]
[[package]]
@@ -1358,22 +1487,13 @@ dependencies = [
"crypto-common",
]
-[[package]]
-name = "dirs"
-version = "5.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
-dependencies = [
- "dirs-sys 0.4.1",
-]
-
[[package]]
name = "dirs"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
dependencies = [
- "dirs-sys 0.5.0",
+ "dirs-sys",
]
[[package]]
@@ -1386,18 +1506,6 @@ dependencies = [
"dirs-sys-next",
]
-[[package]]
-name = "dirs-sys"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
-dependencies = [
- "libc",
- "option-ext",
- "redox_users 0.4.6",
- "windows-sys 0.48.0",
-]
-
[[package]]
name = "dirs-sys"
version = "0.5.0"
@@ -1470,6 +1578,21 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2"
+[[package]]
+name = "dtor"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e58a0764cddb55ab28955347b45be00ade43d4d6f3ba4bf3dc354e4ec9432934"
+dependencies = [
+ "dtor-proc-macro",
+]
+
+[[package]]
+name = "dtor-proc-macro"
+version = "0.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f678cf4a922c215c63e0de95eb1ff08a958a81d47e485cf9da1e27bf6305cfa5"
+
[[package]]
name = "dupe"
version = "0.9.1"
@@ -1612,6 +1735,17 @@ version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
+[[package]]
+name = "escargot"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11c3aea32bc97b500c9ca6a72b768a26e558264303d101d3409cf6d57a9ed0cf"
+dependencies = [
+ "log",
+ "serde",
+ "serde_json",
+]
+
[[package]]
name = "event-listener"
version = "5.4.0"
@@ -1917,8 +2051,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
+ "js-sys",
"libc",
"wasi 0.11.1+wasi-snapshot-preview1",
+ "wasm-bindgen",
]
[[package]]
@@ -1928,9 +2064,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [
"cfg-if",
+ "js-sys",
"libc",
"r-efi",
"wasi 0.14.2+wasi-0.2.4",
+ "wasm-bindgen",
]
[[package]]
@@ -2127,6 +2265,7 @@ dependencies = [
"tokio",
"tokio-rustls",
"tower-service",
+ "webpki-roots",
]
[[package]]
@@ -2436,7 +2575,7 @@ version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a"
dependencies = [
- "darling",
+ "darling 0.20.11",
"indoc",
"proc-macro2",
"quote",
@@ -2691,9 +2830,9 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.27"
+version = "0.4.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
+checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
[[package]]
name = "logos"
@@ -2727,6 +2866,12 @@ dependencies = [
"hashbrown 0.15.4",
]
+[[package]]
+name = "lru-slab"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
+
[[package]]
name = "lsp-types"
version = "0.94.1"
@@ -2757,7 +2902,7 @@ dependencies = [
[[package]]
name = "mcp-types"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"serde",
"serde_json",
@@ -2766,7 +2911,7 @@ dependencies = [
[[package]]
name = "mcp_test_support"
-version = "0.0.0"
+version = "0.42.0"
dependencies = [
"anyhow",
"assert_cmd",
@@ -2906,7 +3051,19 @@ checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.9.1",
"cfg-if",
- "cfg_aliases",
+ "cfg_aliases 0.1.1",
+ "libc",
+]
+
+[[package]]
+name = "nix"
+version = "0.30.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
+dependencies = [
+ "bitflags 2.9.1",
+ "cfg-if",
+ "cfg_aliases 0.2.1",
"libc",
]
@@ -3332,7 +3489,7 @@ dependencies = [
"lazy_static",
"libc",
"log",
- "nix",
+ "nix 0.28.0",
"serial2",
"shared_library",
"shell-words",
@@ -3420,6 +3577,20 @@ dependencies = [
"unicode-ident",
]
+[[package]]
+name = "process-wrap"
+version = "8.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1"
+dependencies = [
+ "futures",
+ "indexmap 2.10.0",
+ "nix 0.30.1",
+ "tokio",
+ "tracing",
+ "windows",
+]
+
[[package]]
name = "pulldown-cmark"
version = "0.10.3"
@@ -3463,6 +3634,61 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "quinn"
+version = "0.11.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
+dependencies = [
+ "bytes",
+ "cfg_aliases 0.2.1",
+ "pin-project-lite",
+ "quinn-proto",
+ "quinn-udp",
+ "rustc-hash",
+ "rustls",
+ "socket2",
+ "thiserror 2.0.16",
+ "tokio",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-proto"
+version = "0.11.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
+dependencies = [
+ "bytes",
+ "getrandom 0.3.3",
+ "lru-slab",
+ "rand",
+ "ring",
+ "rustc-hash",
+ "rustls",
+ "rustls-pki-types",
+ "slab",
+ "thiserror 2.0.16",
+ "tinyvec",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-udp"
+version = "0.5.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
+dependencies = [
+ "cfg_aliases 0.2.1",
+ "libc",
+ "once_cell",
+ "socket2",
+ "tracing",
+ "windows-sys 0.60.2",
+]
+
[[package]]
name = "quote"
version = "1.0.40"
@@ -3488,35 +3714,14 @@ dependencies = [
"nibble_vec",
]
-[[package]]
-name = "rand"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
-dependencies = [
- "libc",
- "rand_chacha 0.3.1",
- "rand_core 0.6.4",
-]
-
[[package]]
name = "rand"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
- "rand_chacha 0.9.0",
- "rand_core 0.9.3",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
-dependencies = [
- "ppv-lite86",
- "rand_core 0.6.4",
+ "rand_chacha",
+ "rand_core",
]
[[package]]
@@ -3526,16 +3731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
- "rand_core 0.9.3",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.6.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
-dependencies = [
- "getrandom 0.2.16",
+ "rand_core",
]
[[package]]
@@ -3685,6 +3881,8 @@ dependencies = [
"native-tls",
"percent-encoding",
"pin-project-lite",
+ "quinn",
+ "rustls",
"rustls-pki-types",
"serde",
"serde_json",
@@ -3692,6 +3890,7 @@ dependencies = [
"sync_wrapper",
"tokio",
"tokio-native-tls",
+ "tokio-rustls",
"tokio-util",
"tower",
"tower-http",
@@ -3701,6 +3900,7 @@ dependencies = [
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
+ "webpki-roots",
]
[[package]]
@@ -3717,12 +3917,54 @@ dependencies = [
"windows-sys 0.52.0",
]
+[[package]]
+name = "rmcp"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "534fd1cd0601e798ac30545ff2b7f4a62c6f14edd4aaed1cc5eb1e85f69f09af"
+dependencies = [
+ "base64",
+ "chrono",
+ "futures",
+ "paste",
+ "pin-project-lite",
+ "process-wrap",
+ "rmcp-macros",
+ "schemars 1.0.4",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.16",
+ "tokio",
+ "tokio-stream",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "rmcp-macros"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ba777eb0e5f53a757e36f0e287441da0ab766564ba7201600eeb92a4753022e"
+dependencies = [
+ "darling 0.21.3",
+ "proc-macro2",
+ "quote",
+ "serde_json",
+ "syn 2.0.104",
+]
+
[[package]]
name = "rustc-demangle"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"
+[[package]]
+name = "rustc-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+
[[package]]
name = "rustix"
version = "0.38.44"
@@ -3756,6 +3998,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2491382039b29b9b11ff08b76ff6c97cf287671dbb74f0be44bda389fffe9bd1"
dependencies = [
"once_cell",
+ "ring",
"rustls-pki-types",
"rustls-webpki",
"subtle",
@@ -3768,6 +4011,7 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79"
dependencies = [
+ "web-time",
"zeroize",
]
@@ -3802,7 +4046,7 @@ dependencies = [
"libc",
"log",
"memchr",
- "nix",
+ "nix 0.28.0",
"radix_trie",
"unicode-segmentation",
"unicode-width 0.1.14",
@@ -3883,7 +4127,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
dependencies = [
"dyn-clone",
- "schemars_derive",
+ "schemars_derive 0.8.22",
"serde",
"serde_json",
]
@@ -3906,8 +4150,10 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
dependencies = [
+ "chrono",
"dyn-clone",
"ref-cast",
+ "schemars_derive 1.0.4",
"serde",
"serde_json",
]
@@ -3924,6 +4170,18 @@ dependencies = [
"syn 2.0.104",
]
+[[package]]
+name = "schemars_derive"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "serde_derive_internals",
+ "syn 2.0.104",
+]
+
[[package]]
name = "scopeguard"
version = "1.2.0"
@@ -3964,9 +4222,9 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.224"
+version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b"
+checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
dependencies = [
"serde_core",
"serde_derive",
@@ -3974,18 +4232,18 @@ dependencies = [
[[package]]
name = "serde_core"
-version = "1.0.224"
+version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab"
+checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.224"
+version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0"
+checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
dependencies = [
"proc-macro2",
"quote",
@@ -4075,7 +4333,7 @@ version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
dependencies = [
- "darling",
+ "darling 0.20.11",
"proc-macro2",
"quote",
"syn 2.0.104",
@@ -4501,15 +4759,15 @@ dependencies = [
[[package]]
name = "tempfile"
-version = "3.20.0"
+version = "3.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
+checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
dependencies = [
"fastrand",
"getrandom 0.3.3",
"once_cell",
"rustix 1.0.8",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
]
[[package]]
@@ -4633,9 +4891,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.41"
+version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
+checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
dependencies = [
"deranged",
"itoa",
@@ -4650,15 +4908,15 @@ dependencies = [
[[package]]
name = "time-core"
-version = "0.1.4"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
+checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
[[package]]
name = "time-macros"
-version = "0.2.22"
+version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
+checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
dependencies = [
"num-conv",
"time-core",
@@ -4695,6 +4953,21 @@ dependencies = [
"zerovec",
]
+[[package]]
+name = "tinyvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
[[package]]
name = "tokio"
version = "1.47.1"
@@ -5313,6 +5586,16 @@ dependencies = [
"wasm-bindgen",
]
+[[package]]
+name = "web-time"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
[[package]]
name = "webbrowser"
version = "1.0.5"
@@ -5329,6 +5612,15 @@ dependencies = [
"web-sys",
]
+[[package]]
+name = "webpki-roots"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2"
+dependencies = [
+ "rustls-pki-types",
+]
+
[[package]]
name = "weezl"
version = "0.1.10"
@@ -5384,6 +5676,28 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+[[package]]
+name = "windows"
+version = "0.61.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
+dependencies = [
+ "windows-collections",
+ "windows-core",
+ "windows-future",
+ "windows-link 0.1.3",
+ "windows-numerics",
+]
+
+[[package]]
+name = "windows-collections"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
+dependencies = [
+ "windows-core",
+]
+
[[package]]
name = "windows-core"
version = "0.61.2"
@@ -5392,11 +5706,22 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
dependencies = [
"windows-implement",
"windows-interface",
- "windows-link",
+ "windows-link 0.1.3",
"windows-result",
"windows-strings",
]
+[[package]]
+name = "windows-future"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
+dependencies = [
+ "windows-core",
+ "windows-link 0.1.3",
+ "windows-threading",
+]
+
[[package]]
name = "windows-implement"
version = "0.60.0"
@@ -5425,13 +5750,29 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
+[[package]]
+name = "windows-link"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
+
+[[package]]
+name = "windows-numerics"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
+dependencies = [
+ "windows-core",
+ "windows-link 0.1.3",
+]
+
[[package]]
name = "windows-registry"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
"windows-result",
"windows-strings",
]
@@ -5442,7 +5783,7 @@ version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
]
[[package]]
@@ -5451,7 +5792,7 @@ version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
dependencies = [
- "windows-link",
+ "windows-link 0.1.3",
]
[[package]]
@@ -5463,15 +5804,6 @@ dependencies = [
"windows-targets 0.42.2",
]
-[[package]]
-name = "windows-sys"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
-dependencies = [
- "windows-targets 0.48.5",
-]
-
[[package]]
name = "windows-sys"
version = "0.52.0"
@@ -5561,6 +5893,15 @@ dependencies = [
"windows_x86_64_msvc 0.53.0",
]
+[[package]]
+name = "windows-threading"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
+dependencies = [
+ "windows-link 0.1.3",
+]
+
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
diff --git a/codex-rs/Cargo.toml b/codex-rs/Cargo.toml
index 9cdcdc8babf..60b78143a1c 100644
--- a/codex-rs/Cargo.toml
+++ b/codex-rs/Cargo.toml
@@ -9,6 +9,7 @@ members = [
"exec",
"execpolicy",
"file-search",
+ "git-tooling",
"linux-sandbox",
"login",
"mcp-client",
@@ -17,28 +18,193 @@ members = [
"ollama",
"protocol",
"protocol-ts",
+ "rmcp-client",
+ "responses-api-proxy",
"slash-commands",
"tui",
+ "utils/readiness",
]
resolver = "2"
[workspace.package]
-version = "0.0.0"
+version = "0.42.0"
# Track the edition for all workspace crates in one place. Individual
# crates can still override this value, but keeping it here means new
# crates created with `cargo new -w ...` automatically inherit the 2024
# edition.
edition = "2024"
+[workspace.dependencies]
+# Internal
+codex-ansi-escape = { path = "ansi-escape" }
+codex-apply-patch = { path = "apply-patch" }
+codex-arg0 = { path = "arg0" }
+codex-chatgpt = { path = "chatgpt" }
+codex-common = { path = "common" }
+codex-core = { path = "core" }
+codex-exec = { path = "exec" }
+codex-file-search = { path = "file-search" }
+codex-git-tooling = { path = "git-tooling" }
+codex-linux-sandbox = { path = "linux-sandbox" }
+codex-login = { path = "login" }
+codex-mcp-client = { path = "mcp-client" }
+codex-mcp-server = { path = "mcp-server" }
+codex-ollama = { path = "ollama" }
+codex-protocol = { path = "protocol" }
+codex-rmcp-client = { path = "rmcp-client" }
+codex-protocol-ts = { path = "protocol-ts" }
+codex-responses-api-proxy = { path = "responses-api-proxy" }
+codex-slash-commands = { path = "slash-commands" }
+codex-tui = { path = "tui" }
+codex-utils-readiness = { path = "utils/readiness" }
+core_test_support = { path = "core/tests/common" }
+mcp-types = { path = "mcp-types" }
+mcp_test_support = { path = "mcp-server/tests/common" }
+
+# External
+allocative = "0.3.3"
+ansi-to-tui = "7.0.0"
+anyhow = "1"
+arboard = "3"
+askama = "0.12"
+assert_cmd = "2"
+async-channel = "2.3.1"
+async-stream = "0.3.6"
+async-trait = "0.1.89"
+base64 = "0.22.1"
+bytes = "1.10.1"
+chrono = "0.4.42"
+clap = "4"
+clap_complete = "4"
+color-eyre = "0.6.3"
+crossterm = "0.28.1"
+ctor = "0.5.0"
+derive_more = "2"
+diffy = "0.4.2"
+dirs = "6"
+dotenvy = "0.15.7"
+env-flags = "0.1.1"
+env_logger = "0.11.5"
+eventsource-stream = "0.2.3"
+escargot = "0.5"
+futures = "0.3"
+icu_decimal = "2.0.0"
+icu_locale_core = "2.0.0"
+ignore = "0.4.23"
+image = { version = "^0.25.8", default-features = false }
+indexmap = "2.6.0"
+insta = "1.43.2"
+itertools = "0.14.0"
+landlock = "0.4.1"
+lazy_static = "1"
+libc = "0.2.175"
+log = "0.4"
+maplit = "1.0.2"
+mime_guess = "2.0.5"
+multimap = "0.10.0"
+nucleo-matcher = "0.3.1"
+openssl-sys = "*"
+os_info = "3.12.0"
+owo-colors = "4.2.0"
+path-absolutize = "3.1.1"
+path-clean = "1.0.1"
+pathdiff = "0.2"
+portable-pty = "0.9.0"
+predicates = "3"
+pretty_assertions = "1.4.1"
+pulldown-cmark = "0.10"
+rand = "0.9"
+ratatui = "0.29.0"
+regex-lite = "0.1.7"
+reqwest = "0.12"
+schemars = "0.8.22"
+seccompiler = "0.5.0"
+serde = "1"
+serde_json = "1"
+serde_yaml = "0.9"
+serde_with = "3.14"
+sha1 = "0.10.6"
+sha2 = "0.10"
+shlex = "1.3.0"
+similar = "2.7.0"
+starlark = "0.13.0"
+strum = "0.27.2"
+strum_macros = "0.27.2"
+supports-color = "3.0.2"
+sys-locale = "0.3.2"
+tempfile = "3.23.0"
+textwrap = "0.16.2"
+thiserror = "2.0.16"
+time = "0.3"
+tiny_http = "0.12"
+tokio = "1"
+tokio-stream = "0.1.17"
+tokio-test = "0.4"
+tokio-util = "0.7.16"
+toml = "0.9.5"
+toml_edit = "0.23.4"
+tracing = "0.1.41"
+tracing-appender = "0.2.3"
+tracing-subscriber = "0.3.20"
+tree-sitter = "0.25.9"
+tree-sitter-bash = "0.25.0"
+ts-rs = "11"
+unicode-segmentation = "1.12.0"
+unicode-width = "0.2"
+url = "2"
+urlencoding = "2.1"
+uuid = "1"
+vt100 = "0.16.2"
+walkdir = "2.5.0"
+webbrowser = "1.0"
+which = "6"
+wildmatch = "2.5.0"
+wiremock = "0.6"
+zeroize = "1.8.1"
+
[workspace.lints]
rust = {}
[workspace.lints.clippy]
expect_used = "deny"
+identity_op = "deny"
+manual_clamp = "deny"
+manual_filter = "deny"
+manual_find = "deny"
+manual_flatten = "deny"
+manual_map = "deny"
+manual_memcpy = "deny"
+manual_non_exhaustive = "deny"
+manual_ok_or = "deny"
+manual_range_contains = "deny"
+manual_retain = "deny"
+manual_strip = "deny"
+manual_try_fold = "deny"
+manual_unwrap_or = "deny"
+needless_borrow = "deny"
+needless_borrowed_reference = "deny"
+needless_collect = "deny"
+needless_late_init = "deny"
+needless_option_as_deref = "deny"
+needless_question_mark = "deny"
+needless_update = "deny"
redundant_clone = "deny"
+redundant_closure = "deny"
+redundant_closure_for_method_calls = "deny"
+redundant_static_lifetimes = "deny"
+trivially_copy_pass_by_ref = "deny"
uninlined_format_args = "deny"
+unnecessary_filter_map = "deny"
+unnecessary_lazy_evaluations = "deny"
+unnecessary_sort_by = "deny"
+unnecessary_to_owned = "deny"
unwrap_used = "deny"
+# cargo-shear cannot see the platform-specific openssl-sys usage, so we
+# silence the false positive here instead of deleting a real dependency.
+[workspace.metadata.cargo-shear]
+ignored = ["openssl-sys", "codex-utils-readiness"]
+
[profile.release]
lto = "fat"
# Because we bundle some of these executables with the TypeScript CLI, we
diff --git a/codex-rs/ansi-escape/Cargo.toml b/codex-rs/ansi-escape/Cargo.toml
index ada675380d6..4107a727540 100644
--- a/codex-rs/ansi-escape/Cargo.toml
+++ b/codex-rs/ansi-escape/Cargo.toml
@@ -8,9 +8,9 @@ name = "codex_ansi_escape"
path = "src/lib.rs"
[dependencies]
-ansi-to-tui = "7.0.0"
-ratatui = { version = "0.29.0", features = [
+ansi-to-tui = { workspace = true }
+ratatui = { workspace = true, features = [
"unstable-rendered-line-info",
"unstable-widget-ref",
] }
-tracing = { version = "0.1.41", features = ["log"] }
+tracing = { workspace = true, features = ["log"] }
diff --git a/codex-rs/apply-patch/Cargo.toml b/codex-rs/apply-patch/Cargo.toml
index 7b5919a323a..9445ae08b74 100644
--- a/codex-rs/apply-patch/Cargo.toml
+++ b/codex-rs/apply-patch/Cargo.toml
@@ -15,14 +15,13 @@ path = "src/main.rs"
workspace = true
[dependencies]
-anyhow = "1"
-similar = "2.7.0"
-thiserror = "2.0.16"
-tree-sitter = "0.25.9"
-tree-sitter-bash = "0.25.0"
-once_cell = "1"
+anyhow = { workspace = true }
+similar = { workspace = true }
+thiserror = { workspace = true }
+tree-sitter = { workspace = true }
+tree-sitter-bash = { workspace = true }
[dev-dependencies]
-assert_cmd = "2"
-pretty_assertions = "1.4.1"
-tempfile = "3.13.0"
+assert_cmd = { workspace = true }
+pretty_assertions = { workspace = true }
+tempfile = { workspace = true }
diff --git a/codex-rs/apply-patch/src/lib.rs b/codex-rs/apply-patch/src/lib.rs
index 5bda31c4c22..3737c6ea6df 100644
--- a/codex-rs/apply-patch/src/lib.rs
+++ b/codex-rs/apply-patch/src/lib.rs
@@ -6,10 +6,10 @@ use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::str::Utf8Error;
+use std::sync::LazyLock;
use anyhow::Context;
use anyhow::Result;
-use once_cell::sync::Lazy;
pub use parser::Hunk;
pub use parser::ParseError;
use parser::ParseError::*;
@@ -351,7 +351,7 @@ fn extract_apply_patch_from_bash(
// also run an arbitrary query against the AST. This is useful for understanding
// how tree-sitter parses the script and whether the query syntax is correct. Be sure
// to test both positive and negative cases.
- static APPLY_PATCH_QUERY: Lazy = Lazy::new(|| {
+ static APPLY_PATCH_QUERY: LazyLock = LazyLock::new(|| {
let language = BASH.into();
#[expect(clippy::expect_used)]
Query::new(
@@ -648,21 +648,18 @@ fn derive_new_contents_from_chunks(
}
};
- let mut original_lines: Vec = original_contents
- .split('\n')
- .map(|s| s.to_string())
- .collect();
+ let mut original_lines: Vec = original_contents.split('\n').map(String::from).collect();
// Drop the trailing empty element that results from the final newline so
// that line counts match the behaviour of standard `diff`.
- if original_lines.last().is_some_and(|s| s.is_empty()) {
+ if original_lines.last().is_some_and(String::is_empty) {
original_lines.pop();
}
let replacements = compute_replacements(&original_lines, path, chunks)?;
let new_lines = apply_replacements(original_lines, &replacements);
let mut new_lines = new_lines;
- if !new_lines.last().is_some_and(|s| s.is_empty()) {
+ if !new_lines.last().is_some_and(String::is_empty) {
new_lines.push(String::new());
}
let new_contents = new_lines.join("\n");
@@ -706,7 +703,7 @@ fn compute_replacements(
if chunk.old_lines.is_empty() {
// Pure addition (no old lines). We'll add them at the end or just
// before the final empty line if one exists.
- let insertion_idx = if original_lines.last().is_some_and(|s| s.is_empty()) {
+ let insertion_idx = if original_lines.last().is_some_and(String::is_empty) {
original_lines.len() - 1
} else {
original_lines.len()
@@ -732,11 +729,11 @@ fn compute_replacements(
let mut new_slice: &[String] = &chunk.new_lines;
- if found.is_none() && pattern.last().is_some_and(|s| s.is_empty()) {
+ if found.is_none() && pattern.last().is_some_and(String::is_empty) {
// Retry without the trailing empty line which represents the final
// newline in the file.
pattern = &pattern[..pattern.len() - 1];
- if new_slice.last().is_some_and(|s| s.is_empty()) {
+ if new_slice.last().is_some_and(String::is_empty) {
new_slice = &new_slice[..new_slice.len() - 1];
}
@@ -848,6 +845,7 @@ mod tests {
use super::*;
use pretty_assertions::assert_eq;
use std::fs;
+ use std::string::ToString;
use tempfile::tempdir;
/// Helper to construct a patch with the given body.
@@ -856,7 +854,7 @@ mod tests {
}
fn strs_to_strings(strs: &[&str]) -> Vec {
- strs.iter().map(|s| s.to_string()).collect()
+ strs.iter().map(ToString::to_string).collect()
}
// Test helpers to reduce repetition when building bash -lc heredoc scripts
diff --git a/codex-rs/apply-patch/src/seek_sequence.rs b/codex-rs/apply-patch/src/seek_sequence.rs
index 0144580f9b4..b005b08c754 100644
--- a/codex-rs/apply-patch/src/seek_sequence.rs
+++ b/codex-rs/apply-patch/src/seek_sequence.rs
@@ -112,9 +112,10 @@ pub(crate) fn seek_sequence(
#[cfg(test)]
mod tests {
use super::seek_sequence;
+ use std::string::ToString;
fn to_vec(strings: &[&str]) -> Vec {
- strings.iter().map(|s| s.to_string()).collect()
+ strings.iter().map(ToString::to_string).collect()
}
#[test]
diff --git a/codex-rs/arg0/Cargo.toml b/codex-rs/arg0/Cargo.toml
index a01120b7983..10d09e4a4b0 100644
--- a/codex-rs/arg0/Cargo.toml
+++ b/codex-rs/arg0/Cargo.toml
@@ -11,10 +11,10 @@ path = "src/lib.rs"
workspace = true
[dependencies]
-anyhow = "1"
-codex-apply-patch = { path = "../apply-patch" }
-codex-core = { path = "../core" }
-codex-linux-sandbox = { path = "../linux-sandbox" }
-dotenvy = "0.15.7"
-tempfile = "3"
-tokio = { version = "1", features = ["rt-multi-thread"] }
+anyhow = { workspace = true }
+codex-apply-patch = { workspace = true }
+codex-core = { workspace = true }
+codex-linux-sandbox = { workspace = true }
+dotenvy = { workspace = true }
+tempfile = { workspace = true }
+tokio = { workspace = true, features = ["rt-multi-thread"] }
diff --git a/codex-rs/arg0/src/lib.rs b/codex-rs/arg0/src/lib.rs
index d5df68e5545..e70ff2df645 100644
--- a/codex-rs/arg0/src/lib.rs
+++ b/codex-rs/arg0/src/lib.rs
@@ -54,7 +54,7 @@ where
let argv1 = args.next().unwrap_or_default();
if argv1 == CODEX_APPLY_PATCH_ARG1 {
- let patch_arg = args.next().and_then(|s| s.to_str().map(|s| s.to_owned()));
+ let patch_arg = args.next().and_then(|s| s.to_str().map(str::to_owned));
let exit_code = match patch_arg {
Some(patch_arg) => {
let mut stdout = std::io::stdout();
diff --git a/codex-rs/chatgpt/Cargo.toml b/codex-rs/chatgpt/Cargo.toml
index af5f910efec..97e14d7fe7c 100644
--- a/codex-rs/chatgpt/Cargo.toml
+++ b/codex-rs/chatgpt/Cargo.toml
@@ -7,13 +7,13 @@ version = { workspace = true }
workspace = true
[dependencies]
-anyhow = "1"
-clap = { version = "4", features = ["derive"] }
-codex-common = { path = "../common", features = ["cli"] }
-codex-core = { path = "../core" }
-serde = { version = "1", features = ["derive"] }
-serde_json = "1"
-tokio = { version = "1", features = ["full"] }
+anyhow = { workspace = true }
+clap = { workspace = true, features = ["derive"] }
+codex-common = { workspace = true, features = ["cli"] }
+codex-core = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+tokio = { workspace = true, features = ["full"] }
[dev-dependencies]
-tempfile = "3"
+tempfile = { workspace = true }
diff --git a/codex-rs/cli/Cargo.toml b/codex-rs/cli/Cargo.toml
index b6c6e53c161..858db4453b0 100644
--- a/codex-rs/cli/Cargo.toml
+++ b/codex-rs/cli/Cargo.toml
@@ -15,32 +15,45 @@ path = "src/lib.rs"
workspace = true
[dependencies]
-anyhow = "1"
-clap = { version = "4", features = ["derive"] }
-clap_complete = "4"
-codex-arg0 = { path = "../arg0" }
-codex-chatgpt = { path = "../chatgpt" }
-codex-common = { path = "../common", features = ["cli"] }
-codex-core = { path = "../core", features = ["slash_commands"] }
-codex-exec = { path = "../exec" }
-codex-login = { path = "../login" }
-codex-mcp-server = { path = "../mcp-server" }
-codex-protocol = { path = "../protocol" }
-codex-tui = { path = "../tui" }
-serde_json = "1"
-tokio = { version = "1", features = [
+anyhow = { workspace = true }
+clap = { workspace = true, features = ["derive"] }
+clap_complete = { workspace = true }
+codex-arg0 = { workspace = true }
+codex-chatgpt = { workspace = true }
+codex-common = { workspace = true, features = ["cli"] }
+codex-core = { workspace = true, features = ["slash_commands"] }
+codex-exec = { workspace = true }
+codex-login = { workspace = true }
+codex-mcp-server = { workspace = true }
+codex-protocol = { workspace = true }
+codex-protocol-ts = { workspace = true }
+codex-responses-api-proxy = { workspace = true }
+codex-tui = { workspace = true, features = ["slash_commands"] }
+ctor = { workspace = true }
+owo-colors = { workspace = true }
+serde_json = { workspace = true }
+supports-color = { workspace = true }
+tokio = { workspace = true, features = [
"io-std",
"macros",
"process",
"rt-multi-thread",
"signal",
] }
-tracing = "0.1.41"
-tracing-subscriber = "0.3.20"
-codex-protocol-ts = { path = "../protocol-ts" }
+tracing = { workspace = true }
+tracing-subscriber = { workspace = true }
+
+[target.'cfg(target_os = "linux")'.dependencies]
+libc = { workspace = true }
+
+[target.'cfg(target_os = "android")'.dependencies]
+libc = { workspace = true }
+
+[target.'cfg(target_os = "macos")'.dependencies]
+libc = { workspace = true }
[dev-dependencies]
-assert_cmd = "2"
-predicates = "3"
-pretty_assertions = "1"
-tempfile = "3"
+assert_cmd = { workspace = true }
+predicates = { workspace = true }
+pretty_assertions = { workspace = true }
+tempfile = { workspace = true }
diff --git a/codex-rs/cli/src/debug_sandbox.rs b/codex-rs/cli/src/debug_sandbox.rs
index 6fe7f003c74..a7d7103c2fa 100644
--- a/codex-rs/cli/src/debug_sandbox.rs
+++ b/codex-rs/cli/src/debug_sandbox.rs
@@ -64,7 +64,6 @@ async fn run_command_under_sandbox(
sandbox_type: SandboxType,
) -> anyhow::Result<()> {
let sandbox_mode = create_sandbox_mode(full_auto);
- let cwd = std::env::current_dir()?;
let config = Config::load_with_cli_overrides(
config_overrides
.parse_overrides()
@@ -75,13 +74,29 @@ async fn run_command_under_sandbox(
..Default::default()
},
)?;
+
+ // In practice, this should be `std::env::current_dir()` because this CLI
+ // does not support `--cwd`, but let's use the config value for consistency.
+ let cwd = config.cwd.clone();
+ // For now, we always use the same cwd for both the command and the
+ // sandbox policy. In the future, we could add a CLI option to set them
+ // separately.
+ let sandbox_policy_cwd = cwd.clone();
+
let stdio_policy = StdioPolicy::Inherit;
let env = create_env(&config.shell_environment_policy);
let mut child = match sandbox_type {
SandboxType::Seatbelt => {
- spawn_command_under_seatbelt(command, &config.sandbox_policy, cwd, stdio_policy, env)
- .await?
+ spawn_command_under_seatbelt(
+ command,
+ cwd,
+ &config.sandbox_policy,
+ sandbox_policy_cwd.as_path(),
+ stdio_policy,
+ env,
+ )
+ .await?
}
SandboxType::Landlock => {
#[expect(clippy::expect_used)]
@@ -91,8 +106,9 @@ async fn run_command_under_sandbox(
spawn_command_under_linux_sandbox(
codex_linux_sandbox_exe,
command,
- &config.sandbox_policy,
cwd,
+ &config.sandbox_policy,
+ sandbox_policy_cwd.as_path(),
stdio_policy,
env,
)
diff --git a/codex-rs/cli/src/main.rs b/codex-rs/cli/src/main.rs
index 7ce98a39372..871966b0a77 100644
--- a/codex-rs/cli/src/main.rs
+++ b/codex-rs/cli/src/main.rs
@@ -1,3 +1,4 @@
+use anyhow::Context;
use clap::CommandFactory;
use clap::Parser;
use clap_complete::Shell;
@@ -14,10 +15,15 @@ use codex_cli::login::run_logout;
use codex_cli::proto;
use codex_common::CliConfigOverrides;
use codex_exec::Cli as ExecCli;
+use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
+use codex_tui::AppExitInfo;
use codex_tui::Cli as TuiCli;
+use owo_colors::OwoColorize;
use std::path::PathBuf;
+use supports_color::Stream;
mod mcp_cmd;
+mod pre_main_hardening;
use crate::mcp_cmd::McpCli;
use crate::proto::ProtoCli;
@@ -82,6 +88,10 @@ enum Subcommand {
/// Internal: generate TypeScript protocol bindings.
#[clap(hide = true)]
GenerateTs(GenerateTsCommand),
+
+ /// Internal: run the responses API proxy.
+ #[clap(hide = true)]
+ ResponsesApiProxy(ResponsesApiProxyArgs),
}
#[derive(Debug, Parser)]
@@ -156,6 +166,69 @@ struct GenerateTsCommand {
prettier: Option,
}
+fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec {
+ let AppExitInfo {
+ token_usage,
+ conversation_id,
+ } = exit_info;
+
+ if token_usage.is_zero() {
+ return Vec::new();
+ }
+
+ let mut lines = vec![format!(
+ "{}",
+ codex_core::protocol::FinalOutput::from(token_usage)
+ )];
+
+ if let Some(session_id) = conversation_id {
+ let resume_cmd = format!("codex resume {session_id}");
+ let command = if color_enabled {
+ resume_cmd.cyan().to_string()
+ } else {
+ resume_cmd
+ };
+ lines.push(format!("To continue this session, run {command}."));
+ }
+
+ lines
+}
+
+fn print_exit_messages(exit_info: AppExitInfo) {
+ let color_enabled = supports_color::on(Stream::Stdout).is_some();
+ for line in format_exit_messages(exit_info, color_enabled) {
+ println!("{line}");
+ }
+}
+
+pub(crate) const CODEX_SECURE_MODE_ENV_VAR: &str = "CODEX_SECURE_MODE";
+
+/// As early as possible in the process lifecycle, apply hardening measures
+/// if the CODEX_SECURE_MODE environment variable is set to "1".
+#[ctor::ctor]
+fn pre_main_hardening() {
+ let secure_mode = match std::env::var(CODEX_SECURE_MODE_ENV_VAR) {
+ Ok(value) => value,
+ Err(_) => return,
+ };
+
+ if secure_mode == "1" {
+ #[cfg(any(target_os = "linux", target_os = "android"))]
+ crate::pre_main_hardening::pre_main_hardening_linux();
+
+ #[cfg(target_os = "macos")]
+ crate::pre_main_hardening::pre_main_hardening_macos();
+
+ #[cfg(windows)]
+ crate::pre_main_hardening::pre_main_hardening_windows();
+ }
+
+ // Always clear this env var so child processes don't inherit it.
+ unsafe {
+ std::env::remove_var(CODEX_SECURE_MODE_ENV_VAR);
+ }
+}
+
fn main() -> anyhow::Result<()> {
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
cli_main(codex_linux_sandbox_exe).await?;
@@ -176,10 +249,8 @@ async fn cli_main(codex_linux_sandbox_exe: Option) -> anyhow::Result<()
&mut interactive.config_overrides,
root_config_overrides.clone(),
);
- let usage = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
- if !usage.is_zero() {
- println!("{}", codex_core::protocol::FinalOutput::from(usage));
- }
+ let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
+ print_exit_messages(exit_info);
}
Some(Subcommand::Exec(mut exec_cli)) => {
prepend_config_flags(
@@ -276,6 +347,11 @@ async fn cli_main(codex_linux_sandbox_exe: Option) -> anyhow::Result<()
Some(Subcommand::GenerateTs(gen_cli)) => {
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
}
+ Some(Subcommand::ResponsesApiProxy(args)) => {
+ tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
+ .await
+ .context("responses-api-proxy blocking task panicked")??;
+ }
}
Ok(())
@@ -369,6 +445,8 @@ fn print_completion(cmd: CompletionCommand) {
#[cfg(test)]
mod tests {
use super::*;
+ use codex_core::protocol::TokenUsage;
+ use codex_protocol::mcp_protocol::ConversationId;
fn finalize_from_args(args: &[&str]) -> TuiCli {
let cli = MultitoolCli::try_parse_from(args).expect("parse");
@@ -390,6 +468,52 @@ mod tests {
finalize_resume_interactive(interactive, root_overrides, session_id, last, resume_cli)
}
+ fn sample_exit_info(conversation: Option<&str>) -> AppExitInfo {
+ let token_usage = TokenUsage {
+ output_tokens: 2,
+ total_tokens: 2,
+ ..Default::default()
+ };
+ AppExitInfo {
+ token_usage,
+ conversation_id: conversation
+ .map(ConversationId::from_string)
+ .map(Result::unwrap),
+ }
+ }
+
+ #[test]
+ fn format_exit_messages_skips_zero_usage() {
+ let exit_info = AppExitInfo {
+ token_usage: TokenUsage::default(),
+ conversation_id: None,
+ };
+ let lines = format_exit_messages(exit_info, false);
+ assert!(lines.is_empty());
+ }
+
+ #[test]
+ fn format_exit_messages_includes_resume_hint_without_color() {
+ let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
+ let lines = format_exit_messages(exit_info, false);
+ assert_eq!(
+ lines,
+ vec![
+ "Token usage: total=2 input=0 output=2".to_string(),
+ "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000."
+ .to_string(),
+ ]
+ );
+ }
+
+ #[test]
+ fn format_exit_messages_applies_color_when_enabled() {
+ let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
+ let lines = format_exit_messages(exit_info, true);
+ assert_eq!(lines.len(), 2);
+ assert!(lines[1].contains("\u{1b}[36m"));
+ }
+
#[test]
fn resume_model_flag_applies_when_no_root_flags() {
let interactive = finalize_from_args(["codex", "resume", "-m", "gpt-5-test"].as_ref());
diff --git a/codex-rs/cli/src/mcp_cmd.rs b/codex-rs/cli/src/mcp_cmd.rs
index 437511ad572..465de71aac1 100644
--- a/codex-rs/cli/src/mcp_cmd.rs
+++ b/codex-rs/cli/src/mcp_cmd.rs
@@ -148,7 +148,8 @@ fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<(
command: command_bin,
args: command_args,
env: env_map,
- startup_timeout_ms: None,
+ startup_timeout_sec: None,
+ tool_timeout_sec: None,
};
servers.insert(name.clone(), new_entry);
@@ -210,7 +211,12 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul
"command": cfg.command,
"args": cfg.args,
"env": env,
- "startup_timeout_ms": cfg.startup_timeout_ms,
+ "startup_timeout_sec": cfg
+ .startup_timeout_sec
+ .map(|timeout| timeout.as_secs_f64()),
+ "tool_timeout_sec": cfg
+ .tool_timeout_sec
+ .map(|timeout| timeout.as_secs_f64()),
})
})
.collect();
@@ -305,7 +311,12 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<(
"command": server.command,
"args": server.args,
"env": env,
- "startup_timeout_ms": server.startup_timeout_ms,
+ "startup_timeout_sec": server
+ .startup_timeout_sec
+ .map(|timeout| timeout.as_secs_f64()),
+ "tool_timeout_sec": server
+ .tool_timeout_sec
+ .map(|timeout| timeout.as_secs_f64()),
}))?;
println!("{output}");
return Ok(());
@@ -333,8 +344,11 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<(
}
};
println!(" env: {env_display}");
- if let Some(timeout) = server.startup_timeout_ms {
- println!(" startup_timeout_ms: {timeout}");
+ if let Some(timeout) = server.startup_timeout_sec {
+ println!(" startup_timeout_sec: {}", timeout.as_secs_f64());
+ }
+ if let Some(timeout) = server.tool_timeout_sec {
+ println!(" tool_timeout_sec: {}", timeout.as_secs_f64());
}
println!(" remove: codex mcp remove {}", get_args.name);
diff --git a/codex-rs/cli/src/pre_main_hardening.rs b/codex-rs/cli/src/pre_main_hardening.rs
new file mode 100644
index 00000000000..a75cb3561ce
--- /dev/null
+++ b/codex-rs/cli/src/pre_main_hardening.rs
@@ -0,0 +1,98 @@
+#[cfg(any(target_os = "linux", target_os = "android"))]
+const PRCTL_FAILED_EXIT_CODE: i32 = 5;
+
+#[cfg(target_os = "macos")]
+const PTRACE_DENY_ATTACH_FAILED_EXIT_CODE: i32 = 6;
+
+#[cfg(any(target_os = "linux", target_os = "android", target_os = "macos"))]
+const SET_RLIMIT_CORE_FAILED_EXIT_CODE: i32 = 7;
+
+#[cfg(any(target_os = "linux", target_os = "android"))]
+pub(crate) fn pre_main_hardening_linux() {
+ // Disable ptrace attach / mark process non-dumpable.
+ let ret_code = unsafe { libc::prctl(libc::PR_SET_DUMPABLE, 0, 0, 0, 0) };
+ if ret_code != 0 {
+ eprintln!(
+ "ERROR: prctl(PR_SET_DUMPABLE, 0) failed: {}",
+ std::io::Error::last_os_error()
+ );
+ std::process::exit(PRCTL_FAILED_EXIT_CODE);
+ }
+
+ // For "defense in depth," set the core file size limit to 0.
+ set_core_file_size_limit_to_zero();
+
+ // Official Codex releases are MUSL-linked, which means that variables such
+ // as LD_PRELOAD are ignored anyway, but just to be sure, clear them here.
+ let ld_keys: Vec = std::env::vars()
+ .filter_map(|(key, _)| {
+ if key.starts_with("LD_") {
+ Some(key)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ for key in ld_keys {
+ unsafe {
+ std::env::remove_var(key);
+ }
+ }
+}
+
+#[cfg(target_os = "macos")]
+pub(crate) fn pre_main_hardening_macos() {
+ // Prevent debuggers from attaching to this process.
+ let ret_code = unsafe { libc::ptrace(libc::PT_DENY_ATTACH, 0, std::ptr::null_mut(), 0) };
+ if ret_code == -1 {
+ eprintln!(
+ "ERROR: ptrace(PT_DENY_ATTACH) failed: {}",
+ std::io::Error::last_os_error()
+ );
+ std::process::exit(PTRACE_DENY_ATTACH_FAILED_EXIT_CODE);
+ }
+
+ // Set the core file size limit to 0 to prevent core dumps.
+ set_core_file_size_limit_to_zero();
+
+ // Remove all DYLD_ environment variables, which can be used to subvert
+ // library loading.
+ let dyld_keys: Vec = std::env::vars()
+ .filter_map(|(key, _)| {
+ if key.starts_with("DYLD_") {
+ Some(key)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ for key in dyld_keys {
+ unsafe {
+ std::env::remove_var(key);
+ }
+ }
+}
+
+#[cfg(unix)]
+fn set_core_file_size_limit_to_zero() {
+ let rlim = libc::rlimit {
+ rlim_cur: 0,
+ rlim_max: 0,
+ };
+
+ let ret_code = unsafe { libc::setrlimit(libc::RLIMIT_CORE, &rlim) };
+ if ret_code != 0 {
+ eprintln!(
+ "ERROR: setrlimit(RLIMIT_CORE) failed: {}",
+ std::io::Error::last_os_error()
+ );
+ std::process::exit(SET_RLIMIT_CORE_FAILED_EXIT_CODE);
+ }
+}
+
+#[cfg(windows)]
+pub(crate) fn pre_main_hardening_windows() {
+ // TODO(mbolin): Perform the appropriate configuration for Windows.
+}
diff --git a/codex-rs/code b/codex-rs/code
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/codex-rs/common/Cargo.toml b/codex-rs/common/Cargo.toml
index b10600574c1..3ce84a6f502 100644
--- a/codex-rs/common/Cargo.toml
+++ b/codex-rs/common/Cargo.toml
@@ -7,11 +7,11 @@ version = { workspace = true }
workspace = true
[dependencies]
-clap = { version = "4", features = ["derive", "wrap_help"], optional = true }
-codex-core = { path = "../core" }
-codex-protocol = { path = "../protocol" }
-serde = { version = "1", optional = true }
-toml = { version = "0.9", optional = true }
+clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
+codex-core = { workspace = true }
+codex-protocol = { workspace = true }
+serde = { workspace = true, optional = true }
+toml = { workspace = true, optional = true }
[features]
# Separate feature so that `clap` is not a mandatory dependency.
diff --git a/codex-rs/common/src/model_presets.rs b/codex-rs/common/src/model_presets.rs
index 065bb1e340d..8eb5beacda5 100644
--- a/codex-rs/common/src/model_presets.rs
+++ b/codex-rs/common/src/model_presets.rs
@@ -1,4 +1,3 @@
-use codex_core::config::GPT_5_CODEX_MEDIUM_MODEL;
use codex_core::protocol_config_types::ReasoningEffort;
use codex_protocol::mcp_protocol::AuthMode;
@@ -69,13 +68,6 @@ const PRESETS: &[ModelPreset] = &[
},
];
-pub fn builtin_model_presets(auth_mode: Option) -> Vec {
- match auth_mode {
- Some(AuthMode::ApiKey) => PRESETS
- .iter()
- .copied()
- .filter(|p| p.model != GPT_5_CODEX_MEDIUM_MODEL)
- .collect(),
- _ => PRESETS.to_vec(),
- }
+pub fn builtin_model_presets(_auth_mode: Option) -> Vec {
+ PRESETS.to_vec()
}
diff --git a/codex-rs/core/Cargo.toml b/codex-rs/core/Cargo.toml
index 1757b1328b4..7dda49eed6b 100644
--- a/codex-rs/core/Cargo.toml
+++ b/codex-rs/core/Cargo.toml
@@ -8,85 +8,94 @@ default = []
slash_commands = ["codex-slash-commands"]
[lib]
+doctest = false
name = "codex_core"
path = "src/lib.rs"
-doctest = false
[lints]
workspace = true
[dependencies]
-anyhow = "1"
-codex-slash-commands = { path = "../slash-commands", optional = true }
-askama = "0.12"
-async-channel = "2.3.1"
-base64 = "0.22"
-bytes = "1.10.1"
-chrono = { version = "0.4", features = ["serde"] }
-codex-apply-patch = { path = "../apply-patch" }
-codex-file-search = { path = "../file-search" }
-codex-mcp-client = { path = "../mcp-client" }
-codex-protocol = { path = "../protocol" }
-dirs = "6"
-env-flags = "0.1.1"
-eventsource-stream = "0.2.3"
-futures = "0.3"
-libc = "0.2.175"
-mcp-types = { path = "../mcp-types" }
-os_info = "3.12.0"
-portable-pty = "0.9.0"
-rand = "0.9"
-regex-lite = "0.1.7"
-reqwest = { version = "0.12", features = ["json", "stream"] }
-serde = { version = "1", features = ["derive"] }
-serde_json = "1"
-sha1 = "0.10.6"
-shlex = "1.3.0"
-similar = "2.7.0"
-strum_macros = "0.27.2"
-tempfile = "3"
-thiserror = "2.0.16"
-time = { version = "0.3", features = ["formatting", "parsing", "local-offset", "macros"] }
-tokio = { version = "1", features = [
+anyhow = { workspace = true }
+askama = { workspace = true }
+async-channel = { workspace = true }
+async-trait = { workspace = true }
+base64 = { workspace = true }
+bytes = { workspace = true }
+chrono = { workspace = true, features = ["serde"] }
+codex-apply-patch = { workspace = true }
+codex-file-search = { workspace = true }
+codex-mcp-client = { workspace = true }
+codex-rmcp-client = { workspace = true }
+codex-protocol = { workspace = true }
+codex-slash-commands = { workspace = true, optional = true }
+dirs = { workspace = true }
+env-flags = { workspace = true }
+eventsource-stream = { workspace = true }
+futures = { workspace = true }
+indexmap = { workspace = true }
+libc = { workspace = true }
+mcp-types = { workspace = true }
+os_info = { workspace = true }
+portable-pty = { workspace = true }
+rand = { workspace = true }
+regex-lite = { workspace = true }
+reqwest = { workspace = true, features = ["json", "stream"] }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+sha1 = { workspace = true }
+shlex = { workspace = true }
+similar = { workspace = true }
+strum_macros = { workspace = true }
+tempfile = { workspace = true }
+thiserror = { workspace = true }
+time = { workspace = true, features = [
+ "formatting",
+ "parsing",
+ "local-offset",
+ "macros",
+] }
+tokio = { workspace = true, features = [
"io-std",
"macros",
"process",
"rt-multi-thread",
"signal",
] }
-tokio-util = "0.7.16"
-toml = "0.9.5"
-toml_edit = "0.23.4"
-tracing = { version = "0.1.41", features = ["log"] }
-tree-sitter = "0.25.9"
-tree-sitter-bash = "0.25.0"
-uuid = { version = "1", features = ["serde", "v4"] }
-which = "6"
-wildmatch = "2.5.0"
+tokio-util = { workspace = true }
+toml = { workspace = true }
+toml_edit = { workspace = true }
+tracing = { workspace = true, features = ["log"] }
+tree-sitter = { workspace = true }
+tree-sitter-bash = { workspace = true }
+uuid = { workspace = true, features = ["serde", "v4"] }
+which = { workspace = true }
+wildmatch = { workspace = true }
[target.'cfg(target_os = "linux")'.dependencies]
-landlock = "0.4.1"
-seccompiler = "0.5.0"
+landlock = { workspace = true }
+seccompiler = { workspace = true }
# Build OpenSSL from source for musl builds.
[target.x86_64-unknown-linux-musl.dependencies]
-openssl-sys = { version = "*", features = ["vendored"] }
+openssl-sys = { workspace = true, features = ["vendored"] }
# Build OpenSSL from source for musl builds.
[target.aarch64-unknown-linux-musl.dependencies]
-openssl-sys = { version = "*", features = ["vendored"] }
+openssl-sys = { workspace = true, features = ["vendored"] }
[dev-dependencies]
-assert_cmd = "2"
-core_test_support = { path = "tests/common" }
-maplit = "1.0.2"
-predicates = "3"
-pretty_assertions = "1.4.1"
-tempfile = "3"
-tokio-test = "0.4"
-walkdir = "2.5.0"
-wiremock = "0.6"
+assert_cmd = { workspace = true }
+core_test_support = { workspace = true }
+escargot = { workspace = true }
+maplit = { workspace = true }
+predicates = { workspace = true }
+pretty_assertions = { workspace = true }
+tempfile = { workspace = true }
+tokio-test = { workspace = true }
+walkdir = { workspace = true }
+wiremock = { workspace = true }
[package.metadata.cargo-shear]
ignored = ["openssl-sys"]
diff --git a/codex-rs/core/gpt_5_codex_prompt.md b/codex-rs/core/gpt_5_codex_prompt.md
index 2c49fafec62..9a298f460f4 100644
--- a/codex-rs/core/gpt_5_codex_prompt.md
+++ b/codex-rs/core/gpt_5_codex_prompt.md
@@ -26,37 +26,41 @@ When using the planning tool:
## Codex CLI harness, sandboxing, and approvals
-The Codex CLI harness supports several different sandboxing, and approval configurations that the user can choose from.
+The Codex CLI harness supports several different configurations for sandboxing and escalation approvals that the user can choose from.
-Filesystem sandboxing defines which files can be read or written. The options are:
-- **read-only**: You can only read files.
-- **workspace-write**: You can read files. You can write to files in this folder, but not outside it.
-- **danger-full-access**: No filesystem sandboxing.
+Filesystem sandboxing defines which files can be read or written. The options for `sandbox_mode` are:
+- **read-only**: The sandbox only permits reading files.
+- **workspace-write**: The sandbox permits reading files, and editing files in `cwd` and `writable_roots`. Editing files in other directories requires approval.
+- **danger-full-access**: No filesystem sandboxing - all commands are permitted.
-Network sandboxing defines whether network can be accessed without approval. Options are
+Network sandboxing defines whether network can be accessed without approval. Options for `network_access` are:
- **restricted**: Requires approval
- **enabled**: No approval needed
-Approvals are your mechanism to get user consent to perform more privileged actions. Although they introduce friction to the user because your work is paused until the user responds, you should leverage them to accomplish your important work. Do not let these settings or the sandbox deter you from attempting to accomplish the user's task unless it is set to "never", in which case never ask for approvals.
-
-Approval options are
+Approvals are your mechanism to get user consent to run shell commands without the sandbox. Possible configuration options for `approval_policy` are:
- **untrusted**: The harness will escalate most commands for user approval, apart from a limited allowlist of safe "read" commands.
- **on-failure**: The harness will allow all commands to run in the sandbox (if enabled), and failures will be escalated to the user for approval to run again without the sandbox.
- **on-request**: Commands will be run in the sandbox by default, and you can specify in your tool call if you want to escalate a command to run without sandboxing. (Note that this mode is not always available. If it is, you'll see parameters for it in the `shell` command description.)
- **never**: This is a non-interactive mode where you may NEVER ask the user for approval to run commands. Instead, you must always persist and work around constraints to solve the task for the user. You MUST do your utmost best to finish the task and validate your work before yielding. If this mode is paired with `danger-full-access`, take advantage of it to deliver the best outcome for the user. Further, in this mode, your default testing philosophy is overridden: Even if you don't see local patterns for testing, you may add tests and scripts to validate your work. Just remove them before yielding.
-When you are running with approvals `on-request`, and sandboxing enabled, here are scenarios where you'll need to request approval:
-- You need to run a command that writes to a directory that requires it (e.g. running tests that write to /tmp)
+When you are running with `approval_policy == on-request`, and sandboxing enabled, here are scenarios where you'll need to request approval:
+- You need to run a command that writes to a directory that requires it (e.g. running tests that write to /var)
- You need to run a GUI app (e.g., open/xdg-open/osascript) to open browsers or files.
- You are running sandboxed and need to run a command that requires network access (e.g. installing packages)
-- If you run a command that is important to solving the user's query, but it fails because of sandboxing, rerun the command with approval.
+- If you run a command that is important to solving the user's query, but it fails because of sandboxing, rerun the command with approval. ALWAYS proceed to use the `with_escalated_permissions` and `justification` parameters - do not message the user before requesting approval for the command.
- You are about to take a potentially destructive action such as an `rm` or `git reset` that the user did not explicitly ask for
- (for all of these, you should weigh alternative paths that do not require approval)
-When sandboxing is set to read-only, you'll need to request approval for any command that isn't a read.
+When `sandbox_mode` is set to read-only, you'll need to request approval for any command that isn't a read.
You will be told what filesystem sandboxing, network sandboxing, and approval mode are active in a developer or user message. If you are not told about this, assume that you are running with workspace-write, network sandboxing enabled, and approval on-failure.
+Although approvals introduce friction for the user because your work is paused until the user responds, you should leverage them when necessary to accomplish important work. If completing the task requires escalated permissions, do not let these settings or the sandbox deter you from attempting to accomplish the user's task, unless `approval_policy` is set to "never", in which case never ask for approvals.
+
+When requesting approval to execute a command that will require escalated privileges:
+ - Provide the `with_escalated_permissions` parameter with the boolean value true
+ - Include a short, 1 sentence explanation for why you need to enable `with_escalated_permissions` in the justification parameter
+
## Special user requests
- If the user makes a simple request (such as asking for the time) which you can fulfill by running a terminal command (such as `date`), you should do so.
diff --git a/codex-rs/core/src/apply_patch.rs b/codex-rs/core/src/apply_patch.rs
index 0afc06e99cf..1ebbe5d7389 100644
--- a/codex-rs/core/src/apply_patch.rs
+++ b/codex-rs/core/src/apply_patch.rs
@@ -1,13 +1,12 @@
use crate::codex::Session;
use crate::codex::TurnContext;
+use crate::function_tool::FunctionCallError;
use crate::protocol::FileChange;
use crate::protocol::ReviewDecision;
use crate::safety::SafetyCheck;
use crate::safety::assess_patch_safety;
use codex_apply_patch::ApplyPatchAction;
use codex_apply_patch::ApplyPatchFileChange;
-use codex_protocol::models::FunctionCallOutputPayload;
-use codex_protocol::models::ResponseInputItem;
use std::collections::HashMap;
use std::path::PathBuf;
@@ -17,7 +16,7 @@ pub(crate) enum InternalApplyPatchInvocation {
/// The `apply_patch` call was handled programmatically, without any sort
/// of sandbox, because the user explicitly approved it. This is the
/// result to use with the `shell` function call that contained `apply_patch`.
- Output(ResponseInputItem),
+ Output(Result),
/// The `apply_patch` call was approved, either automatically because it
/// appears that it should be allowed based on the user's sandbox policy
@@ -33,12 +32,6 @@ pub(crate) struct ApplyPatchExec {
pub(crate) user_explicitly_approved_this_action: bool,
}
-impl From for InternalApplyPatchInvocation {
- fn from(item: ResponseInputItem) -> Self {
- InternalApplyPatchInvocation::Output(item)
- }
-}
-
pub(crate) async fn apply_patch(
sess: &Session,
turn_context: &TurnContext,
@@ -77,25 +70,15 @@ pub(crate) async fn apply_patch(
})
}
ReviewDecision::Denied | ReviewDecision::Abort => {
- ResponseInputItem::FunctionCallOutput {
- call_id: call_id.to_owned(),
- output: FunctionCallOutputPayload {
- content: "patch rejected by user".to_string(),
- success: Some(false),
- },
- }
- .into()
+ InternalApplyPatchInvocation::Output(Err(FunctionCallError::RespondToModel(
+ "patch rejected by user".to_string(),
+ )))
}
}
}
- SafetyCheck::Reject { reason } => ResponseInputItem::FunctionCallOutput {
- call_id: call_id.to_owned(),
- output: FunctionCallOutputPayload {
- content: format!("patch rejected: {reason}"),
- success: Some(false),
- },
- }
- .into(),
+ SafetyCheck::Reject { reason } => InternalApplyPatchInvocation::Output(Err(
+ FunctionCallError::RespondToModel(format!("patch rejected: {reason}")),
+ )),
}
}
diff --git a/codex-rs/core/src/auth.rs b/codex-rs/core/src/auth.rs
index a2158310dcc..5ba6fdf7b22 100644
--- a/codex-rs/core/src/auth.rs
+++ b/codex-rs/core/src/auth.rs
@@ -267,6 +267,9 @@ pub fn try_read_auth_json(auth_file: &Path) -> std::io::Result {
}
pub fn write_auth_json(auth_file: &Path, auth_dot_json: &AuthDotJson) -> std::io::Result<()> {
+ if let Some(parent) = auth_file.parent() {
+ std::fs::create_dir_all(parent)?;
+ }
let json_data = serde_json::to_string_pretty(auth_dot_json)?;
let mut options = OpenOptions::new();
options.truncate(true).write(true).create(true);
diff --git a/codex-rs/core/src/bash.rs b/codex-rs/core/src/bash.rs
index 5b94daf2521..c87f2764b1c 100644
--- a/codex-rs/core/src/bash.rs
+++ b/codex-rs/core/src/bash.rs
@@ -1,3 +1,4 @@
+use tree_sitter::Node;
use tree_sitter::Parser;
use tree_sitter::Tree;
use tree_sitter_bash::LANGUAGE as BASH;
@@ -73,6 +74,9 @@ pub fn try_parse_word_only_commands_sequence(tree: &Tree, src: &str) -> Option Option Option>> {
+ let [bash, flag, script] = command else {
+ return None;
+ };
+
+ if bash != "bash" || flag != "-lc" {
+ return None;
+ }
+
+ let tree = try_parse_bash(script)?;
+ try_parse_word_only_commands_sequence(&tree, script)
+}
+
fn parse_plain_command_from_node(cmd: tree_sitter::Node, src: &str) -> Option> {
if cmd.kind() != "command" {
return None;
@@ -150,10 +169,10 @@ mod tests {
let src = "ls && pwd; echo 'hi there' | wc -l";
let cmds = parse_seq(src).unwrap();
let expected: Vec> = vec![
- vec!["wc".to_string(), "-l".to_string()],
- vec!["echo".to_string(), "hi there".to_string()],
- vec!["pwd".to_string()],
vec!["ls".to_string()],
+ vec!["pwd".to_string()],
+ vec!["echo".to_string(), "hi there".to_string()],
+ vec!["wc".to_string(), "-l".to_string()],
];
assert_eq!(cmds, expected);
}
diff --git a/codex-rs/core/src/chat_completions.rs b/codex-rs/core/src/chat_completions.rs
index fc8602de8eb..eddc7864845 100644
--- a/codex-rs/core/src/chat_completions.rs
+++ b/codex-rs/core/src/chat_completions.rs
@@ -35,6 +35,12 @@ pub(crate) async fn stream_chat_completions(
client: &reqwest::Client,
provider: &ModelProviderInfo,
) -> Result {
+ if prompt.output_schema.is_some() {
+ return Err(CodexErr::UnsupportedOperation(
+ "output_schema is not supported for Chat Completions API".to_string(),
+ ));
+ }
+
// Build messages array
let mut messages = Vec::::new();
@@ -462,7 +468,7 @@ async fn process_chat_sse(
if let Some(reasoning_val) = choice.get("delta").and_then(|d| d.get("reasoning")) {
let mut maybe_text = reasoning_val
.as_str()
- .map(|s| s.to_string())
+ .map(str::to_string)
.filter(|s| !s.is_empty());
if maybe_text.is_none() && reasoning_val.is_object() {
@@ -716,6 +722,9 @@ where
// Not an assistant message – forward immediately.
return Poll::Ready(Some(Ok(ResponseEvent::OutputItemDone(item))));
}
+ Poll::Ready(Some(Ok(ResponseEvent::RateLimits(snapshot)))) => {
+ return Poll::Ready(Some(Ok(ResponseEvent::RateLimits(snapshot))));
+ }
Poll::Ready(Some(Ok(ResponseEvent::Completed {
response_id,
token_usage,
diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs
index 055c3afa870..e8aca68fe8b 100644
--- a/codex-rs/core/src/client.rs
+++ b/codex-rs/core/src/client.rs
@@ -4,6 +4,7 @@ use std::sync::OnceLock;
use std::time::Duration;
use crate::AuthManager;
+use crate::auth::CodexAuth;
use bytes::Bytes;
use codex_protocol::mcp_protocol::AuthMode;
use codex_protocol::mcp_protocol::ConversationId;
@@ -11,6 +12,7 @@ use eventsource_stream::Eventsource;
use futures::prelude::*;
use regex_lite::Regex;
use reqwest::StatusCode;
+use reqwest::header::HeaderMap;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
@@ -40,6 +42,8 @@ use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::WireApi;
use crate::openai_model_info::get_model_info;
use crate::openai_tools::create_tools_json_for_responses_api;
+use crate::protocol::RateLimitSnapshot;
+use crate::protocol::RateLimitWindow;
use crate::protocol::TokenUsage;
use crate::token_data::PlanType;
use crate::util::backoff;
@@ -180,19 +184,23 @@ impl ModelClient {
let input_with_instructions = prompt.get_formatted_input();
- // Only include `text.verbosity` for GPT-5 family models
- let text = if self.config.model_family.family == "gpt-5" {
- create_text_param_for_request(self.config.model_verbosity)
- } else {
- if self.config.model_verbosity.is_some() {
- warn!(
- "model_verbosity is set but ignored for non-gpt-5 model family: {}",
- self.config.model_family.family
- );
+ let verbosity = match &self.config.model_family.family {
+ family if family == "gpt-5" => self.config.model_verbosity,
+ _ => {
+ if self.config.model_verbosity.is_some() {
+ warn!(
+ "model_verbosity is set but ignored for non-gpt-5 model family: {}",
+ self.config.model_family.family
+ );
+ }
+
+ None
}
- None
};
+ // Only include `text.verbosity` for GPT-5 family models
+ let text = create_text_param_for_request(verbosity, &prompt.output_schema);
+
// In general, we want to explicitly send `store: false` when using the Responses API,
// but in practice, the Azure Responses API rejects `store: false`:
//
@@ -221,144 +229,169 @@ impl ModelClient {
if azure_workaround {
attach_item_ids(&mut payload_json, &input_with_instructions);
}
- let payload_body = serde_json::to_string(&payload_json)?;
- let mut attempt = 0;
- let max_retries = self.provider.request_max_retries();
+ let max_attempts = self.provider.request_max_retries();
+ for attempt in 0..=max_attempts {
+ match self
+ .attempt_stream_responses(&payload_json, &auth_manager)
+ .await
+ {
+ Ok(stream) => {
+ return Ok(stream);
+ }
+ Err(StreamAttemptError::Fatal(e)) => {
+ return Err(e);
+ }
+ Err(retryable_attempt_error) => {
+ if attempt == max_attempts {
+ return Err(retryable_attempt_error.into_error());
+ }
+
+ tokio::time::sleep(retryable_attempt_error.delay(attempt)).await;
+ }
+ }
+ }
+
+ unreachable!("stream_responses_attempt should always return");
+ }
- loop {
- attempt += 1;
+ /// Single attempt to start a streaming Responses API call.
+ async fn attempt_stream_responses(
+ &self,
+ payload_json: &Value,
+ auth_manager: &Option>,
+ ) -> std::result::Result {
+ // Always fetch the latest auth in case a prior attempt refreshed the token.
+ let auth = auth_manager.as_ref().and_then(|m| m.auth());
+
+ trace!(
+ "POST to {}: {:?}",
+ self.provider.get_full_url(&auth),
+ serde_json::to_string(payload_json)
+ );
- // Always fetch the latest auth in case a prior attempt refreshed the token.
- let auth = auth_manager.as_ref().and_then(|m| m.auth());
+ let mut req_builder = self
+ .provider
+ .create_request_builder(&self.client, &auth)
+ .await
+ .map_err(StreamAttemptError::Fatal)?;
+
+ req_builder = req_builder
+ .header("OpenAI-Beta", "responses=experimental")
+ // Send session_id for compatibility.
+ .header("conversation_id", self.conversation_id.to_string())
+ .header("session_id", self.conversation_id.to_string())
+ .header(reqwest::header::ACCEPT, "text/event-stream")
+ .json(payload_json);
+
+ if let Some(auth) = auth.as_ref()
+ && auth.mode == AuthMode::ChatGPT
+ && let Some(account_id) = auth.get_account_id()
+ {
+ req_builder = req_builder.header("chatgpt-account-id", account_id);
+ }
+ let res = req_builder.send().await;
+ if let Ok(resp) = &res {
trace!(
- "POST to {}: {}",
- self.provider.get_full_url(&auth),
- payload_body.as_str()
+ "Response status: {}, cf-ray: {}",
+ resp.status(),
+ resp.headers()
+ .get("cf-ray")
+ .map(|v| v.to_str().unwrap_or_default())
+ .unwrap_or_default()
);
+ }
- let mut req_builder = self
- .provider
- .create_request_builder(&self.client, &auth)
- .await?;
+ match res {
+ Ok(resp) if resp.status().is_success() => {
+ let (tx_event, rx_event) = mpsc::channel::>(1600);
+
+ if let Some(snapshot) = parse_rate_limit_snapshot(resp.headers())
+ && tx_event
+ .send(Ok(ResponseEvent::RateLimits(snapshot)))
+ .await
+ .is_err()
+ {
+ debug!("receiver dropped rate limit snapshot event");
+ }
- req_builder = req_builder
- .header("OpenAI-Beta", "responses=experimental")
- // Send session_id for compatibility.
- .header("conversation_id", self.conversation_id.to_string())
- .header("session_id", self.conversation_id.to_string())
- .header(reqwest::header::ACCEPT, "text/event-stream")
- .json(&payload_json);
-
- if let Some(auth) = auth.as_ref()
- && auth.mode == AuthMode::ChatGPT
- && let Some(account_id) = auth.get_account_id()
- {
- req_builder = req_builder.header("chatgpt-account-id", account_id);
- }
+ // spawn task to process SSE
+ let stream = resp.bytes_stream().map_err(CodexErr::Reqwest);
+ tokio::spawn(process_sse(
+ stream,
+ tx_event,
+ self.provider.stream_idle_timeout(),
+ ));
- let res = req_builder.send().await;
- if let Ok(resp) = &res {
- trace!(
- "Response status: {}, cf-ray: {}",
- resp.status(),
- resp.headers()
- .get("cf-ray")
- .map(|v| v.to_str().unwrap_or_default())
- .unwrap_or_default()
- );
+ Ok(ResponseStream { rx_event })
}
-
- match res {
- Ok(resp) if resp.status().is_success() => {
- let (tx_event, rx_event) = mpsc::channel::>(1600);
-
- // spawn task to process SSE
- let stream = resp.bytes_stream().map_err(CodexErr::Reqwest);
- tokio::spawn(process_sse(
- stream,
- tx_event,
- self.provider.stream_idle_timeout(),
- ));
-
- return Ok(ResponseStream { rx_event });
+ Ok(res) => {
+ let status = res.status();
+
+ // Pull out Retry‑After header if present.
+ let retry_after_secs = res
+ .headers()
+ .get(reqwest::header::RETRY_AFTER)
+ .and_then(|v| v.to_str().ok())
+ .and_then(|s| s.parse::().ok());
+ let retry_after = retry_after_secs.map(|s| Duration::from_millis(s * 1_000));
+
+ if status == StatusCode::UNAUTHORIZED
+ && let Some(manager) = auth_manager.as_ref()
+ && manager.auth().is_some()
+ {
+ let _ = manager.refresh_token().await;
}
- Ok(res) => {
- let status = res.status();
-
- // Pull out Retry‑After header if present.
- let retry_after_secs = res
- .headers()
- .get(reqwest::header::RETRY_AFTER)
- .and_then(|v| v.to_str().ok())
- .and_then(|s| s.parse::().ok());
-
- if status == StatusCode::UNAUTHORIZED
- && let Some(manager) = auth_manager.as_ref()
- && manager.auth().is_some()
- {
- let _ = manager.refresh_token().await;
- }
- // The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx
- // errors. When we bubble early with only the HTTP status the caller sees an opaque
- // "unexpected status 400 Bad Request" which makes debugging nearly impossible.
- // Instead, read (and include) the response text so higher layers and users see the
- // exact error message (e.g. "Unknown parameter: 'input[0].metadata'"). The body is
- // small and this branch only runs on error paths so the extra allocation is
- // negligible.
- if !(status == StatusCode::TOO_MANY_REQUESTS
- || status == StatusCode::UNAUTHORIZED
- || status.is_server_error())
- {
- // Surface the error body to callers. Use `unwrap_or_default` per Clippy.
- let body = res.text().await.unwrap_or_default();
- return Err(CodexErr::UnexpectedStatus(status, body));
- }
-
- if status == StatusCode::TOO_MANY_REQUESTS {
- let body = res.json::().await.ok();
- if let Some(ErrorResponse { error }) = body {
- if error.r#type.as_deref() == Some("usage_limit_reached") {
- // Prefer the plan_type provided in the error message if present
- // because it's more up to date than the one encoded in the auth
- // token.
- let plan_type = error
- .plan_type
- .or_else(|| auth.as_ref().and_then(|a| a.get_plan_type()));
- let resets_in_seconds = error.resets_in_seconds;
- return Err(CodexErr::UsageLimitReached(UsageLimitReachedError {
- plan_type,
- resets_in_seconds,
- }));
- } else if error.r#type.as_deref() == Some("usage_not_included") {
- return Err(CodexErr::UsageNotIncluded);
- }
- }
- }
+ // The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx
+ // errors. When we bubble early with only the HTTP status the caller sees an opaque
+ // "unexpected status 400 Bad Request" which makes debugging nearly impossible.
+ // Instead, read (and include) the response text so higher layers and users see the
+ // exact error message (e.g. "Unknown parameter: 'input[0].metadata'"). The body is
+ // small and this branch only runs on error paths so the extra allocation is
+ // negligible.
+ if !(status == StatusCode::TOO_MANY_REQUESTS
+ || status == StatusCode::UNAUTHORIZED
+ || status.is_server_error())
+ {
+ // Surface the error body to callers. Use `unwrap_or_default` per Clippy.
+ let body = res.text().await.unwrap_or_default();
+ return Err(StreamAttemptError::Fatal(CodexErr::UnexpectedStatus(
+ status, body,
+ )));
+ }
- if attempt > max_retries {
- if status == StatusCode::INTERNAL_SERVER_ERROR {
- return Err(CodexErr::InternalServerError);
+ if status == StatusCode::TOO_MANY_REQUESTS {
+ let rate_limit_snapshot = parse_rate_limit_snapshot(res.headers());
+ let body = res.json::().await.ok();
+ if let Some(ErrorResponse { error }) = body {
+ if error.r#type.as_deref() == Some("usage_limit_reached") {
+ // Prefer the plan_type provided in the error message if present
+ // because it's more up to date than the one encoded in the auth
+ // token.
+ let plan_type = error
+ .plan_type
+ .or_else(|| auth.as_ref().and_then(CodexAuth::get_plan_type));
+ let resets_in_seconds = error.resets_in_seconds;
+ let codex_err = CodexErr::UsageLimitReached(UsageLimitReachedError {
+ plan_type,
+ resets_in_seconds,
+ rate_limits: rate_limit_snapshot,
+ });
+ return Err(StreamAttemptError::Fatal(codex_err));
+ } else if error.r#type.as_deref() == Some("usage_not_included") {
+ return Err(StreamAttemptError::Fatal(CodexErr::UsageNotIncluded));
}
-
- return Err(CodexErr::RetryLimit(status));
}
-
- let delay = retry_after_secs
- .map(|s| Duration::from_millis(s * 1_000))
- .unwrap_or_else(|| backoff(attempt));
- tokio::time::sleep(delay).await;
- }
- Err(e) => {
- if attempt > max_retries {
- return Err(e.into());
- }
- let delay = backoff(attempt);
- tokio::time::sleep(delay).await;
}
+
+ Err(StreamAttemptError::RetryableHttpError {
+ status,
+ retry_after,
+ })
}
+ Err(e) => Err(StreamAttemptError::RetryableTransportError(e.into())),
}
}
@@ -391,6 +424,47 @@ impl ModelClient {
}
}
+enum StreamAttemptError {
+ RetryableHttpError {
+ status: StatusCode,
+ retry_after: Option,
+ },
+ RetryableTransportError(CodexErr),
+ Fatal(CodexErr),
+}
+
+impl StreamAttemptError {
+ /// attempt is 0-based.
+ fn delay(&self, attempt: u64) -> Duration {
+ // backoff() uses 1-based attempts.
+ let backoff_attempt = attempt + 1;
+ match self {
+ Self::RetryableHttpError { retry_after, .. } => {
+ retry_after.unwrap_or_else(|| backoff(backoff_attempt))
+ }
+ Self::RetryableTransportError { .. } => backoff(backoff_attempt),
+ Self::Fatal(_) => {
+ // Should not be called on Fatal errors.
+ Duration::from_secs(0)
+ }
+ }
+ }
+
+ fn into_error(self) -> CodexErr {
+ match self {
+ Self::RetryableHttpError { status, .. } => {
+ if status == StatusCode::INTERNAL_SERVER_ERROR {
+ CodexErr::InternalServerError
+ } else {
+ CodexErr::RetryLimit(status)
+ }
+ }
+ Self::RetryableTransportError(error) => error,
+ Self::Fatal(error) => error,
+ }
+ }
+}
+
#[derive(Debug, Deserialize, Serialize)]
struct SseEvent {
#[serde(rename = "type")]
@@ -400,9 +474,6 @@ struct SseEvent {
delta: Option,
}
-#[derive(Debug, Deserialize)]
-struct ResponseCreated {}
-
#[derive(Debug, Deserialize)]
struct ResponseCompleted {
id: String,
@@ -473,6 +544,63 @@ fn attach_item_ids(payload_json: &mut Value, original_items: &[ResponseItem]) {
}
}
+fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option {
+ let primary = parse_rate_limit_window(
+ headers,
+ "x-codex-primary-used-percent",
+ "x-codex-primary-window-minutes",
+ "x-codex-primary-reset-after-seconds",
+ );
+
+ let secondary = parse_rate_limit_window(
+ headers,
+ "x-codex-secondary-used-percent",
+ "x-codex-secondary-window-minutes",
+ "x-codex-secondary-reset-after-seconds",
+ );
+
+ Some(RateLimitSnapshot { primary, secondary })
+}
+
+fn parse_rate_limit_window(
+ headers: &HeaderMap,
+ used_percent_header: &str,
+ window_minutes_header: &str,
+ resets_header: &str,
+) -> Option {
+ let used_percent: Option = parse_header_f64(headers, used_percent_header);
+
+ used_percent.and_then(|used_percent| {
+ let window_minutes = parse_header_u64(headers, window_minutes_header);
+ let resets_in_seconds = parse_header_u64(headers, resets_header);
+
+ let has_data = used_percent != 0.0
+ || window_minutes.is_some_and(|minutes| minutes != 0)
+ || resets_in_seconds.is_some_and(|seconds| seconds != 0);
+
+ has_data.then_some(RateLimitWindow {
+ used_percent,
+ window_minutes,
+ resets_in_seconds,
+ })
+ })
+}
+
+fn parse_header_f64(headers: &HeaderMap, name: &str) -> Option {
+ parse_header_str(headers, name)?
+ .parse::()
+ .ok()
+ .filter(|v| v.is_finite())
+}
+
+fn parse_header_u64(headers: &HeaderMap, name: &str) -> Option {
+ parse_header_str(headers, name)?.parse::().ok()
+}
+
+fn parse_header_str<'a>(headers: &'a HeaderMap, name: &str) -> Option<&'a str> {
+ headers.get(name)?.to_str().ok()
+}
+
async fn process_sse(
stream: S,
tx_event: mpsc::Sender>,
diff --git a/codex-rs/core/src/client_common.rs b/codex-rs/core/src/client_common.rs
index eead654b9e9..b695581deb2 100644
--- a/codex-rs/core/src/client_common.rs
+++ b/codex-rs/core/src/client_common.rs
@@ -1,6 +1,7 @@
use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::openai_tools::OpenAiTool;
+use crate::protocol::RateLimitSnapshot;
use crate::protocol::TokenUsage;
use codex_apply_patch::APPLY_PATCH_TOOL_INSTRUCTIONS;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
@@ -9,6 +10,7 @@ use codex_protocol::config_types::Verbosity as VerbosityConfig;
use codex_protocol::models::ResponseItem;
use futures::Stream;
use serde::Serialize;
+use serde_json::Value;
use std::borrow::Cow;
use std::ops::Deref;
use std::pin::Pin;
@@ -31,16 +33,17 @@ pub struct Prompt {
/// Optional override for the built-in BASE_INSTRUCTIONS.
pub base_instructions_override: Option<String>,
+
+ /// Optional output schema for the model's response.
+ pub output_schema: Option<Value>,
}
impl Prompt {
- pub(crate) fn get_full_instructions(&self, model: &ModelFamily) -> Cow<'_, str> {
+ pub(crate) fn get_full_instructions<'a>(&'a self, model: &'a ModelFamily) -> Cow<'a, str> {
let base = self
.base_instructions_override
.as_deref()
.unwrap_or(model.base_instructions.deref());
- let mut sections: Vec<&str> = vec![base];
-
// When there are no custom instructions, add apply_patch_tool_instructions if:
// - the model needs special instructions (4.1)
// AND
@@ -54,9 +57,10 @@ impl Prompt {
&& model.needs_special_apply_patch_instructions
&& !is_apply_patch_tool_present
{
- sections.push(APPLY_PATCH_TOOL_INSTRUCTIONS);
+ Cow::Owned(format!("{base}\n{APPLY_PATCH_TOOL_INSTRUCTIONS}"))
+ } else {
+ Cow::Borrowed(base)
}
- Cow::Owned(sections.join("\n"))
}
pub(crate) fn get_formatted_input(&self) -> Vec<ResponseItem> {
@@ -79,6 +83,7 @@ pub enum ResponseEvent {
WebSearchCallBegin {
call_id: String,
},
+ RateLimits(RateLimitSnapshot),
}
#[derive(Debug, Serialize)]
@@ -89,14 +94,31 @@ pub(crate) struct Reasoning {
pub(crate) summary: Option,
}
+#[derive(Debug, Serialize, Default, Clone)]
+#[serde(rename_all = "snake_case")]
+pub(crate) enum TextFormatType {
+ #[default]
+ JsonSchema,
+}
+
+#[derive(Debug, Serialize, Default, Clone)]
+pub(crate) struct TextFormat {
+ pub(crate) r#type: TextFormatType,
+ pub(crate) strict: bool,
+ pub(crate) schema: Value,
+ pub(crate) name: String,
+}
+
/// Controls under the `text` field in the Responses API for GPT-5.
-#[derive(Debug, Serialize, Default, Clone, Copy)]
+#[derive(Debug, Serialize, Default, Clone)]
pub(crate) struct TextControls {
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) verbosity: Option<OpenAiVerbosity>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) format: Option<TextFormat>,
}
-#[derive(Debug, Serialize, Default, Clone, Copy)]
+#[derive(Debug, Serialize, Default, Clone)]
#[serde(rename_all = "lowercase")]
pub(crate) enum OpenAiVerbosity {
Low,
@@ -155,9 +177,20 @@ pub(crate) fn create_reasoning_param_for_request(
pub(crate) fn create_text_param_for_request(
verbosity: Option<VerbosityConfig>,
+ output_schema: &Option<Value>,
) -> Option<TextControls> {
- verbosity.map(|v| TextControls {
- verbosity: Some(v.into()),
+ if verbosity.is_none() && output_schema.is_none() {
+ return None;
+ }
+
+ Some(TextControls {
+ verbosity: verbosity.map(std::convert::Into::into),
+ format: output_schema.as_ref().map(|schema| TextFormat {
+ r#type: TextFormatType::JsonSchema,
+ strict: true,
+ schema: schema.clone(),
+ name: "codex_output_schema".to_string(),
+ }),
})
}
@@ -254,6 +287,7 @@ mod tests {
prompt_cache_key: None,
text: Some(TextControls {
verbosity: Some(OpenAiVerbosity::Low),
+ format: None,
}),
};
@@ -266,6 +300,52 @@ mod tests {
);
}
+ #[test]
+ fn serializes_text_schema_with_strict_format() {
+ let input: Vec<ResponseItem> = vec![];
+ let tools: Vec<OpenAiTool> = vec![];
+ let schema = serde_json::json!({
+ "type": "object",
+ "properties": {
+ "answer": {"type": "string"}
+ },
+ "required": ["answer"],
+ });
+ let text_controls =
+ create_text_param_for_request(None, &Some(schema.clone())).expect("text controls");
+
+ let req = ResponsesApiRequest {
+ model: "gpt-5",
+ instructions: "i",
+ input: &input,
+ tools: &tools,
+ tool_choice: "auto",
+ parallel_tool_calls: false,
+ reasoning: None,
+ store: false,
+ stream: true,
+ include: vec![],
+ prompt_cache_key: None,
+ text: Some(text_controls),
+ };
+
+ let v = serde_json::to_value(&req).expect("json");
+ let text = v.get("text").expect("text field");
+ assert!(text.get("verbosity").is_none());
+ let format = text.get("format").expect("format field");
+
+ assert_eq!(
+ format.get("name"),
+ Some(&serde_json::Value::String("codex_output_schema".into()))
+ );
+ assert_eq!(
+ format.get("type"),
+ Some(&serde_json::Value::String("json_schema".into()))
+ );
+ assert_eq!(format.get("strict"), Some(&serde_json::Value::Bool(true)));
+ assert_eq!(format.get("schema"), Some(&schema));
+ }
+
#[test]
fn omits_text_when_not_set() {
let input: Vec = vec![];
diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs
index b28bd6ff561..56a1a152589 100644
--- a/codex-rs/core/src/codex.rs
+++ b/codex-rs/core/src/codex.rs
@@ -1,16 +1,17 @@
use std::borrow::Cow;
use std::collections::HashMap;
-use std::collections::HashSet;
+use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
-use std::sync::Mutex;
-use std::sync::MutexGuard;
use std::sync::atomic::AtomicU64;
use std::time::Duration;
use crate::AuthManager;
use crate::client_common::REVIEW_PROMPT;
use crate::event_mapping::map_response_item_to_event_messages;
+use crate::function_tool::FunctionCallError;
+use crate::review_format::format_review_findings_block;
+use crate::user_notification::UserNotifier;
use async_channel::Receiver;
use async_channel::Sender;
use codex_apply_patch::ApplyPatchAction;
@@ -23,15 +24,15 @@ use codex_protocol::protocol::ReviewRequest;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::TaskStartedEvent;
use codex_protocol::protocol::TurnAbortReason;
-use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::TurnContextItem;
use futures::prelude::*;
use mcp_types::CallToolResult;
use serde::Deserialize;
use serde::Serialize;
use serde_json;
+use serde_json::Value;
+use tokio::sync::Mutex;
use tokio::sync::oneshot;
-use tokio::task::AbortHandle;
use tracing::debug;
use tracing::error;
use tracing::info;
@@ -94,18 +95,20 @@ use crate::protocol::ExecCommandEndEvent;
use crate::protocol::FileChange;
use crate::protocol::InputItem;
use crate::protocol::ListCustomPromptsResponseEvent;
+use crate::protocol::ListSlashCommandsResponseEvent;
use crate::protocol::Op;
use crate::protocol::PatchApplyBeginEvent;
use crate::protocol::PatchApplyEndEvent;
+use crate::protocol::RateLimitSnapshot;
use crate::protocol::ReviewDecision;
use crate::protocol::ReviewOutputEvent;
use crate::protocol::SandboxPolicy;
use crate::protocol::SessionConfiguredEvent;
+use crate::protocol::SlashCommandListEntry;
use crate::protocol::StreamErrorEvent;
use crate::protocol::Submission;
-use crate::protocol::TaskCompleteEvent;
+use crate::protocol::TokenCountEvent;
use crate::protocol::TokenUsage;
-use crate::protocol::TokenUsageInfo;
use crate::protocol::TurnDiffEvent;
use crate::protocol::WebSearchBeginEvent;
use crate::rollout::RolloutRecorder;
@@ -120,6 +123,11 @@ use crate::slash_commands::CommandInvocation;
use crate::slash_commands::InvocationError;
#[cfg(feature = "slash_commands")]
use crate::slash_commands::SlashCommandService;
+use crate::state::ActiveTurn;
+use crate::state::SessionServices;
+use crate::tasks::CompactTask;
+use crate::tasks::RegularTask;
+use crate::tasks::ReviewTask;
use crate::turn_diff_tracker::TurnDiffTracker;
use crate::unified_exec::UnifiedExecSessionManager;
use crate::user_instructions::UserInstructions;
@@ -136,25 +144,10 @@ use codex_protocol::models::ResponseItem;
use codex_protocol::models::ShellToolCallParams;
use codex_protocol::protocol::InitialHistory;
-mod compact;
+pub mod compact;
use self::compact::build_compacted_history;
use self::compact::collect_user_messages;
-// A convenience extension trait for acquiring mutex locks where poisoning is
-// unrecoverable and should abort the program. This avoids scattered `.unwrap()`
-// calls on `lock()` while still surfacing a clear panic message when a lock is
-// poisoned.
-trait MutexExt<T> {
- fn lock_unchecked(&self) -> MutexGuard<'_, T>;
-}
-
-impl<T> MutexExt<T> for Mutex<T> {
- fn lock_unchecked(&self) -> MutexGuard<'_, T> {
- #[expect(clippy::expect_used)]
- self.lock().expect("poisoned lock")
- }
-}
-
/// The high-level interface to the Codex system.
/// It operates as a queue pair where you send submissions and receive events.
pub struct Codex {
@@ -204,7 +197,7 @@ impl Codex {
base_instructions: config.base_instructions.clone(),
approval_policy: config.approval_policy,
sandbox_policy: config.sandbox_policy.clone(),
- notify: config.notify.clone(),
+ notify: UserNotifier::new(config.notify.clone()),
cwd: config.cwd.clone(),
};
@@ -268,17 +261,7 @@ impl Codex {
}
}
-/// Mutable state of the agent
-#[derive(Default)]
-struct State {
- approved_commands: HashSet<Vec<String>>,
- current_task: Option<AgentTask>,
- pending_approvals: HashMap<String, oneshot::Sender<ReviewDecision>>,
- pending_input: Vec<ResponseInputItem>,
- history: ConversationHistory,
- token_info: Option<TokenUsageInfo>,
- next_internal_sub_id: u64,
-}
+use crate::state::SessionState;
/// Context for an initialized model agent
///
@@ -286,25 +269,10 @@ struct State {
pub(crate) struct Session {
conversation_id: ConversationId,
tx_event: Sender,
-
- /// Manager for external MCP servers/tools.
- mcp_connection_manager: McpConnectionManager,
- session_manager: ExecSessionManager,
- unified_exec_manager: UnifiedExecSessionManager,
-
- /// External notifier command (will be passed as args to exec()). When
- /// `None` this feature is disabled.
- notify: Option<Vec<String>>,
-
- /// Optional rollout recorder for persisting the conversation transcript so
- /// sessions can be replayed or inspected later.
- rollout: Mutex